5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
47 import simplejson as json
49 from datetime import datetime, timedelta
50 from errno import ENOENT
51 from tempfile import mkstemp, mkdtemp
53 from inspect import getargspec
56 from sqlalchemy import create_engine, Table, MetaData, Column, Integer
57 from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, \
58 backref, MapperExtension, EXT_CONTINUE
59 from sqlalchemy import types as sqltypes
61 # Don't remove this, we re-export the exceptions to scripts which import us
62 from sqlalchemy.exc import *
63 from sqlalchemy.orm.exc import NoResultFound
65 # Only import Config until Queue stuff is changed to store its config
67 from config import Config
68 from textutils import fix_maintainer
69 from dak_exceptions import DBUpdateError, NoSourceFieldError
71 # suppress some deprecation warnings in squeeze related to sqlalchemy
73 warnings.filterwarnings('ignore', \
74 "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'.*", \
76 # TODO: sqlalchemy needs some extra configuration to correctly reflect
77 # the ind_deb_contents_* indexes - we ignore the warnings at the moment
78 warnings.filterwarnings("ignore", 'Predicate of partial index', SAWarning)
81 ################################################################################
83 # Patch in support for the debversion field type so that it works during
87 # that is for sqlalchemy 0.6
88 UserDefinedType = sqltypes.UserDefinedType
90 # this one for sqlalchemy 0.5
91 UserDefinedType = sqltypes.TypeEngine
93 class DebVersion(UserDefinedType):
94 def get_col_spec(self):
97 def bind_processor(self, dialect):
100 # ' = None' is needed for sqlalchemy 0.5:
101 def result_processor(self, dialect, coltype = None):
104 sa_major_version = sqlalchemy.__version__[0:3]
105 if sa_major_version in ["0.5", "0.6"]:
106 from sqlalchemy.databases import postgres
107 postgres.ischema_names['debversion'] = DebVersion
109 raise Exception("dak only ported to SQLA versions 0.5 and 0.6. See daklib/dbconn.py")
111 ################################################################################
113 __all__ = ['IntegrityError', 'SQLAlchemyError', 'DebVersion']
115 ################################################################################
117 def session_wrapper(fn):
119 Wrapper around common ".., session=None):" handling. If the wrapped
120 function is called without passing 'session', we create a local one
121 and destroy it when the function ends.
123 Also attaches a commit_or_flush method to the session; if we created a
124 local session, this is a synonym for session.commit(), otherwise it is a
125 synonym for session.flush().
128 def wrapped(*args, **kwargs):
129 private_transaction = False
131 # Find the session object
132 session = kwargs.get('session')
135 if len(args) <= len(getargspec(fn)[0]) - 1:
136 # No session specified as last argument or in kwargs
137 private_transaction = True
138 session = kwargs['session'] = DBConn().session()
140 # Session is last argument in args
144 session = args[-1] = DBConn().session()
145 private_transaction = True
147 if private_transaction:
148 session.commit_or_flush = session.commit
150 session.commit_or_flush = session.flush
153 return fn(*args, **kwargs)
155 if private_transaction:
156 # We created a session; close it.
159 wrapped.__doc__ = fn.__doc__
160 wrapped.func_name = fn.func_name
164 __all__.append('session_wrapper')
166 ################################################################################
168 class ORMObject(object):
170 ORMObject is a base class for all ORM classes mapped by SQLalchemy. All
171 derived classes must implement the properties() method.
174 def properties(self):
176 This method should be implemented by all derived classes and returns a
177 list of the important properties. The properties 'created' and
178 'modified' will be added automatically. A suffix '_count' should be
179 added to properties that are lists or query objects. The most important
180 property name should be returned as the first element in the list
181 because it is used by repr().
187 Returns a JSON representation of the object based on the properties
188 returned from the properties() method.
191 # add created and modified
192 all_properties = self.properties() + ['created', 'modified']
193 for property in all_properties:
194 # check for list or query
195 if property[-6:] == '_count':
196 real_property = property[:-6]
197 if not hasattr(self, real_property):
199 value = getattr(self, real_property)
200 if hasattr(value, '__len__'):
203 elif hasattr(value, 'count'):
205 value = value.count()
207 raise KeyError('Do not understand property %s.' % property)
209 if not hasattr(self, property):
212 value = getattr(self, property)
216 elif isinstance(value, ORMObject):
217 # use repr() for ORMObject types
220 # we want a string for all other types because json cannot
223 data[property] = value
224 return json.dumps(data)
228 Returns the name of the class.
230 return type(self).__name__
234 Returns a short string representation of the object using the first
235 element from the properties() method.
237 primary_property = self.properties()[0]
238 value = getattr(self, primary_property)
239 return '<%s %s>' % (self.classname(), str(value))
243 Returns a human readable form of the object using the properties()
246 return '<%s %s>' % (self.classname(), self.json())
248 def not_null_constraints(self):
250 Returns a list of properties that must be not NULL. Derived classes
251 should override this method if needed.
255 validation_message = \
256 "Validation failed because property '%s' must not be empty in object\n%s"
260 This function validates the not NULL constraints as returned by
261 not_null_constraints(). It raises the DBUpdateError exception if
264 for property in self.not_null_constraints():
265 # TODO: It is a bit awkward that the mapper configuration allow
266 # directly setting the numeric _id columns. We should get rid of it
268 if hasattr(self, property + '_id') and \
269 getattr(self, property + '_id') is not None:
271 if not hasattr(self, property) or getattr(self, property) is None:
272 raise DBUpdateError(self.validation_message % \
273 (property, str(self)))
277 def get(cls, primary_key, session = None):
279 This is a support function that allows getting an object by its primary
282 Architecture.get(3[, session])
284 instead of the more verbose
286 session.query(Architecture).get(3)
288 return session.query(cls).get(primary_key)
290 __all__.append('ORMObject')
292 ################################################################################
294 class Validator(MapperExtension):
296 This class calls the validate() method for each instance for the
297 'before_update' and 'before_insert' events. A global object validator is
298 used for configuring the individual mappers.
301 def before_update(self, mapper, connection, instance):
305 def before_insert(self, mapper, connection, instance):
309 validator = Validator()
311 ################################################################################
class Architecture(ORMObject):
    """ORM class for one row of the architecture table."""

    def __init__(self, arch_string = None, description = None):
        self.arch_string = arch_string
        self.description = description

    def __eq__(self, val):
        # Support direct comparison against a plain architecture name
        # string; for any other type defer to the default comparison
        # machinery by returning NotImplemented.
        if not isinstance(val, str):
            return NotImplemented
        return self.arch_string == val

    def __ne__(self, val):
        # Mirror of __eq__ for inequality against plain strings.
        if not isinstance(val, str):
            return NotImplemented
        return self.arch_string != val

    def properties(self):
        # 'arch_string' is listed first because ORMObject.__repr__ uses
        # the first element as the primary property.
        return ['arch_string', 'arch_id', 'suites_count']

    def not_null_constraints(self):
        return ['arch_string']
336 __all__.append('Architecture')
@session_wrapper
def get_architecture(architecture, session=None):
    """
    Returns Architecture object for given C{architecture} name.

    @type architecture: string
    @param architecture: The name of the architecture

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Architecture
    @return: Architecture object for the given arch (None if not present)
    """
    q = session.query(Architecture).filter_by(arch_string=architecture)

    try:
        return q.one()
    except NoResultFound:
        # No such architecture in the database; documented as returning None.
        return None
361 __all__.append('get_architecture')
363 # TODO: should be removed because the implementation is too trivial
@session_wrapper
def get_architecture_suites(architecture, session=None):
    """
    Returns list of Suite objects for given C{architecture} name

    @type architecture: str
    @param architecture: Architecture name to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of Suite objects for the given name (may be empty)
    """
    arch = get_architecture(architecture, session)
    # An unknown architecture used to raise AttributeError (None has no
    # .suites); return the documented "may be empty" list instead.
    if arch is None:
        return []
    return arch.suites
382 __all__.append('get_architecture_suites')
384 ################################################################################
class Archive(object):
    """ORM stub for one row of the archive table; attributes are mapped
    in by SQLAlchemy."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<Archive %s>' % self.archive_name
393 __all__.append('Archive')
@session_wrapper
def get_archive(archive, session=None):
    """
    Returns Archive object for given C{archive} name.

    @type archive: string
    @param archive: the name of the archive

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Archive
    @return: Archive object for the given name (None if not present)
    """
    # Archive names are stored lower-cased.
    archive = archive.lower()

    q = session.query(Archive).filter_by(archive_name=archive)

    try:
        return q.one()
    except NoResultFound:
        return None
420 __all__.append('get_archive')
422 ################################################################################
class BinContents(object):
    """ORM stub for one row of the bin_contents table; attributes are
    mapped in by SQLAlchemy."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<BinContents (%s, %s)>' % (self.binary, self.filename)
431 __all__.append('BinContents')
433 ################################################################################
435 class DBBinary(ORMObject):
436 def __init__(self, package = None, source = None, version = None, \
437 maintainer = None, architecture = None, poolfile = None, \
439 self.package = package
441 self.version = version
442 self.maintainer = maintainer
443 self.architecture = architecture
444 self.poolfile = poolfile
445 self.binarytype = binarytype
447 def properties(self):
448 return ['package', 'version', 'maintainer', 'source', 'architecture', \
449 'poolfile', 'binarytype', 'fingerprint', 'install_date', \
450 'suites_count', 'binary_id']
452 def not_null_constraints(self):
453 return ['package', 'version', 'maintainer', 'source', 'poolfile', \
456 __all__.append('DBBinary')
459 def get_suites_binary_in(package, session=None):
461 Returns list of Suite objects which given C{package} name is in
464 @param package: DBBinary package name to search for
467 @return: list of Suite objects for the given package
470 return session.query(Suite).filter(Suite.binaries.any(DBBinary.package == package)).all()
472 __all__.append('get_suites_binary_in')
475 def get_binary_from_name_suite(package, suitename, session=None):
476 ### For dak examine-package
477 ### XXX: Doesn't use object API yet
479 sql = """SELECT DISTINCT(b.package), b.version, c.name, su.suite_name
480 FROM binaries b, files fi, location l, component c, bin_associations ba, suite su
481 WHERE b.package='%(package)s'
483 AND fi.location = l.id
484 AND l.component = c.id
487 AND su.suite_name %(suitename)s
488 ORDER BY b.version DESC"""
490 return session.execute(sql % {'package': package, 'suitename': suitename})
492 __all__.append('get_binary_from_name_suite')
495 def get_binary_components(package, suitename, arch, session=None):
496 # Check for packages that have moved from one component to another
497 query = """SELECT c.name FROM binaries b, bin_associations ba, suite s, location l, component c, architecture a, files f
498 WHERE b.package=:package AND s.suite_name=:suitename
499 AND (a.arch_string = :arch OR a.arch_string = 'all')
500 AND ba.bin = b.id AND ba.suite = s.id AND b.architecture = a.id
501 AND f.location = l.id
502 AND l.component = c.id
505 vals = {'package': package, 'suitename': suitename, 'arch': arch}
507 return session.execute(query, vals)
509 __all__.append('get_binary_components')
511 ################################################################################
class BinaryACL(object):
    """ORM stub for one row of the binary_acl table; attributes are
    mapped in by SQLAlchemy."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<BinaryACL %s>' % self.binary_acl_id
520 __all__.append('BinaryACL')
522 ################################################################################
class BinaryACLMap(object):
    """ORM stub for one row of the binary_acl_map table; attributes are
    mapped in by SQLAlchemy."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<BinaryACLMap %s>' % self.binary_acl_map_id
531 __all__.append('BinaryACLMap')
533 ################################################################################
538 ArchiveDir "%(archivepath)s";
539 OverrideDir "%(overridedir)s";
540 CacheDir "%(cachedir)s";
545 Packages::Compress ". bzip2 gzip";
546 Sources::Compress ". bzip2 gzip";
551 bindirectory "incoming"
556 BinOverride "override.sid.all3";
557 BinCacheDB "packages-accepted.db";
559 FileList "%(filelist)s";
562 Packages::Extensions ".deb .udeb";
565 bindirectory "incoming/"
568 BinOverride "override.sid.all3";
569 SrcOverride "override.sid.all3.src";
570 FileList "%(filelist)s";
574 class BuildQueue(object):
575 def __init__(self, *args, **kwargs):
579 return '<BuildQueue %s>' % self.queue_name
581 def write_metadata(self, starttime, force=False):
582 # Do we write out metafiles?
583 if not (force or self.generate_metadata):
586 session = DBConn().session().object_session(self)
588 fl_fd = fl_name = ac_fd = ac_name = None
590 arches = " ".join([ a.arch_string for a in session.query(Architecture).all() if a.arch_string != 'source' ])
591 startdir = os.getcwd()
594 # Grab files we want to include
595 newer = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) > starttime).all()
596 # Write file list with newer files
597 (fl_fd, fl_name) = mkstemp()
599 os.write(fl_fd, '%s\n' % n.fullpath)
604 # Write minimal apt.conf
605 # TODO: Remove hardcoding from template
606 (ac_fd, ac_name) = mkstemp()
607 os.write(ac_fd, MINIMAL_APT_CONF % {'archivepath': self.path,
609 'cachedir': cnf["Dir::Cache"],
610 'overridedir': cnf["Dir::Override"],
614 # Run apt-ftparchive generate
615 os.chdir(os.path.dirname(ac_name))
616 os.system('apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate %s' % os.path.basename(ac_name))
618 # Run apt-ftparchive release
619 # TODO: Eww - fix this
620 bname = os.path.basename(self.path)
624 # We have to remove the Release file otherwise it'll be included in the
627 os.unlink(os.path.join(bname, 'Release'))
631 os.system("""apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="%s" -o APT::FTPArchive::Release::Label="%s" -o APT::FTPArchive::Release::Description="%s" -o APT::FTPArchive::Release::Architectures="%s" release %s > Release""" % (self.origin, self.label, self.releasedescription, arches, bname))
633 # Crude hack with open and append, but this whole section is and should be redone.
634 if self.notautomatic:
635 release=open("Release", "a")
636 release.write("NotAutomatic: yes")
641 keyring = "--secret-keyring \"%s\"" % cnf["Dinstall::SigningKeyring"]
642 if cnf.has_key("Dinstall::SigningPubKeyring"):
643 keyring += " --keyring \"%s\"" % cnf["Dinstall::SigningPubKeyring"]
645 os.system("gpg %s --no-options --batch --no-tty --armour --default-key %s --detach-sign -o Release.gpg Release""" % (keyring, self.signingkey))
647 # Move the files if we got this far
648 os.rename('Release', os.path.join(bname, 'Release'))
650 os.rename('Release.gpg', os.path.join(bname, 'Release.gpg'))
652 # Clean up any left behind files
679 def clean_and_update(self, starttime, Logger, dryrun=False):
680 """WARNING: This routine commits for you"""
681 session = DBConn().session().object_session(self)
683 if self.generate_metadata and not dryrun:
684 self.write_metadata(starttime)
686 # Grab files older than our execution time
687 older = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()
693 Logger.log(["I: Would have removed %s from the queue" % o.fullpath])
695 Logger.log(["I: Removing %s from the queue" % o.fullpath])
696 os.unlink(o.fullpath)
699 # If it wasn't there, don't worry
700 if e.errno == ENOENT:
703 # TODO: Replace with proper logging call
704 Logger.log(["E: Could not remove %s" % o.fullpath])
711 for f in os.listdir(self.path):
712 if f.startswith('Packages') or f.startswith('Source') or f.startswith('Release') or f.startswith('advisory'):
716 r = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter_by(filename = f).one()
717 except NoResultFound:
718 fp = os.path.join(self.path, f)
720 Logger.log(["I: Would remove unused link %s" % fp])
722 Logger.log(["I: Removing unused link %s" % fp])
726 Logger.log(["E: Failed to unlink unreferenced file %s" % r.fullpath])
728 def add_file_from_pool(self, poolfile):
729 """Copies a file into the pool. Assumes that the PoolFile object is
730 attached to the same SQLAlchemy session as the Queue object is.
732 The caller is responsible for committing after calling this function."""
733 poolfile_basename = poolfile.filename[poolfile.filename.rindex(os.sep)+1:]
735 # Check if we have a file of this name or this ID already
736 for f in self.queuefiles:
737 if f.fileid is not None and f.fileid == poolfile.file_id or \
738 f.poolfile.filename == poolfile_basename:
739 # In this case, update the BuildQueueFile entry so we
740 # don't remove it too early
741 f.lastused = datetime.now()
742 DBConn().session().object_session(poolfile).add(f)
745 # Prepare BuildQueueFile object
746 qf = BuildQueueFile()
747 qf.build_queue_id = self.queue_id
748 qf.lastused = datetime.now()
749 qf.filename = poolfile_basename
751 targetpath = poolfile.fullpath
752 queuepath = os.path.join(self.path, poolfile_basename)
756 # We need to copy instead of symlink
758 utils.copy(targetpath, queuepath)
759 # NULL in the fileid field implies a copy
762 os.symlink(targetpath, queuepath)
763 qf.fileid = poolfile.file_id
767 # Get the same session as the PoolFile is using and add the qf to it
768 DBConn().session().object_session(poolfile).add(qf)
773 __all__.append('BuildQueue')
776 def get_build_queue(queuename, session=None):
778 Returns BuildQueue object for given C{queue name}, creating it if it does not
781 @type queuename: string
782 @param queuename: The name of the queue
784 @type session: Session
785 @param session: Optional SQLA session object (a temporary one will be
786 generated if not supplied)
789 @return: BuildQueue object for the given queue
792 q = session.query(BuildQueue).filter_by(queue_name=queuename)
796 except NoResultFound:
799 __all__.append('get_build_queue')
801 ################################################################################
class BuildQueueFile(object):
    """ORM stub for one row of the build_queue_files table; attributes
    are mapped in by SQLAlchemy."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<BuildQueueFile %s (%s)>' % (self.filename, self.build_queue_id)

    @property
    def fullpath(self):
        # Absolute path of this file inside its build queue directory.
        return os.path.join(self.buildqueue.path, self.filename)
815 __all__.append('BuildQueueFile')
817 ################################################################################
class ChangePendingBinary(object):
    """ORM stub for one row of the changes_pending_binaries table;
    attributes are mapped in by SQLAlchemy."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<ChangePendingBinary %s>' % self.change_pending_binary_id
826 __all__.append('ChangePendingBinary')
828 ################################################################################
class ChangePendingFile(object):
    """ORM stub for one row of the changes_pending_files table;
    attributes are mapped in by SQLAlchemy."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<ChangePendingFile %s>' % self.change_pending_file_id
837 __all__.append('ChangePendingFile')
839 ################################################################################
class ChangePendingSource(object):
    """ORM stub for one row of the changes_pending_source table;
    attributes are mapped in by SQLAlchemy."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<ChangePendingSource %s>' % self.change_pending_source_id
848 __all__.append('ChangePendingSource')
850 ################################################################################
class Component(object):
    """ORM stub for one row of the component table (e.g. main, contrib);
    attributes are mapped in by SQLAlchemy."""

    def __init__(self, *args, **kwargs):
        pass

    def __eq__(self, val):
        # Support direct comparison against a plain component name string;
        # otherwise fall back to the default comparison machinery.
        if not isinstance(val, str):
            return NotImplemented
        return self.component_name == val

    def __ne__(self, val):
        # Mirror of __eq__ for inequality against plain strings.
        if not isinstance(val, str):
            return NotImplemented
        return self.component_name != val

    def __repr__(self):
        return '<Component %s>' % self.component_name
872 __all__.append('Component')
@session_wrapper
def get_component(component, session=None):
    """
    Returns Component object for given C{component} name.

    @type component: string
    @param component: The name of the component

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Component
    @return: Component object for the given name (None if not present)
    """
    # Component names are stored lower-cased.
    component = component.lower()

    q = session.query(Component).filter_by(component_name=component)

    try:
        return q.one()
    except NoResultFound:
        return None
895 __all__.append('get_component')
897 ################################################################################
class DBConfig(object):
    """ORM stub for one row of the config table; attributes are mapped
    in by SQLAlchemy."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<DBConfig %s>' % self.name
906 __all__.append('DBConfig')
908 ################################################################################
911 def get_or_set_contents_file_id(filename, session=None):
913 Returns database id for given filename.
915 If no matching file is found, a row is inserted.
917 @type filename: string
918 @param filename: The filename
919 @type session: SQLAlchemy
920 @param session: Optional SQL session object (a temporary one will be
921 generated if not supplied). If not passed, a commit will be performed at
922 the end of the function, otherwise the caller is responsible for commiting.
925 @return: the database id for the given component
928 q = session.query(ContentFilename).filter_by(filename=filename)
931 ret = q.one().cafilename_id
932 except NoResultFound:
933 cf = ContentFilename()
934 cf.filename = filename
936 session.commit_or_flush()
937 ret = cf.cafilename_id
941 __all__.append('get_or_set_contents_file_id')
944 def get_contents(suite, overridetype, section=None, session=None):
946 Returns contents for a suite / overridetype combination, limiting
947 to a section if not None.
950 @param suite: Suite object
952 @type overridetype: OverrideType
953 @param overridetype: OverrideType object
955 @type section: Section
956 @param section: Optional section object to limit results to
958 @type session: SQLAlchemy
959 @param session: Optional SQL session object (a temporary one will be
960 generated if not supplied)
963 @return: ResultsProxy object set up to return tuples of (filename, section,
967 # find me all of the contents for a given suite
968 contents_q = """SELECT (p.path||'/'||n.file) AS fn,
972 FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
973 JOIN content_file_names n ON (c.filename=n.id)
974 JOIN binaries b ON (b.id=c.binary_pkg)
975 JOIN override o ON (o.package=b.package)
976 JOIN section s ON (s.id=o.section)
977 WHERE o.suite = :suiteid AND o.type = :overridetypeid
978 AND b.type=:overridetypename"""
980 vals = {'suiteid': suite.suite_id,
981 'overridetypeid': overridetype.overridetype_id,
982 'overridetypename': overridetype.overridetype}
984 if section is not None:
985 contents_q += " AND s.id = :sectionid"
986 vals['sectionid'] = section.section_id
988 contents_q += " ORDER BY fn"
990 return session.execute(contents_q, vals)
992 __all__.append('get_contents')
994 ################################################################################
class ContentFilepath(object):
    """ORM stub for one row of the content_file_paths table; attributes
    are mapped in by SQLAlchemy."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<ContentFilepath %s>' % self.filepath
1003 __all__.append('ContentFilepath')
1006 def get_or_set_contents_path_id(filepath, session=None):
1008 Returns database id for given path.
1010 If no matching file is found, a row is inserted.
1012 @type filepath: string
1013 @param filepath: The filepath
1015 @type session: SQLAlchemy
1016 @param session: Optional SQL session object (a temporary one will be
1017 generated if not supplied). If not passed, a commit will be performed at
1018 the end of the function, otherwise the caller is responsible for commiting.
1021 @return: the database id for the given path
1024 q = session.query(ContentFilepath).filter_by(filepath=filepath)
1027 ret = q.one().cafilepath_id
1028 except NoResultFound:
1029 cf = ContentFilepath()
1030 cf.filepath = filepath
1032 session.commit_or_flush()
1033 ret = cf.cafilepath_id
1037 __all__.append('get_or_set_contents_path_id')
1039 ################################################################################
class ContentAssociation(object):
    """ORM stub for one row of the content_associations table;
    attributes are mapped in by SQLAlchemy."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<ContentAssociation %s>' % self.ca_id
1048 __all__.append('ContentAssociation')
1050 def insert_content_paths(binary_id, fullpaths, session=None):
1052 Make sure given path is associated with given binary id
1054 @type binary_id: int
1055 @param binary_id: the id of the binary
1056 @type fullpaths: list
1057 @param fullpaths: the list of paths of the file being associated with the binary
1058 @type session: SQLAlchemy session
1059 @param session: Optional SQLAlchemy session. If this is passed, the caller
1060 is responsible for ensuring a transaction has begun and committing the
1061 results or rolling back based on the result code. If not passed, a commit
1062 will be performed at the end of the function, otherwise the caller is
1063 responsible for commiting.
1065 @return: True upon success
1068 privatetrans = False
1070 session = DBConn().session()
1075 def generate_path_dicts():
1076 for fullpath in fullpaths:
1077 if fullpath.startswith( './' ):
1078 fullpath = fullpath[2:]
1080 yield {'filename':fullpath, 'id': binary_id }
1082 for d in generate_path_dicts():
1083 session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
1092 traceback.print_exc()
1094 # Only rollback if we set up the session ourself
1101 __all__.append('insert_content_paths')
1103 ################################################################################
class DSCFile(object):
    """ORM stub for one row of the dsc_files table; attributes are
    mapped in by SQLAlchemy."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<DSCFile %s>' % self.dscfile_id
1112 __all__.append('DSCFile')
1115 def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
1117 Returns a list of DSCFiles which may be empty
1119 @type dscfile_id: int (optional)
1120 @param dscfile_id: the dscfile_id of the DSCFiles to find
1122 @type source_id: int (optional)
1123 @param source_id: the source id related to the DSCFiles to find
1125 @type poolfile_id: int (optional)
1126 @param poolfile_id: the poolfile id related to the DSCFiles to find
1129 @return: Possibly empty list of DSCFiles
1132 q = session.query(DSCFile)
1134 if dscfile_id is not None:
1135 q = q.filter_by(dscfile_id=dscfile_id)
1137 if source_id is not None:
1138 q = q.filter_by(source_id=source_id)
1140 if poolfile_id is not None:
1141 q = q.filter_by(poolfile_id=poolfile_id)
1145 __all__.append('get_dscfiles')
1147 ################################################################################
class PoolFile(ORMObject):
    """ORM class for one row of the files table (a file in the pool)."""

    def __init__(self, filename = None, location = None, filesize = -1, \
        md5sum = None):
        self.filename = filename
        self.location = location
        self.filesize = filesize
        self.md5sum = md5sum

    @property
    def fullpath(self):
        # Absolute path: the location's base path plus the pool-relative
        # filename.
        return os.path.join(self.location.path, self.filename)

    def is_valid(self, filesize = -1, md5sum = None):
        # A pool file matches iff both recorded size and md5sum agree
        # with the supplied values.
        return self.filesize == filesize and self.md5sum == md5sum

    def properties(self):
        # 'filename' first: ORMObject.__repr__ uses the first element.
        return ['filename', 'file_id', 'filesize', 'md5sum', 'sha1sum', \
            'sha256sum', 'location', 'source', 'binary', 'last_used']

    def not_null_constraints(self):
        return ['filename', 'md5sum', 'location']
1171 __all__.append('PoolFile')
1174 def check_poolfile(filename, filesize, md5sum, location_id, session=None):
1177 (ValidFileFound [boolean], PoolFile object or None)
1179 @type filename: string
1180 @param filename: the filename of the file to check against the DB
1183 @param filesize: the size of the file to check against the DB
1185 @type md5sum: string
1186 @param md5sum: the md5sum of the file to check against the DB
1188 @type location_id: int
1189 @param location_id: the id of the location to look in
1192 @return: Tuple of length 2.
1193 - If valid pool file found: (C{True}, C{PoolFile object})
1194 - If valid pool file not found:
1195 - (C{False}, C{None}) if no file found
1196 - (C{False}, C{PoolFile object}) if file found with size/md5sum mismatch
1199 poolfile = session.query(Location).get(location_id). \
1200 files.filter_by(filename=filename).first()
1202 if poolfile and poolfile.is_valid(filesize = filesize, md5sum = md5sum):
1205 return (valid, poolfile)
1207 __all__.append('check_poolfile')
# --- PoolFile helper accessors -------------------------------------------
# NOTE(review): this excerpt is line-elided; the @session_wrapper decorators,
# docstring quote lines and some statements between the original line numbers
# are not shown here.
1209 # TODO: the implementation can trivially be inlined at the place where the
1210 # function is called
# Fetch a single PoolFile by primary key; Query.get() yields None on a miss.
1212 def get_poolfile_by_id(file_id, session=None):
1214 Returns a PoolFile objects or None for the given id
1217 @param file_id: the id of the file to look for
1219 @rtype: PoolFile or None
1220 @return: either the PoolFile object or None
1223 return session.query(PoolFile).get(file_id)
1225 __all__.append('get_poolfile_by_id')
# Return all PoolFile rows whose pool path ends in the given basename.
1228 def get_poolfile_like_name(filename, session=None):
1230 Returns an array of PoolFile objects which are like the given name
1232 @type filename: string
1233 @param filename: the filename of the file to check against the DB
1236 @return: array of PoolFile objects
1239 # TODO: There must be a way of properly using bind parameters with %FOO%
# LIKE '%/<filename>' anchors the match on the final path component.
1240 q = session.query(PoolFile).filter(PoolFile.filename.like('%%/%s' % filename))
1244 __all__.append('get_poolfile_like_name')
# Create a new PoolFile row from a checksum dict and attach it to a location.
1247 def add_poolfile(filename, datadict, location_id, session=None):
1249 Add a new file to the pool
1251 @type filename: string
1252 @param filename: filename
1254 @type datadict: dict
1255 @param datadict: dict with needed data
1257 @type location_id: int
1258 @param location_id: database id of the location
1261 @return: the PoolFile object created
1263 poolfile = PoolFile()
1264 poolfile.filename = filename
# datadict must carry "size", "md5sum", "sha1sum" and "sha256sum" keys.
1265 poolfile.filesize = datadict["size"]
1266 poolfile.md5sum = datadict["md5sum"]
1267 poolfile.sha1sum = datadict["sha1sum"]
1268 poolfile.sha256sum = datadict["sha256sum"]
1269 poolfile.location_id = location_id
1271 session.add(poolfile)
1272 # Flush to get a file id (NB: This is not a commit)
1277 __all__.append('add_poolfile')
1279 ################################################################################
# ORM class for a PGP fingerprint row; validation metadata comes from the
# properties()/not_null_constraints() hooks of ORMObject.
1281 class Fingerprint(ORMObject):
1282 def __init__(self, fingerprint = None):
1283 self.fingerprint = fingerprint
# NOTE(review): the continuation line of this list (original line 1287) is
# elided from this excerpt.
1285 def properties(self):
1286 return ['fingerprint', 'fingerprint_id', 'keyring', 'uid', \
1289 def not_null_constraints(self):
1290 return ['fingerprint']
1292 __all__.append('Fingerprint')
# Read-only lookup: returns the Fingerprint row or (in the elided
# NoResultFound branch) None.
1295 def get_fingerprint(fpr, session=None):
1297 Returns Fingerprint object for given fpr.
1300 @param fpr: The fpr to find / add
1302 @type session: SQLAlchemy
1303 @param session: Optional SQL session object (a temporary one will be
1304 generated if not supplied).
1307 @return: the Fingerprint object for the given fpr or None
1310 q = session.query(Fingerprint).filter_by(fingerprint=fpr)
1314 except NoResultFound:
1319 __all__.append('get_fingerprint')
# Get-or-create variant: inserts a new Fingerprint row on a miss.
1322 def get_or_set_fingerprint(fpr, session=None):
1324 Returns Fingerprint object for given fpr.
1326 If no matching fpr is found, a row is inserted.
1329 @param fpr: The fpr to find / add
1331 @type session: SQLAlchemy
1332 @param session: Optional SQL session object (a temporary one will be
1333 generated if not supplied). If not passed, a commit will be performed at
1334 the end of the function, otherwise the caller is responsible for commiting.
1335 A flush will be performed either way.
1338 @return: the Fingerprint object for the given fpr
1341 q = session.query(Fingerprint).filter_by(fingerprint=fpr)
1345 except NoResultFound:
1346 fingerprint = Fingerprint()
1347 fingerprint.fingerprint = fpr
1348 session.add(fingerprint)
# commit_or_flush(): commits for a private session, flushes otherwise.
1349 session.commit_or_flush()
1354 __all__.append('get_or_set_fingerprint')
1356 ################################################################################
1358 # Helper routine for Keyring class
# Build "cn mn sn" display name from an LDAP entry, skipping empty/"-" parts.
# NOTE(review): initialisation of 'name'/'ret' (lines 1360, 1362) is elided.
1359 def get_ldap_name(entry):
1361 for k in ["cn", "mn", "sn"]:
1363 if ret and ret[0] != "" and ret[0] != "-":
1365 return " ".join(name)
1367 ################################################################################
# In-memory representation of a GPG keyring plus parsing helpers.
1369 class Keyring(object):
# Command template; %s is substituted with the keyring path in load_keys().
1370 gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
1371 " --with-colons --fingerprint --fingerprint"
1376 def __init__(self, *args, **kwargs):
# (body of the elided __repr__ at original line 1379)
1380 return '<Keyring %s>' % self.keyring_name
def de_escape_gpg_str(self, txt):
    """Expand gpg's literal \\xNN escape sequences in *txt* into characters."""
    # The capturing group keeps each "\xNN" escape as an odd-indexed element.
    pieces = re.split(r'(\\x..)', txt)
    decoded = []
    for idx, piece in enumerate(pieces):
        if idx % 2:
            # "\xNN" -> single character with hex code NN.
            decoded.append("%c" % int(piece[2:], 16))
        else:
            decoded.append(piece)
    return "".join(decoded)
# Split a gpg uid field into (real name, email) via RFC2822 parsing.
# NOTE(review): lines 1390 and 1394-1395 are elided from this excerpt.
1388 def parse_address(self, uid):
1389 """parses uid and returns a tuple of real name and email address"""
1391 (name, address) = email.Utils.parseaddr(uid)
# Strip any parenthesised comment from the display name.
1392 name = re.sub(r"\s*[(].*[)]", "", name)
1393 name = self.de_escape_gpg_str(name)
1396 return (name, address)
# Populate self.keys/self.fpr_lookup by parsing gpg --with-colons output.
# NOTE(review): several lines (e.g. 1401, 1403-1405, 1409-1412, 1416) are
# elided, including the ones that set 'key' and 'signingkey' for "pub" rows.
1398 def load_keys(self, keyring):
1399 if not self.keyring_id:
1400 raise Exception('Must be initialized with database information')
# Shells out to gpg; keyring is substituted into gpg_invocation.
1402 k = os.popen(self.gpg_invocation % keyring, "r")
1406 for line in k.xreadlines():
1407 field = line.split(":")
1408 if field[0] == "pub":
1411 (name, addr) = self.parse_address(field[9])
1413 self.keys[key]["email"] = addr
1414 self.keys[key]["name"] = name
1415 self.keys[key]["fingerprints"] = []
1417 elif key and field[0] == "sub" and len(field) >= 12:
# Capability column: "s" marks a signing-capable subkey.
1418 signingkey = ("s" in field[11])
1419 elif key and field[0] == "uid":
1420 (name, addr) = self.parse_address(field[9])
# Prefer the first uid that actually carries an email address.
1421 if "email" not in self.keys[key] and "@" in addr:
1422 self.keys[key]["email"] = addr
1423 self.keys[key]["name"] = name
1424 elif signingkey and field[0] == "fpr":
1425 self.keys[key]["fingerprints"].append(field[9])
1426 self.fpr_lookup[field[9]] = key
# Map keyring entries to LDAP users; returns (byname, byuid) dicts.
# NOTE(review): LDAP iteration setup (lines 1442-1447, 1455-1459) is elided.
1428 def import_users_from_ldap(self, session):
1432 LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
1433 LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]
# Anonymous bind; presumably sufficient for this directory — TODO confirm.
1435 l = ldap.open(LDAPServer)
1436 l.simple_bind_s("","")
1437 Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
1438 "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
1439 ["uid", "keyfingerprint", "cn", "mn", "sn"])
1441 ldap_fin_uid_id = {}
1448 uid = entry["uid"][0]
1449 name = get_ldap_name(entry)
1450 fingerprints = entry["keyFingerPrint"]
1452 for f in fingerprints:
1453 key = self.fpr_lookup.get(f, None)
1454 if key not in self.keys:
1456 self.keys[key]["uid"] = uid
1460 keyid = get_or_set_uid(uid, session).uid_id
1461 byuid[keyid] = (uid, name)
1462 byname[uid] = (keyid, name)
1464 return (byname, byuid)
# Derive uids from key email addresses using the given format template.
# NOTE(review): initialisation of byuid/byname and the branch structure
# around lines 1467-1469, 1472-1474 and 1480-1481 is elided.
1466 def generate_users_from_keyring(self, format, session):
1470 for x in self.keys.keys():
1471 if "email" not in self.keys[x]:
1473 self.keys[x]["uid"] = format % "invalid-uid"
1475 uid = format % self.keys[x]["email"]
1476 keyid = get_or_set_uid(uid, session).uid_id
1477 byuid[keyid] = (uid, self.keys[x]["name"])
1478 byname[uid] = (keyid, self.keys[x]["name"])
1479 self.keys[x]["uid"] = uid
1482 uid = format % "invalid-uid"
1483 keyid = get_or_set_uid(uid, session).uid_id
1484 byuid[keyid] = (uid, "ungeneratable user id")
1485 byname[uid] = (keyid, "ungeneratable user id")
1487 return (byname, byuid)
1489 __all__.append('Keyring')
# Lookup of a keyring row by name; elided branch returns None on a miss.
1492 def get_keyring(keyring, session=None):
1494 If C{keyring} does not have an entry in the C{keyrings} table yet, return None
1495 If C{keyring} already has an entry, simply return the existing Keyring
1497 @type keyring: string
1498 @param keyring: the keyring name
1501 @return: the Keyring object for this keyring
1504 q = session.query(Keyring).filter_by(keyring_name=keyring)
1508 except NoResultFound:
1511 __all__.append('get_keyring')
1513 ################################################################################
# Plain mapped row for the keyring_acl_map table.
1515 class KeyringACLMap(object):
1516 def __init__(self, *args, **kwargs):
1520 return '<KeyringACLMap %s>' % self.keyring_acl_map_id
1522 __all__.append('KeyringACLMap')
1524 ################################################################################
# Mapped row for an uploaded .changes file.
1526 class DBChange(object):
1527 def __init__(self, *args, **kwargs):
1531 return '<DBChange %s>' % self.changesname
# Detach this change from its policy queue and drop the linking rows.
# NOTE(review): the delete statements referenced by the comments below
# (original lines 1537-1538, 1540-1541) are elided from this excerpt.
1533 def clean_from_queue(self):
1534 session = DBConn().session().object_session(self)
1536 # Remove changes_pool_files entries
1539 # Remove changes_pending_files references
1542 # Clear out of queue
1543 self.in_queue = None
1544 self.approved_for_id = None
1546 __all__.append('DBChange')
# Lookup of a DBChange row by .changes filename.
1549 def get_dbchange(filename, session=None):
1551 returns DBChange object for given C{filename}.
1553 @type filename: string
1554 @param filename: the name of the file
1556 @type session: Session
1557 @param session: Optional SQLA session object (a temporary one will be
1558 generated if not supplied)
1561 @return: DBChange object for the given filename (C{None} if not present)
1564 q = session.query(DBChange).filter_by(changesname=filename)
1568 except NoResultFound:
1571 __all__.append('get_dbchange')
1573 ################################################################################
# Mapped row for an archive location (pool directory).
# NOTE(review): the assignment of self.path (original line 1577) is elided.
1575 class Location(ORMObject):
1576 def __init__(self, path = None):
1578 # the column 'type' should go away, see comment at mapper
1579 self.archive_type = 'pool'
1581 def properties(self):
1582 return ['path', 'archive_type', 'component', 'files_count']
1584 def not_null_constraints(self):
1585 return ['path', 'archive_type']
1587 __all__.append('Location')
# Lookup by path with optional archive/component narrowing.
1590 def get_location(location, component=None, archive=None, session=None):
1592 Returns Location object for the given combination of location, component
1595 @type location: string
1596 @param location: the path of the location, e.g. I{/srv/ftp-master.debian.org/ftp/pool/}
1598 @type component: string
1599 @param component: the component name (if None, no restriction applied)
1601 @type archive: string
1602 @param archive: the archive name (if None, no restriction applied)
1604 @rtype: Location / None
1605 @return: Either a Location object or None if one can't be found
1608 q = session.query(Location).filter_by(path=location)
1610 if archive is not None:
1611 q = q.join(Archive).filter_by(archive_name=archive)
1613 if component is not None:
1614 q = q.join(Component).filter_by(component_name=component)
1618 except NoResultFound:
1621 __all__.append('get_location')
1623 ################################################################################
# Mapped row for a package maintainer.
# NOTE(review): the __init__ body and the not_null_constraints return line
# (original lines 1627, 1633) are elided.
1625 class Maintainer(ORMObject):
1626 def __init__(self, name = None):
1629 def properties(self):
1630 return ['name', 'maintainer_id']
1632 def not_null_constraints(self):
def get_split_maintainer(self):
    """Split this maintainer's name into its fix_maintainer() components.

    Returns four empty strings when no name is recorded.
    """
    current = getattr(self, 'name', None)
    if current is None:
        return ('', '', '', '')
    return fix_maintainer(current.strip())
1641 __all__.append('Maintainer')
# Get-or-create: inserts a Maintainer row when the name is unknown.
1644 def get_or_set_maintainer(name, session=None):
1646 Returns Maintainer object for given maintainer name.
1648 If no matching maintainer name is found, a row is inserted.
1651 @param name: The maintainer name to add
1653 @type session: SQLAlchemy
1654 @param session: Optional SQL session object (a temporary one will be
1655 generated if not supplied). If not passed, a commit will be performed at
1656 the end of the function, otherwise the caller is responsible for commiting.
1657 A flush will be performed either way.
1660 @return: the Maintainer object for the given maintainer
1663 q = session.query(Maintainer).filter_by(name=name)
1666 except NoResultFound:
1667 maintainer = Maintainer()
1668 maintainer.name = name
1669 session.add(maintainer)
# commit_or_flush(): commits for a private session, flushes otherwise.
1670 session.commit_or_flush()
1675 __all__.append('get_or_set_maintainer')
# Primary-key lookup; Query.get() yields None for an unknown id.
1678 def get_maintainer(maintainer_id, session=None):
1680 Return the name of the maintainer behind C{maintainer_id} or None if that
1681 maintainer_id is invalid.
1683 @type maintainer_id: int
1684 @param maintainer_id: the id of the maintainer
1687 @return: the Maintainer with this C{maintainer_id}
1690 return session.query(Maintainer).get(maintainer_id)
1692 __all__.append('get_maintainer')
1694 ################################################################################
# Mapped row for a reviewer comment on a NEW-queue package.
1696 class NewComment(object):
1697 def __init__(self, *args, **kwargs):
1701 return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)
1703 __all__.append('NewComment')
# Existence check for a comment on (package, version).
1706 def has_new_comment(package, version, session=None):
1708 Returns true if the given combination of C{package}, C{version} has a comment.
1710 @type package: string
1711 @param package: name of the package
1713 @type version: string
1714 @param version: package version
1716 @type session: Session
1717 @param session: Optional SQLA session object (a temporary one will be
1718 generated if not supplied)
1724 q = session.query(NewComment)
1725 q = q.filter_by(package=package)
1726 q = q.filter_by(version=version)
# NOTE(review): bool() is redundant around a comparison, kept as-is.
1728 return bool(q.count() > 0)
1730 __all__.append('has_new_comment')
# Filtered listing of NewComment rows; every filter is optional.
1733 def get_new_comments(package=None, version=None, comment_id=None, session=None):
1735 Returns (possibly empty) list of NewComment objects for the given
1738 @type package: string (optional)
1739 @param package: name of the package
1741 @type version: string (optional)
1742 @param version: package version
1744 @type comment_id: int (optional)
1745 @param comment_id: An id of a comment
1747 @type session: Session
1748 @param session: Optional SQLA session object (a temporary one will be
1749 generated if not supplied)
1752 @return: A (possibly empty) list of NewComment objects will be returned
1755 q = session.query(NewComment)
1756 if package is not None: q = q.filter_by(package=package)
1757 if version is not None: q = q.filter_by(version=version)
1758 if comment_id is not None: q = q.filter_by(comment_id=comment_id)
1762 __all__.append('get_new_comments')
1764 ################################################################################
# Mapped row for a package override (section/priority assignment per suite).
1766 class Override(object):
1767 def __init__(self, *args, **kwargs):
1771 return '<Override %s (%s)>' % (self.package, self.suite_id)
1773 __all__.append('Override')
# Override lookup; each optional filter accepts a scalar or a list.
1776 def get_override(package, suite=None, component=None, overridetype=None, session=None):
1778 Returns Override object for the given parameters
1780 @type package: string
1781 @param package: The name of the package
1783 @type suite: string, list or None
1784 @param suite: The name of the suite (or suites if a list) to limit to. If
1785 None, don't limit. Defaults to None.
1787 @type component: string, list or None
1788 @param component: The name of the component (or components if a list) to
1789 limit to. If None, don't limit. Defaults to None.
1791 @type overridetype: string, list or None
1792 @param overridetype: The name of the overridetype (or overridetypes if a list) to
1793 limit to. If None, don't limit. Defaults to None.
1795 @type session: Session
1796 @param session: Optional SQLA session object (a temporary one will be
1797 generated if not supplied)
1800 @return: A (possibly empty) list of Override objects will be returned
1803 q = session.query(Override)
1804 q = q.filter_by(package=package)
# Scalars are normalised to one-element lists so in_() applies uniformly.
1806 if suite is not None:
1807 if not isinstance(suite, list): suite = [suite]
1808 q = q.join(Suite).filter(Suite.suite_name.in_(suite))
1810 if component is not None:
1811 if not isinstance(component, list): component = [component]
1812 q = q.join(Component).filter(Component.component_name.in_(component))
1814 if overridetype is not None:
1815 if not isinstance(overridetype, list): overridetype = [overridetype]
1816 q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype))
1820 __all__.append('get_override')
1823 ################################################################################
# Mapped row for an override type (deb/udeb/dsc).
1825 class OverrideType(object):
1826 def __init__(self, *args, **kwargs):
1830 return '<OverrideType %s>' % self.overridetype
1832 __all__.append('OverrideType')
# Lookup by override type name; elided branch returns None on a miss.
1835 def get_override_type(override_type, session=None):
1837 Returns OverrideType object for given C{override type}.
1839 @type override_type: string
1840 @param override_type: The name of the override type
1842 @type session: Session
1843 @param session: Optional SQLA session object (a temporary one will be
1844 generated if not supplied)
1847 @return: the database id for the given override type
1850 q = session.query(OverrideType).filter_by(overridetype=override_type)
1854 except NoResultFound:
1857 __all__.append('get_override_type')
1859 ################################################################################
class DebContents(object):
    """Mapped row tying a deb package to one file path it ships."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # Fixed typo in the repr tag: was '<DebConetnts ...>'.
        return '<DebContents %s: %s>' % (self.package.package, self.file)
class UdebContents(object):
    """Mapped row tying a udeb package to one file path it ships."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # Fixed typo in the repr tag: was '<UdebConetnts ...>'.
        return '<UdebContents %s: %s>' % (self.package.package, self.file)
# Mapped row for not-yet-accepted binary contents.
1880 class PendingBinContents(object):
1881 def __init__(self, *args, **kwargs):
1885 return '<PendingBinContents %s>' % self.contents_id
1887 __all__.append('PendingBinContents')
# Record the file paths shipped by a pending binary package.
# NOTE(review): this excerpt elides the remaining parameters of the def,
# the try/commit/return lines and the udeb/deb branch conditions
# (original lines 1890-1893, 1911-1916, 1925-1926, 1937-1951, 1954-1961).
1889 def insert_pending_content_paths(package,
1894 Make sure given paths are temporarily associated with given
1898 @param package: the package to associate with should have been read in from the binary control file
1899 @type fullpaths: list
1900 @param fullpaths: the list of paths of the file being associated with the binary
1901 @type session: SQLAlchemy session
1902 @param session: Optional SQLAlchemy session. If this is passed, the caller
1903 is responsible for ensuring a transaction has begun and committing the
1904 results or rolling back based on the result code. If not passed, a commit
1905 will be performed at the end of the function
1907 @return: True upon success, False if there is a problem
# privatetrans tracks whether we created the session (and so must commit it).
1910 privatetrans = False
1913 session = DBConn().session()
1917 arch = get_architecture(package['Architecture'], session)
1918 arch_id = arch.arch_id
1920 # Remove any already existing recorded files for this package
1921 q = session.query(PendingBinContents)
1922 q = q.filter_by(package=package['Package'])
1923 q = q.filter_by(version=package['Version'])
1924 q = q.filter_by(architecture=arch_id)
1927 for fullpath in fullpaths:
# Normalise "./path" entries emitted by dpkg/tar listings.
1929 if fullpath.startswith( "./" ):
1930 fullpath = fullpath[2:]
1932 pca = PendingBinContents()
1933 pca.package = package['Package']
1934 pca.version = package['Version']
1936 pca.architecture = arch_id
# Magic type codes; meaning of 8 vs 7 not visible here — TODO confirm.
1939 pca.type = 8 # gross
1941 pca.type = 7 # also gross
1944 # Only commit if we set up the session ourself
# Python 2 except syntax; best-effort error path prints the traceback.
1952 except Exception, e:
1953 traceback.print_exc()
1955 # Only rollback if we set up the session ourself
1962 __all__.append('insert_pending_content_paths')
1964 ################################################################################
# Mapped row for a policy queue (NEW, byhand, ...).
1966 class PolicyQueue(object):
1967 def __init__(self, *args, **kwargs):
1971 return '<PolicyQueue %s>' % self.queue_name
1973 __all__.append('PolicyQueue')
# Lookup by queue name; elided branch returns None on a miss.
1976 def get_policy_queue(queuename, session=None):
1978 Returns PolicyQueue object for given C{queue name}
1980 @type queuename: string
1981 @param queuename: The name of the queue
1983 @type session: Session
1984 @param session: Optional SQLA session object (a temporary one will be
1985 generated if not supplied)
1988 @return: PolicyQueue object for the given queue
1991 q = session.query(PolicyQueue).filter_by(queue_name=queuename)
1995 except NoResultFound:
1998 __all__.append('get_policy_queue')
# Reverse lookup: find the policy queue rooted at a filesystem path.
2001 def get_policy_queue_from_path(pathname, session=None):
2003 Returns PolicyQueue object for given C{path name}
2005 @type pathname: string
2006 @param pathname: The path
2008 @type session: Session
2009 @param session: Optional SQLA session object (a temporary one will be
2010 generated if not supplied)
2013 @return: PolicyQueue object for the given queue
2016 q = session.query(PolicyQueue).filter_by(path=pathname)
2020 except NoResultFound:
2023 __all__.append('get_policy_queue_from_path')
2025 ################################################################################
# Mapped row for a package priority; compares equal to its name string.
2027 class Priority(object):
2028 def __init__(self, *args, **kwargs):
def __eq__(self, val):
    """Allow comparing this Priority directly to a priority-name string."""
    if not isinstance(val, str):
        # Defer to the normal comparison machinery for non-strings.
        return NotImplemented
    return self.priority == val

def __ne__(self, val):
    """String-aware inverse of __eq__."""
    if not isinstance(val, str):
        # Defer to the normal comparison machinery for non-strings.
        return NotImplemented
    return self.priority != val
# (body of the elided __repr__ at original line 2043)
2044 return '<Priority %s (%s)>' % (self.priority, self.priority_id)
2046 __all__.append('Priority')
# Lookup by priority name; elided branch returns None on a miss.
2049 def get_priority(priority, session=None):
2051 Returns Priority object for given C{priority name}.
2053 @type priority: string
2054 @param priority: The name of the priority
2056 @type session: Session
2057 @param session: Optional SQLA session object (a temporary one will be
2058 generated if not supplied)
2061 @return: Priority object for the given priority
2064 q = session.query(Priority).filter_by(priority=priority)
2068 except NoResultFound:
2071 __all__.append('get_priority')
# Build a name -> id map over all priorities.
# NOTE(review): initialisation of 'ret', the loop header and the return
# (original lines 2086, 2088, 2091) are elided.
2074 def get_priorities(session=None):
2076 Returns dictionary of priority names -> id mappings
2078 @type session: Session
2079 @param session: Optional SQL session object (a temporary one will be
2080 generated if not supplied)
2083 @return: dictionary of priority names -> id mappings
2087 q = session.query(Priority)
2089 ret[x.priority] = x.priority_id
2093 __all__.append('get_priorities')
2095 ################################################################################
# Mapped row for a package section; compares equal to its name string.
2097 class Section(object):
2098 def __init__(self, *args, **kwargs):
def __eq__(self, val):
    """Allow comparing this Section directly to a section-name string."""
    if not isinstance(val, str):
        # Defer to the normal comparison machinery for non-strings.
        return NotImplemented
    return self.section == val

def __ne__(self, val):
    """String-aware inverse of __eq__."""
    if not isinstance(val, str):
        # Defer to the normal comparison machinery for non-strings.
        return NotImplemented
    return self.section != val
# (body of the elided __repr__ at original line 2113)
2114 return '<Section %s>' % self.section
2116 __all__.append('Section')
# Lookup by section name; elided branch returns None on a miss.
2119 def get_section(section, session=None):
2121 Returns Section object for given C{section name}.
2123 @type section: string
2124 @param section: The name of the section
2126 @type session: Session
2127 @param session: Optional SQLA session object (a temporary one will be
2128 generated if not supplied)
2131 @return: Section object for the given section name
2134 q = session.query(Section).filter_by(section=section)
2138 except NoResultFound:
2141 __all__.append('get_section')
# Build a name -> id map over all sections.
# NOTE(review): initialisation of 'ret', the loop header and the return
# (original lines 2156, 2158, 2161) are elided.
2144 def get_sections(session=None):
2146 Returns dictionary of section names -> id mappings
2148 @type session: Session
2149 @param session: Optional SQL session object (a temporary one will be
2150 generated if not supplied)
2153 @return: dictionary of section names -> id mappings
2157 q = session.query(Section)
2159 ret[x.section] = x.section_id
2163 __all__.append('get_sections')
2165 ################################################################################
# ORM class for a source package row.
2167 class DBSource(ORMObject):
2168 def __init__(self, source = None, version = None, maintainer = None, \
2169 changedby = None, poolfile = None, install_date = None):
2170 self.source = source
2171 self.version = version
2172 self.maintainer = maintainer
2173 self.changedby = changedby
2174 self.poolfile = poolfile
2175 self.install_date = install_date
def properties(self):
    """Attribute names exposed by this ORM object."""
    exposed = [
        'source', 'source_id', 'maintainer', 'changedby',
        'fingerprint', 'poolfile', 'version', 'suites_count',
        'install_date', 'binaries_count',
    ]
    return exposed
def not_null_constraints(self):
    """Columns that must be populated for a valid DBSource row.

    Fix: 'install_date' was listed twice in the original list; the
    duplicate entry is removed (set semantics are unchanged).
    """
    return ['source', 'version', 'install_date', 'maintainer', \
        'changedby', 'poolfile']
2186 __all__.append('DBSource')
# Check whether the named source/version is present in the given suites,
# also accepting the version with a bin-only-NMU suffix stripped.
# NOTE(review): several lines are elided (docstring quotes, the suite-map
# expansion loop around lines 2232-2236 and the ok/return lines 2238-2245).
2189 def source_exists(source, source_version, suites = ["any"], session=None):
2191 Ensure that source exists somewhere in the archive for the binary
2192 upload being processed.
2193 1. exact match => 1.0-3
2194 2. bin-only NMU => 1.0-3+b1 , 1.0-3.1+b1
2196 @type source: string
2197 @param source: source name
2199 @type source_version: string
2200 @param source_version: expected source version
2203 @param suites: list of suites to check in, default I{any}
2205 @type session: Session
2206 @param session: Optional SQLA session object (a temporary one will be
2207 generated if not supplied)
2210 @return: returns 1 if a source with expected version is found, otherwise 0
# Strip a "+bN" style suffix so a binNMU matches its base source version.
2217 from daklib.regexes import re_bin_only_nmu
2218 orig_source_version = re_bin_only_nmu.sub('', source_version)
2220 for suite in suites:
2221 q = session.query(DBSource).filter_by(source=source). \
2222 filter(DBSource.version.in_([source_version, orig_source_version]))
2224 # source must exist in suite X, or in some other suite that's
2225 # mapped to X, recursively... silent-maps are counted too,
2226 # unreleased-maps aren't.
2227 maps = cnf.ValueList("SuiteMappings")[:]
2229 maps = [ m.split() for m in maps ]
2230 maps = [ (x[1], x[2]) for x in maps
2231 if x[0] == "map" or x[0] == "silent-map" ]
2234 if x[1] in s and x[0] not in s:
2237 q = q.filter(DBSource.suites.any(Suite.suite_name.in_(s)))
2242 # No source found so return not ok
2247 __all__.append('source_exists')
# All suites that currently carry the named source package.
2250 def get_suites_source_in(source, session=None):
2252 Returns list of Suite objects which given C{source} name is in
2255 @param source: DBSource package name to search for
2258 @return: list of Suite objects for the given source
2261 return session.query(Suite).filter(Suite.sources.any(source=source)).all()
2263 __all__.append('get_suites_source_in')
# DBSource listing with optional version / dm_upload_allowed narrowing.
2266 def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=None):
2268 Returns list of DBSource objects for given C{source} name and other parameters
2271 @param source: DBSource package name to search for
2273 @type version: str or None
2274 @param version: DBSource version name to search for or None if not applicable
2276 @type dm_upload_allowed: bool
2277 @param dm_upload_allowed: If None, no effect. If True or False, only
2278 return packages with that dm_upload_allowed setting
2280 @type session: Session
2281 @param session: Optional SQL session object (a temporary one will be
2282 generated if not supplied)
2285 @return: list of DBSource objects for the given name (may be empty)
2288 q = session.query(DBSource).filter_by(source=source)
2290 if version is not None:
2291 q = q.filter_by(version=version)
2293 if dm_upload_allowed is not None:
2294 q = q.filter_by(dm_upload_allowed=dm_upload_allowed)
2298 __all__.append('get_sources_from_name')
2300 # FIXME: This function fails badly if it finds more than 1 source package and
2301 # its implementation is trivial enough to be inlined.
# Single-source lookup within one suite; elided branch handles the miss.
2303 def get_source_in_suite(source, suite, session=None):
2305 Returns a DBSource object for a combination of C{source} and C{suite}.
2307 - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
2308 - B{suite} - a suite name, eg. I{unstable}
2310 @type source: string
2311 @param source: source package name
2314 @param suite: the suite name
2317 @return: the version for I{source} in I{suite}
2321 q = get_suite(suite, session).get_sources(source)
2324 except NoResultFound:
2327 __all__.append('get_source_in_suite')
2329 ################################################################################
# Insert a .dsc upload into the database: DBSource row, pool files,
# dsc_files links and src_uploaders.
# NOTE(review): this excerpt elides the creation of 'source', 'pfs',
# 'dscfile', 'df', 'added_ids' and 'su' plus several flushes/continues
# between the original line numbers shown below.
2332 def add_dsc_to_db(u, filename, session=None):
2333 entry = u.pkg.files[filename]
2337 source.source = u.pkg.dsc["source"]
2338 source.version = u.pkg.dsc["version"] # NB: not files[file]["version"], that has no epoch
2339 source.maintainer_id = get_or_set_maintainer(u.pkg.dsc["maintainer"], session).maintainer_id
2340 source.changedby_id = get_or_set_maintainer(u.pkg.changes["changed-by"], session).maintainer_id
2341 source.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
2342 source.install_date = datetime.now().date()
2344 dsc_component = entry["component"]
2345 dsc_location_id = entry["location id"]
2347 source.dm_upload_allowed = (u.pkg.dsc.get("dm-upload-allowed", '') == "yes")
2349 # Set up a new poolfile if necessary
2350 if not entry.has_key("files id") or not entry["files id"]:
# Rebinds 'filename' to the full pool-relative path.
2351 filename = entry["pool name"] + filename
2352 poolfile = add_poolfile(filename, entry, dsc_location_id, session)
2354 pfs.append(poolfile)
2355 entry["files id"] = poolfile.file_id
2357 source.poolfile_id = entry["files id"]
2360 suite_names = u.pkg.changes["distribution"].keys()
2361 source.suites = session.query(Suite). \
2362 filter(Suite.suite_name.in_(suite_names)).all()
2364 # Add the source files to the DB (files and dsc_files)
2366 dscfile.source_id = source.source_id
2367 dscfile.poolfile_id = entry["files id"]
2368 session.add(dscfile)
2370 for dsc_file, dentry in u.pkg.dsc_files.items():
2372 df.source_id = source.source_id
2374 # If the .orig tarball is already in the pool, it's
2375 # files id is stored in dsc_files by check_dsc().
2376 files_id = dentry.get("files id", None)
2378 # Find the entry in the files hash
2379 # TODO: Bail out here properly
2381 for f, e in u.pkg.files.items():
2386 if files_id is None:
2387 filename = dfentry["pool name"] + dsc_file
2389 (found, obj) = check_poolfile(filename, dentry["size"], dentry["md5sum"], dsc_location_id)
2390 # FIXME: needs to check for -1/-2 and or handle exception
2391 if found and obj is not None:
2392 files_id = obj.file_id
2395 # If still not found, add it
2396 if files_id is None:
2397 # HACK: Force sha1sum etc into dentry
2398 dentry["sha1sum"] = dfentry["sha1sum"]
2399 dentry["sha256sum"] = dfentry["sha256sum"]
2400 poolfile = add_poolfile(filename, dentry, dsc_location_id, session)
2401 pfs.append(poolfile)
2402 files_id = poolfile.file_id
2404 poolfile = get_poolfile_by_id(files_id, session)
2405 if poolfile is None:
2406 utils.fubar("INTERNAL ERROR. Found no poolfile with id %d" % files_id)
2407 pfs.append(poolfile)
2409 df.poolfile_id = files_id
2412 # Add the src_uploaders to the DB
2413 uploader_ids = [source.maintainer_id]
2414 if u.pkg.dsc.has_key("uploaders"):
# Uploaders are comma-separated; ">, " is rewritten so names containing
# commas are not split apart.
2415 for up in u.pkg.dsc["uploaders"].replace(">, ", ">\t").split("\t"):
2417 uploader_ids.append(get_or_set_maintainer(up, session).maintainer_id)
2420 for up_id in uploader_ids:
2421 if added_ids.has_key(up_id):
2423 utils.warn("Already saw uploader %s for source %s" % (up_id, source.source))
2429 su.maintainer_id = up_id
2430 su.source_id = source.source_id
2435 return source, dsc_component, dsc_location_id, pfs
2437 __all__.append('add_dsc_to_db')
# Insert a binary upload (deb or udeb) into the database.
# NOTE(review): creation of 'bin' and several flush/add lines are elided.
2440 def add_deb_to_db(u, filename, session=None):
2442 Contrary to what you might expect, this routine deals with both
2443 debs and udebs. That info is in 'dbtype', whilst 'type' is
2444 'deb' for both of them
2447 entry = u.pkg.files[filename]
2450 bin.package = entry["package"]
2451 bin.version = entry["version"]
2452 bin.maintainer_id = get_or_set_maintainer(entry["maintainer"], session).maintainer_id
2453 bin.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
2454 bin.arch_id = get_architecture(entry["architecture"], session).arch_id
2455 bin.binarytype = entry["dbtype"]
# Rebinds 'filename' to the full pool-relative path.
2458 filename = entry["pool name"] + filename
2459 fullpath = os.path.join(cnf["Dir::Pool"], filename)
2460 if not entry.get("location id", None):
2461 entry["location id"] = get_location(cnf["Dir::Pool"], entry["component"], session=session).location_id
2463 if entry.get("files id", None):
2464 poolfile = get_poolfile_by_id(bin.poolfile_id)
2465 bin.poolfile_id = entry["files id"]
2467 poolfile = add_poolfile(filename, entry, entry["location id"], session)
2468 bin.poolfile_id = entry["files id"] = poolfile.file_id
# A binary must resolve to exactly one source package.
2471 bin_sources = get_sources_from_name(entry["source package"], entry["source version"], session=session)
2472 if len(bin_sources) != 1:
2473 raise NoSourceFieldError, "Unable to find a unique source id for %s (%s), %s, file %s, type %s, signed by %s" % \
2474 (bin.package, bin.version, entry["architecture"],
2475 filename, bin.binarytype, u.pkg.changes["fingerprint"])
2477 bin.source_id = bin_sources[0].source_id
2479 # Add and flush object so it has an ID
2482 suite_names = u.pkg.changes["distribution"].keys()
2483 bin.suites = session.query(Suite). \
2484 filter(Suite.suite_name.in_(suite_names)).all()
2488 # Deal with contents - disabled for now
2489 #contents = copy_temporary_contents(bin.package, bin.version, bin.architecture.arch_string, os.path.basename(filename), None, session)
2491 # print "REJECT\nCould not determine contents of package %s" % bin.package
2492 # session.rollback()
2493 # raise MissingContents, "No contents stored for package %s, and couldn't determine contents of %s" % (bin.package, filename)
2497 __all__.append('add_deb_to_db')
2499 ################################################################################
# Plain mapped row for the source_acl table.
2501 class SourceACL(object):
2502 def __init__(self, *args, **kwargs):
2506 return '<SourceACL %s>' % self.source_acl_id
2508 __all__.append('SourceACL')
2510 ################################################################################
# Plain mapped row for a source format (1.0, 3.0 (quilt), ...).
2512 class SrcFormat(object):
2513 def __init__(self, *args, **kwargs):
2517 return '<SrcFormat %s>' % (self.format_name)
2519 __all__.append('SrcFormat')
2521 ################################################################################
# Plain mapped row linking a source package to one of its uploaders.
2523 class SrcUploader(object):
2524 def __init__(self, *args, **kwargs):
2528 return '<SrcUploader %s>' % self.uploader_id
2530 __all__.append('SrcUploader')
2532 ################################################################################
# (display label, attribute name) pairs used by Suite.details().
# NOTE(review): one pair (original line 2538) is elided from this excerpt.
2534 SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
2535 ('SuiteID', 'suite_id'),
2536 ('Version', 'version'),
2537 ('Origin', 'origin'),
2539 ('Description', 'description'),
2540 ('Untouchable', 'untouchable'),
2541 ('Announce', 'announce'),
2542 ('Codename', 'codename'),
2543 ('OverrideCodename', 'overridecodename'),
2544 ('ValidTime', 'validtime'),
2545 ('Priority', 'priority'),
2546 ('NotAutomatic', 'notautomatic'),
2547 ('CopyChanges', 'copychanges'),
2548 ('OverrideSuite', 'overridesuite')]
2550 # Why the heck don't we have any UNIQUE constraints in table suite?
2551 # TODO: Add UNIQUE constraints for appropriate columns.
# ORM class for a suite (unstable, testing, ...); compares equal to its name.
2552 class Suite(ORMObject):
2553 def __init__(self, suite_name = None, version = None):
2554 self.suite_name = suite_name
2555 self.version = version
2557 def properties(self):
2558 return ['suite_name', 'version', 'sources_count', 'binaries_count']
2560 def not_null_constraints(self):
2561 return ['suite_name', 'version']
2563 def __eq__(self, val):
2564 if isinstance(val, str):
2565 return (self.suite_name == val)
2566 # This signals to use the normal comparison operator
2567 return NotImplemented
2569 def __ne__(self, val):
2570 if isinstance(val, str):
2571 return (self.suite_name != val)
2572 # This signals to use the normal comparison operator
2573 return NotImplemented
2577 for disp, field in SUITE_FIELDS:
2578 val = getattr(self, field, None)
2580 ret.append("%s: %s" % (disp, val))
2582 return "\n".join(ret)
2584 def get_architectures(self, skipsrc=False, skipall=False):
2586 Returns list of Architecture objects
2588 @type skipsrc: boolean
2589 @param skipsrc: Whether to skip returning the 'source' architecture entry
2592 @type skipall: boolean
2593 @param skipall: Whether to skip returning the 'all' architecture entry
2597 @return: list of Architecture objects for the given name (may be empty)
2600 q = object_session(self).query(Architecture).with_parent(self)
2602 q = q.filter(Architecture.arch_string != 'source')
2604 q = q.filter(Architecture.arch_string != 'all')
2605 return q.order_by(Architecture.arch_string).all()
2607 def get_sources(self, source):
2609 Returns a query object representing DBSource that is part of C{suite}.
2611 - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
2613 @type source: string
2614 @param source: source package name
2616 @rtype: sqlalchemy.orm.query.Query
2617 @return: a query of DBSource
2621 session = object_session(self)
2622 return session.query(DBSource).filter_by(source = source). \
2625 __all__.append('Suite')
# Fetch the Suite row named 'suite'; returns None when no such suite
# exists (per the @return docs below).
# NOTE(review): the docstring delimiters, the (presumed) @session_wrapper
# decorator, and the 'try: return q.one()' / 'return None' lines are
# elided in this dump -- confirm against the full file.
2628 def get_suite(suite, session=None):
2630 Returns Suite object for given C{suite name}.
2633 @param suite: The name of the suite
2635 @type session: Session
2636 @param session: Optional SQLA session object (a temporary one will be
2637 generated if not supplied)
2640 @return: Suite object for the requested suite name (None if not present)
2643 q = session.query(Suite).filter_by(suite_name=suite)
2647 except NoResultFound:
2650 __all__.append('get_suite')
2652 ################################################################################
2654 # TODO: should be removed because the implementation is too trivial
# Thin wrapper: delegates to Suite.get_architectures() for the named suite.
# NOTE(review): get_suite() is documented to return None for an unknown
# suite, so this raises AttributeError in that case rather than returning
# an empty list -- confirm whether callers rely on that.
2656 def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
2658 Returns list of Architecture objects for given C{suite} name
2661 @param suite: Suite name to search for
2663 @type skipsrc: boolean
2664 @param skipsrc: Whether to skip returning the 'source' architecture entry
2667 @type skipall: boolean
2668 @param skipall: Whether to skip returning the 'all' architecture entry
2671 @type session: Session
2672 @param session: Optional SQL session object (a temporary one will be
2673 generated if not supplied)
2676 @return: list of Architecture objects for the given name (may be empty)
2679 return get_suite(suite, session).get_architectures(skipsrc, skipall)
2681 __all__.append('get_suite_architectures')
2683 ################################################################################
# Association ORM class linking a suite to an allowed source format
# (mapped onto suite_src_formats in DBConn.__setupmappers).
2685 class SuiteSrcFormat(object):
2686 def __init__(self, *args, **kwargs):
# NOTE(review): numbering jumps 2686 -> 2690; the __init__ body and the
# 'def __repr__(self):' header for the return below are elided.
2690 return '<SuiteSrcFormat (%s, %s)>' % (self.suite_id, self.src_format_id)
2692 __all__.append('SuiteSrcFormat')
# List the SrcFormat rows allowed for the named suite, ordered by format
# name (join: SrcFormat -> SuiteSrcFormat -> Suite).
# NOTE(review): the docstring delimiters, the (presumed) @session_wrapper
# decorator and the final 'return q.all()' are elided in this dump.
2695 def get_suite_src_formats(suite, session=None):
2697 Returns list of allowed SrcFormat for C{suite}.
2700 @param suite: Suite name to search for
2702 @type session: Session
2703 @param session: Optional SQL session object (a temporary one will be
2704 generated if not supplied)
2707 @return: the list of allowed source formats for I{suite}
2710 q = session.query(SrcFormat)
2711 q = q.join(SuiteSrcFormat)
2712 q = q.join(Suite).filter_by(suite_name=suite)
2713 q = q.order_by('format_name')
2717 __all__.append('get_suite_src_formats')
2719 ################################################################################
# ORM class for the uid table; compares equal to a plain string holding
# the uid, mirroring the Suite.__eq__/__ne__ convention above.
2721 class Uid(ORMObject):
2722 def __init__(self, uid = None, name = None):
# NOTE(review): the assignments of the 'uid'/'name' arguments (elided
# lines 2723-2724) are missing from this dump.
2726 def __eq__(self, val):
2727 if isinstance(val, str):
2728 return (self.uid == val)
2729 # This signals to use the normal comparison operator
2730 return NotImplemented
2732 def __ne__(self, val):
2733 if isinstance(val, str):
2734 return (self.uid != val)
2735 # This signals to use the normal comparison operator
2736 return NotImplemented
# Attribute names exposed through ORMObject's generic machinery.
2738 def properties(self):
2739 return ['uid', 'name', 'fingerprint']
# NOTE(review): the return of the constraint list (elided line 2742) is
# missing from this dump.
2741 def not_null_constraints(self):
2744 __all__.append('Uid')
# Look up a Uid row by name, inserting one if absent (get-or-create).
# NOTE(review): the docstring delimiters, the (presumed) @session_wrapper
# decorator, the 'try: ... q.one()' lookup and the branch constructing the
# new Uid are elided in this dump -- confirm against the full file.
2747 def get_or_set_uid(uidname, session=None):
2749 Returns uid object for given uidname.
2751 If no matching uidname is found, a row is inserted.
2753 @type uidname: string
2754 @param uidname: The uid to add
2756 @type session: SQLAlchemy
2757 @param session: Optional SQL session object (a temporary one will be
2758 generated if not supplied). If not passed, a commit will be performed at
2759 the end of the function, otherwise the caller is responsible for commiting.
2762 @return: the uid object for the given uidname
2765 q = session.query(Uid).filter_by(uid=uidname)
2769 except NoResultFound:
# commit_or_flush: commits when the session is function-private, flushes
# when the caller supplied one (per the contract documented above).
2773 session.commit_or_flush()
2778 __all__.append('get_or_set_uid')
# Return the Uid owning the given fingerprint string (join through the
# fingerprint table).
# NOTE(review): the (presumed) @session_wrapper decorator, the
# 'try: return q.one()' lines and the NoResultFound handler body
# (presumably 'return None') are elided in this dump.
2781 def get_uid_from_fingerprint(fpr, session=None):
2782 q = session.query(Uid)
2783 q = q.join(Fingerprint).filter_by(fingerprint=fpr)
2787 except NoResultFound:
2790 __all__.append('get_uid_from_fingerprint')
2792 ################################################################################
# ORM class for the upload_blocks table; columns and relations are
# attached in DBConn.__setupmappers.
2794 class UploadBlock(object):
2795 def __init__(self, *args, **kwargs):
# NOTE(review): numbering jumps 2795 -> 2799; the __init__ body and the
# 'def __repr__(self):' header for the return below are elided.
2799 return '<UploadBlock %s (%s)>' % (self.source, self.upload_block_id)
2801 __all__.append('UploadBlock')
2803 ################################################################################
# Borg-style shared-state singleton: every instance shares one __dict__
# (the class-level __shared_state dict, whose declaration line is elided
# in this dump), so the engine / metadata / session factory are created
# only once per process.
2805 class DBConn(object):
2807 database module init.
2811 def __init__(self, *args, **kwargs):
2812 self.__dict__ = self.__shared_state
# Only the first instance runs the expensive setup below.
2814 if not getattr(self, 'initialised', False):
2815 self.initialised = True
# debug flag is presence-based: passing debug=<anything> enables SQL echo.
2816 self.debug = kwargs.has_key('debug')
# NOTE(review): elided line 2817 presumably calls self.__createconn() to
# finish one-time initialisation -- confirm against the full file.
# Reflect the archive's tables and views from the live database into
# SQLAlchemy Table objects, exposed as self.tbl_<name> / self.view_<name>.
# NOTE(review): many tuple entries, the 'views = (' header and the tuple
# closers are elided in this dump (the embedded numbering jumps).
2819 def __setuptables(self):
2820 tables_with_primary = (
2831 'changes_pending_binaries',
2832 'changes_pending_files',
2833 'changes_pending_source',
2843 'pending_bin_contents',
2855 # The following tables have primary keys but sqlalchemy
2856 # version 0.5 fails to reflect them correctly with database
2857 # versions before upgrade #41.
2859 #'build_queue_files',
2862 tables_no_primary = (
2864 'changes_pending_files_map',
2865 'changes_pending_source_files',
2866 'changes_pool_files',
2869 'suite_architectures',
2870 'suite_src_formats',
2871 'suite_build_queue_copy',
2873 # see the comment above
2875 'build_queue_files',
# The names below appear to belong to the (elided) 'views' tuple that is
# iterated at the bottom of this method -- confirm against the full file.
2879 'almost_obsolete_all_associations',
2880 'almost_obsolete_src_associations',
2881 'any_associations_source',
2882 'bin_assoc_by_arch',
2883 'bin_associations_binaries',
2884 'binaries_suite_arch',
2885 'binfiles_suite_component_arch',
2888 'newest_all_associations',
2889 'newest_any_associations',
2891 'newest_src_association',
2892 'obsolete_all_associations',
2893 'obsolete_any_associations',
2894 'obsolete_any_by_all_associations',
2895 'obsolete_src_associations',
2897 'src_associations_bin',
2898 'src_associations_src',
2899 'suite_arch_by_name',
2902 # Sqlalchemy version 0.5 fails to reflect the SERIAL type
2903 # correctly and that is why we have to use a workaround. It can
2904 # be removed as soon as we switch to version 0.6.
# Workaround: declare the 'id' column explicitly so the reflected table
# still gets a primary key despite the SERIAL reflection bug above.
2905 for table_name in tables_with_primary:
2906 table = Table(table_name, self.db_meta, \
2907 Column('id', Integer, primary_key = True), \
2908 autoload=True, useexisting=True)
2909 setattr(self, 'tbl_%s' % table_name, table)
2911 for table_name in tables_no_primary:
2912 table = Table(table_name, self.db_meta, autoload=True)
2913 setattr(self, 'tbl_%s' % table_name, table)
# Views are reflected exactly like plain tables, just under view_* names.
2915 for view_name in views:
2916 view = Table(view_name, self.db_meta, autoload=True)
2917 setattr(self, 'view_%s' % view_name, view)
# Bind every ORM class in this module to its reflected table using
# SQLAlchemy 0.5-style classical mapping (mapper()/relation()/backref()).
# Column properties are named explicitly so the Python attribute names
# stay stable even where the underlying column names differ.
# NOTE(review): 'validator' (the MapperExtension passed as extension= on
# several mappers) is defined elsewhere in the file and is not visible
# here -- its exact behaviour cannot be confirmed from this chunk.
2919 def __setupmappers(self):
2920 mapper(Architecture, self.tbl_architecture,
2921 properties = dict(arch_id = self.tbl_architecture.c.id,
2922 suites = relation(Suite, secondary=self.tbl_suite_architectures,
2923 order_by='suite_name',
2924 backref=backref('architectures', order_by='arch_string'))),
2925 extension = validator)
2927 mapper(Archive, self.tbl_archive,
2928 properties = dict(archive_id = self.tbl_archive.c.id,
2929 archive_name = self.tbl_archive.c.name))
2931 mapper(PendingBinContents, self.tbl_pending_bin_contents,
2932 properties = dict(contents_id =self.tbl_pending_bin_contents.c.id,
2933 filename = self.tbl_pending_bin_contents.c.filename,
2934 package = self.tbl_pending_bin_contents.c.package,
2935 version = self.tbl_pending_bin_contents.c.version,
2936 arch = self.tbl_pending_bin_contents.c.arch,
2937 otype = self.tbl_pending_bin_contents.c.type))
2939 mapper(DebContents, self.tbl_deb_contents,
2940 properties = dict(binary_id=self.tbl_deb_contents.c.binary_id,
2941 package=self.tbl_deb_contents.c.package,
2942 suite=self.tbl_deb_contents.c.suite,
2943 arch=self.tbl_deb_contents.c.arch,
2944 section=self.tbl_deb_contents.c.section,
2945 filename=self.tbl_deb_contents.c.filename))
2947 mapper(UdebContents, self.tbl_udeb_contents,
2948 properties = dict(binary_id=self.tbl_udeb_contents.c.binary_id,
2949 package=self.tbl_udeb_contents.c.package,
2950 suite=self.tbl_udeb_contents.c.suite,
2951 arch=self.tbl_udeb_contents.c.arch,
2952 section=self.tbl_udeb_contents.c.section,
2953 filename=self.tbl_udeb_contents.c.filename))
2955 mapper(BuildQueue, self.tbl_build_queue,
2956 properties = dict(queue_id = self.tbl_build_queue.c.id))
2958 mapper(BuildQueueFile, self.tbl_build_queue_files,
2959 properties = dict(buildqueue = relation(BuildQueue, backref='queuefiles'),
2960 poolfile = relation(PoolFile, backref='buildqueueinstances')))
# Binary packages: note the lazy='dynamic' suites backref, which yields a
# Query instead of loading every binary of a suite at once.
2962 mapper(DBBinary, self.tbl_binaries,
2963 properties = dict(binary_id = self.tbl_binaries.c.id,
2964 package = self.tbl_binaries.c.package,
2965 version = self.tbl_binaries.c.version,
2966 maintainer_id = self.tbl_binaries.c.maintainer,
2967 maintainer = relation(Maintainer),
2968 source_id = self.tbl_binaries.c.source,
2969 source = relation(DBSource, backref='binaries'),
2970 arch_id = self.tbl_binaries.c.architecture,
2971 architecture = relation(Architecture),
2972 poolfile_id = self.tbl_binaries.c.file,
2973 poolfile = relation(PoolFile, backref=backref('binary', uselist = False)),
2974 binarytype = self.tbl_binaries.c.type,
2975 fingerprint_id = self.tbl_binaries.c.sig_fpr,
2976 fingerprint = relation(Fingerprint),
2977 install_date = self.tbl_binaries.c.install_date,
2978 suites = relation(Suite, secondary=self.tbl_bin_associations,
2979 backref=backref('binaries', lazy='dynamic'))),
2980 extension = validator)
2982 mapper(BinaryACL, self.tbl_binary_acl,
2983 properties = dict(binary_acl_id = self.tbl_binary_acl.c.id))
2985 mapper(BinaryACLMap, self.tbl_binary_acl_map,
2986 properties = dict(binary_acl_map_id = self.tbl_binary_acl_map.c.id,
2987 fingerprint = relation(Fingerprint, backref="binary_acl_map"),
2988 architecture = relation(Architecture)))
2990 mapper(Component, self.tbl_component,
2991 properties = dict(component_id = self.tbl_component.c.id,
2992 component_name = self.tbl_component.c.name))
2994 mapper(DBConfig, self.tbl_config,
2995 properties = dict(config_id = self.tbl_config.c.id))
2997 mapper(DSCFile, self.tbl_dsc_files,
2998 properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
2999 source_id = self.tbl_dsc_files.c.source,
3000 source = relation(DBSource),
3001 poolfile_id = self.tbl_dsc_files.c.file,
3002 poolfile = relation(PoolFile)))
3004 mapper(PoolFile, self.tbl_files,
3005 properties = dict(file_id = self.tbl_files.c.id,
3006 filesize = self.tbl_files.c.size,
3007 location_id = self.tbl_files.c.location,
3008 location = relation(Location,
3009 # using lazy='dynamic' in the back
3010 # reference because we have A LOT of
3011 # files in one location
3012 backref=backref('files', lazy='dynamic'))),
3013 extension = validator)
3015 mapper(Fingerprint, self.tbl_fingerprint,
3016 properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
3017 uid_id = self.tbl_fingerprint.c.uid,
3018 uid = relation(Uid),
3019 keyring_id = self.tbl_fingerprint.c.keyring,
3020 keyring = relation(Keyring),
3021 source_acl = relation(SourceACL),
3022 binary_acl = relation(BinaryACL)),
3023 extension = validator)
3025 mapper(Keyring, self.tbl_keyrings,
3026 properties = dict(keyring_name = self.tbl_keyrings.c.name,
3027 keyring_id = self.tbl_keyrings.c.id))
# .changes uploads: the explicit primaryjoin is needed because in_queue
# is one of (at least) two columns referencing policy_queue.
3029 mapper(DBChange, self.tbl_changes,
3030 properties = dict(change_id = self.tbl_changes.c.id,
3031 poolfiles = relation(PoolFile,
3032 secondary=self.tbl_changes_pool_files,
3033 backref="changeslinks"),
3034 seen = self.tbl_changes.c.seen,
3035 source = self.tbl_changes.c.source,
3036 binaries = self.tbl_changes.c.binaries,
3037 architecture = self.tbl_changes.c.architecture,
3038 distribution = self.tbl_changes.c.distribution,
3039 urgency = self.tbl_changes.c.urgency,
3040 maintainer = self.tbl_changes.c.maintainer,
3041 changedby = self.tbl_changes.c.changedby,
3042 date = self.tbl_changes.c.date,
3043 version = self.tbl_changes.c.version,
3044 files = relation(ChangePendingFile,
3045 secondary=self.tbl_changes_pending_files_map,
3046 backref="changesfile"),
3047 in_queue_id = self.tbl_changes.c.in_queue,
3048 in_queue = relation(PolicyQueue,
3049 primaryjoin=(self.tbl_changes.c.in_queue==self.tbl_policy_queue.c.id)),
3050 approved_for_id = self.tbl_changes.c.approved_for))
3052 mapper(ChangePendingBinary, self.tbl_changes_pending_binaries,
3053 properties = dict(change_pending_binary_id = self.tbl_changes_pending_binaries.c.id))
3055 mapper(ChangePendingFile, self.tbl_changes_pending_files,
3056 properties = dict(change_pending_file_id = self.tbl_changes_pending_files.c.id,
3057 filename = self.tbl_changes_pending_files.c.filename,
3058 size = self.tbl_changes_pending_files.c.size,
3059 md5sum = self.tbl_changes_pending_files.c.md5sum,
3060 sha1sum = self.tbl_changes_pending_files.c.sha1sum,
3061 sha256sum = self.tbl_changes_pending_files.c.sha256sum))
# Pending source: two Maintainer relations off the same table, so both
# need explicit primaryjoins (maintainer_id vs changedby_id).
3063 mapper(ChangePendingSource, self.tbl_changes_pending_source,
3064 properties = dict(change_pending_source_id = self.tbl_changes_pending_source.c.id,
3065 change = relation(DBChange),
3066 maintainer = relation(Maintainer,
3067 primaryjoin=(self.tbl_changes_pending_source.c.maintainer_id==self.tbl_maintainer.c.id)),
3068 changedby = relation(Maintainer,
3069 primaryjoin=(self.tbl_changes_pending_source.c.changedby_id==self.tbl_maintainer.c.id)),
3070 fingerprint = relation(Fingerprint),
3071 source_files = relation(ChangePendingFile,
3072 secondary=self.tbl_changes_pending_source_files,
3073 backref="pending_sources")))
3076 mapper(KeyringACLMap, self.tbl_keyring_acl_map,
3077 properties = dict(keyring_acl_map_id = self.tbl_keyring_acl_map.c.id,
3078 keyring = relation(Keyring, backref="keyring_acl_map"),
3079 architecture = relation(Architecture)))
3081 mapper(Location, self.tbl_location,
3082 properties = dict(location_id = self.tbl_location.c.id,
3083 component_id = self.tbl_location.c.component,
3084 component = relation(Component),
3085 archive_id = self.tbl_location.c.archive,
3086 archive = relation(Archive),
3087 # FIXME: the 'type' column is old cruft and
3088 # should be removed in the future.
3089 archive_type = self.tbl_location.c.type),
3090 extension = validator)
# Maintainer: likewise two DBSource relations (maintainer vs changedby
# columns on the source table) distinguished by primaryjoin.
3092 mapper(Maintainer, self.tbl_maintainer,
3093 properties = dict(maintainer_id = self.tbl_maintainer.c.id,
3094 maintains_sources = relation(DBSource, backref='maintainer',
3095 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.maintainer)),
3096 changed_sources = relation(DBSource, backref='changedby',
3097 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.changedby))),
3098 extension = validator)
3100 mapper(NewComment, self.tbl_new_comments,
3101 properties = dict(comment_id = self.tbl_new_comments.c.id))
3103 mapper(Override, self.tbl_override,
3104 properties = dict(suite_id = self.tbl_override.c.suite,
3105 suite = relation(Suite),
3106 package = self.tbl_override.c.package,
3107 component_id = self.tbl_override.c.component,
3108 component = relation(Component),
3109 priority_id = self.tbl_override.c.priority,
3110 priority = relation(Priority),
3111 section_id = self.tbl_override.c.section,
3112 section = relation(Section),
3113 overridetype_id = self.tbl_override.c.type,
3114 overridetype = relation(OverrideType)))
3116 mapper(OverrideType, self.tbl_override_type,
3117 properties = dict(overridetype = self.tbl_override_type.c.type,
3118 overridetype_id = self.tbl_override_type.c.id))
3120 mapper(PolicyQueue, self.tbl_policy_queue,
3121 properties = dict(policy_queue_id = self.tbl_policy_queue.c.id))
3123 mapper(Priority, self.tbl_priority,
3124 properties = dict(priority_id = self.tbl_priority.c.id))
3126 mapper(Section, self.tbl_section,
3127 properties = dict(section_id = self.tbl_section.c.id,
3128 section=self.tbl_section.c.section))
3130 mapper(DBSource, self.tbl_source,
3131 properties = dict(source_id = self.tbl_source.c.id,
3132 version = self.tbl_source.c.version,
3133 maintainer_id = self.tbl_source.c.maintainer,
3134 poolfile_id = self.tbl_source.c.file,
3135 poolfile = relation(PoolFile, backref=backref('source', uselist = False)),
3136 fingerprint_id = self.tbl_source.c.sig_fpr,
3137 fingerprint = relation(Fingerprint),
3138 changedby_id = self.tbl_source.c.changedby,
3139 srcfiles = relation(DSCFile,
3140 primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
3141 suites = relation(Suite, secondary=self.tbl_src_associations,
3142 backref=backref('sources', lazy='dynamic')),
3143 srcuploaders = relation(SrcUploader)),
3144 extension = validator)
3146 mapper(SourceACL, self.tbl_source_acl,
3147 properties = dict(source_acl_id = self.tbl_source_acl.c.id))
3149 mapper(SrcFormat, self.tbl_src_format,
3150 properties = dict(src_format_id = self.tbl_src_format.c.id,
3151 format_name = self.tbl_src_format.c.format_name))
3153 mapper(SrcUploader, self.tbl_src_uploaders,
3154 properties = dict(uploader_id = self.tbl_src_uploaders.c.id,
3155 source_id = self.tbl_src_uploaders.c.source,
3156 source = relation(DBSource,
3157 primaryjoin=(self.tbl_src_uploaders.c.source==self.tbl_source.c.id)),
3158 maintainer_id = self.tbl_src_uploaders.c.maintainer,
3159 maintainer = relation(Maintainer,
3160 primaryjoin=(self.tbl_src_uploaders.c.maintainer==self.tbl_maintainer.c.id))))
3162 mapper(Suite, self.tbl_suite,
3163 properties = dict(suite_id = self.tbl_suite.c.id,
3164 policy_queue = relation(PolicyQueue),
3165 copy_queues = relation(BuildQueue,
3166 secondary=self.tbl_suite_build_queue_copy)),
3167 extension = validator)
3169 mapper(SuiteSrcFormat, self.tbl_suite_src_formats,
3170 properties = dict(suite_id = self.tbl_suite_src_formats.c.suite,
3171 suite = relation(Suite, backref='suitesrcformats'),
3172 src_format_id = self.tbl_suite_src_formats.c.src_format,
3173 src_format = relation(SrcFormat)))
3175 mapper(Uid, self.tbl_uid,
3176 properties = dict(uid_id = self.tbl_uid.c.id,
3177 fingerprint = relation(Fingerprint)),
3178 extension = validator)
3180 mapper(UploadBlock, self.tbl_upload_blocks,
3181 properties = dict(upload_block_id = self.tbl_upload_blocks.c.id,
3182 fingerprint = relation(Fingerprint, backref="uploadblocks"),
3183 uid = relation(Uid, backref="uploadblocks")))
3185 ## Connection functions
# Build the postgres connection string from the DB::* configuration,
# create the engine, metadata and session factory, then reflect tables
# and install the mappers.
3186 def __createconn(self):
3187 from config import Config
# NOTE(review): elided lines presumably hold 'cnf = Config()' and an
# 'if cnf["DB::Host"]:' test -- this branch builds a TCP connection
# string when a database host is configured...
3191 connstr = "postgres://%s" % cnf["DB::Host"]
3192 if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
3193 connstr += ":%s" % cnf["DB::Port"]
3194 connstr += "/%s" % cnf["DB::Name"]
# ...while the (elided) else branch connects via the local unix socket,
# passing any non-default port as a query parameter instead.
3197 connstr = "postgres:///%s" % cnf["DB::Name"]
3198 if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
3199 connstr += "?port=%s" % cnf["DB::Port"]
# echo=self.debug makes SQLAlchemy log every emitted SQL statement.
3201 self.db_pg = create_engine(connstr, echo=self.debug)
3202 self.db_meta = MetaData()
3203 self.db_meta.bind = self.db_pg
# NOTE(review): the remaining sessionmaker keyword arguments are on
# elided continuation lines (presumably autoflush/autocommit settings).
3204 self.db_smaker = sessionmaker(bind=self.db_pg,
3208 self.__setuptables()
3209 self.__setupmappers()
# NOTE(review): the 'def session(self):' header is elided; this returns
# a fresh session from the shared session factory.
3212 return self.db_smaker()
3214 __all__.append('DBConn')