5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
47 import simplejson as json
49 from datetime import datetime, timedelta
50 from errno import ENOENT
51 from tempfile import mkstemp, mkdtemp
53 from inspect import getargspec
56 from sqlalchemy import create_engine, Table, MetaData, Column, Integer, desc
57 from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, \
58 backref, MapperExtension, EXT_CONTINUE
59 from sqlalchemy import types as sqltypes
61 # Don't remove this, we re-export the exceptions to scripts which import us
62 from sqlalchemy.exc import *
63 from sqlalchemy.orm.exc import NoResultFound
65 # Only import Config until Queue stuff is changed to store its config
67 from config import Config
68 from textutils import fix_maintainer
69 from dak_exceptions import DBUpdateError, NoSourceFieldError
71 # suppress some deprecation warnings in squeeze related to sqlalchemy
73 warnings.filterwarnings('ignore', \
74 "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'.*", \
76 # TODO: sqlalchemy needs some extra configuration to correctly reflect
77 # the ind_deb_contents_* indexes - we ignore the warnings at the moment
78 warnings.filterwarnings("ignore", 'Predicate of partial index', SAWarning)
81 ################################################################################
83 # Patch in support for the debversion field type so that it works during
87 # that is for sqlalchemy 0.6
88 UserDefinedType = sqltypes.UserDefinedType
90 # this one for sqlalchemy 0.5
91 UserDefinedType = sqltypes.TypeEngine
93 class DebVersion(UserDefinedType):
94 def get_col_spec(self):
97 def bind_processor(self, dialect):
100 # ' = None' is needed for sqlalchemy 0.5:
101 def result_processor(self, dialect, coltype = None):
104 sa_major_version = sqlalchemy.__version__[0:3]
105 if sa_major_version in ["0.5", "0.6"]:
106 from sqlalchemy.databases import postgres
107 postgres.ischema_names['debversion'] = DebVersion
109 raise Exception("dak only ported to SQLA versions 0.5 and 0.6. See daklib/dbconn.py")
111 ################################################################################
113 __all__ = ['IntegrityError', 'SQLAlchemyError', 'DebVersion']
115 ################################################################################
117 def session_wrapper(fn):
119 Wrapper around common ".., session=None):" handling. If the wrapped
120 function is called without passing 'session', we create a local one
121 and destroy it when the function ends.
123 Also attaches a commit_or_flush method to the session; if we created a
124 local session, this is a synonym for session.commit(), otherwise it is a
125 synonym for session.flush().
128 def wrapped(*args, **kwargs):
129 private_transaction = False
131 # Find the session object
132 session = kwargs.get('session')
# Heuristic: fewer positional args than declared parameters means the
# caller did not pass 'session' positionally either.
135 if len(args) <= len(getargspec(fn)[0]) - 1:
136 # No session specified as last argument or in kwargs
137 private_transaction = True
138 session = kwargs['session'] = DBConn().session()
140 # Session is last argument in args
# NOTE(review): 'args' is a tuple inside wrapped(), so assigning to
# args[-1] below would raise TypeError if this branch is ever taken —
# confirm against the full source whether args is converted to a list.
144 session = args[-1] = DBConn().session()
145 private_transaction = True
147 if private_transaction:
148 session.commit_or_flush = session.commit
150 session.commit_or_flush = session.flush
153 return fn(*args, **kwargs)
155 if private_transaction:
156 # We created a session; close it.
# Preserve metadata of the wrapped function for introspection.
159 wrapped.__doc__ = fn.__doc__
160 wrapped.func_name = fn.func_name
# NOTE(review): 'func_name' exists only on Python 2; functools.wraps
# would handle __name__/__doc__ portably — TODO when porting to Python 3.
164 __all__.append('session_wrapper')
166 ################################################################################
168 class ORMObject(object):
170 ORMObject is a base class for all ORM classes mapped by SQLalchemy. All
171 derived classes must implement the properties() method.
174 def properties(self):
176 This method should be implemented by all derived classes and returns a
177 list of the important properties. The properties 'created' and
178 'modified' will be added automatically. A suffix '_count' should be
179 added to properties that are lists or query objects. The most important
180 property name should be returned as the first element in the list
181 because it is used by repr().
187 Returns a JSON representation of the object based on the properties
188 returned from the properties() method.
191 # add created and modified
192 all_properties = self.properties() + ['created', 'modified']
193 for property in all_properties:
194 # check for list or query
195 if property[-6:] == '_count':
196 real_property = property[:-6]
197 if not hasattr(self, real_property):
199 value = getattr(self, real_property)
200 if hasattr(value, '__len__'):
203 elif hasattr(value, 'count'):
205 value = value.count()
207 raise KeyError('Do not understand property %s.' % property)
209 if not hasattr(self, property):
212 value = getattr(self, property)
216 elif isinstance(value, ORMObject):
217 # use repr() for ORMObject types
220 # we want a string for all other types because json cannot
223 data[property] = value
224 return json.dumps(data)
228 Returns the name of the class.
230 return type(self).__name__
234 Returns a short string representation of the object using the first
235 element from the properties() method.
237 primary_property = self.properties()[0]
238 value = getattr(self, primary_property)
239 return '<%s %s>' % (self.classname(), str(value))
243 Returns a human readable form of the object using the properties()
246 return '<%s %s>' % (self.classname(), self.json())
248 def not_null_constraints(self):
250 Returns a list of properties that must be not NULL. Derived classes
251 should override this method if needed.
255 validation_message = \
256 "Validation failed because property '%s' must not be empty in object\n%s"
260 This function validates the not NULL constraints as returned by
261 not_null_constraints(). It raises the DBUpdateError exception if
264 for property in self.not_null_constraints():
265 # TODO: It is a bit awkward that the mapper configuration allow
266 # directly setting the numeric _id columns. We should get rid of it
268 if hasattr(self, property + '_id') and \
269 getattr(self, property + '_id') is not None:
271 if not hasattr(self, property) or getattr(self, property) is None:
272 raise DBUpdateError(self.validation_message % \
273 (property, str(self)))
277 def get(cls, primary_key, session = None):
279 This is a support function that allows getting an object by its primary
282 Architecture.get(3[, session])
284 instead of the more verbose
286 session.query(Architecture).get(3)
288 return session.query(cls).get(primary_key)
290 __all__.append('ORMObject')
292 ################################################################################
294 class Validator(MapperExtension):
296 This class calls the validate() method for each instance for the
297 'before_update' and 'before_insert' events. A global object validator is
298 used for configuring the individual mappers.
301 def before_update(self, mapper, connection, instance):
305 def before_insert(self, mapper, connection, instance):
309 validator = Validator()
311 ################################################################################
class Architecture(ORMObject):
    """ORM class for a single architecture entry (keyed by arch_string)."""

    def __init__(self, arch_string = None, description = None):
        self.arch_string = arch_string
        self.description = description

    def __eq__(self, val):
        # Allow direct comparison against a plain architecture name string.
        if not isinstance(val, str):
            # Signal Python to fall back to the normal comparison machinery.
            return NotImplemented
        return self.arch_string == val

    def __ne__(self, val):
        # Mirror __eq__: compare against plain strings, defer otherwise.
        if not isinstance(val, str):
            return NotImplemented
        return self.arch_string != val

    def properties(self):
        # 'arch_string' must come first: ORMObject uses the first entry
        # of this list for its repr().
        return ['arch_string', 'arch_id', 'suites_count']

    def not_null_constraints(self):
        # Validated by ORMObject.validate() before insert/update.
        return ['arch_string']
336 __all__.append('Architecture')
339 def get_architecture(architecture, session=None):
341 Returns database id for given C{architecture}.
343 @type architecture: string
344 @param architecture: The name of the architecture
346 @type session: Session
347 @param session: Optional SQLA session object (a temporary one will be
348 generated if not supplied)
351 @return: Architecture object for the given arch (None if not present)
354 q = session.query(Architecture).filter_by(arch_string=architecture)
358 except NoResultFound:
361 __all__.append('get_architecture')
363 # TODO: should be removed because the implementation is too trivial
365 def get_architecture_suites(architecture, session=None):
367 Returns list of Suite objects for given C{architecture} name
369 @type architecture: str
370 @param architecture: Architecture name to search for
372 @type session: Session
373 @param session: Optional SQL session object (a temporary one will be
374 generated if not supplied)
377 @return: list of Suite objects for the given name (may be empty)
380 return get_architecture(architecture, session).suites
382 __all__.append('get_architecture_suites')
384 ################################################################################
386 class Archive(object):
387 def __init__(self, *args, **kwargs):
391 return '<Archive %s>' % self.archive_name
393 __all__.append('Archive')
396 def get_archive(archive, session=None):
398 Returns the Archive object for the given C{archive} name.
400 @type archive: string
401 @param archive: the name of the archive
403 @type session: Session
404 @param session: Optional SQLA session object (a temporary one will be
405 generated if not supplied)
408 @return: Archive object for the given name (None if not present)
411 archive = archive.lower()
413 q = session.query(Archive).filter_by(archive_name=archive)
417 except NoResultFound:
420 __all__.append('get_archive')
422 ################################################################################
424 class BinContents(object):
425 def __init__(self, *args, **kwargs):
429 return '<BinContents (%s, %s)>' % (self.binary, self.filename)
431 __all__.append('BinContents')
433 ################################################################################
435 class DBBinary(ORMObject):
436 def __init__(self, package = None, source = None, version = None, \
437 maintainer = None, architecture = None, poolfile = None, \
439 self.package = package
441 self.version = version
442 self.maintainer = maintainer
443 self.architecture = architecture
444 self.poolfile = poolfile
445 self.binarytype = binarytype
447 def properties(self):
448 return ['package', 'version', 'maintainer', 'source', 'architecture', \
449 'poolfile', 'binarytype', 'fingerprint', 'install_date', \
450 'suites_count', 'binary_id']
452 def not_null_constraints(self):
453 return ['package', 'version', 'maintainer', 'source', 'poolfile', \
456 def get_component_name(self):
457 return self.poolfile.location.component.component_name
459 __all__.append('DBBinary')
462 def get_suites_binary_in(package, session=None):
464 Returns list of Suite objects which given C{package} name is in
467 @param package: DBBinary package name to search for
470 @return: list of Suite objects for the given package
473 return session.query(Suite).filter(Suite.binaries.any(DBBinary.package == package)).all()
475 __all__.append('get_suites_binary_in')
478 def get_component_by_package_suite(package, suite_list, session=None):
480 Returns the component name of the newest binary package in suite_list or
481 None if no package is found.
484 @param package: DBBinary package name to search for
486 @type suite_list: list of str
487 @param suite_list: list of suite_name items
489 @rtype: str or NoneType
490 @return: name of component or None
493 binary = session.query(DBBinary).filter_by(package = package). \
494 join(DBBinary.suites).filter(Suite.suite_name.in_(suite_list)). \
495 order_by(desc(DBBinary.version)).first()
499 return binary.get_component_name()
501 __all__.append('get_component_by_package_suite')
504 def get_binary_components(package, suitename, arch, session=None):
505 # Check for packages that have moved from one component to another
506 query = """SELECT c.name FROM binaries b, bin_associations ba, suite s, location l, component c, architecture a, files f
507 WHERE b.package=:package AND s.suite_name=:suitename
508 AND (a.arch_string = :arch OR a.arch_string = 'all')
509 AND ba.bin = b.id AND ba.suite = s.id AND b.architecture = a.id
510 AND f.location = l.id
511 AND l.component = c.id
514 vals = {'package': package, 'suitename': suitename, 'arch': arch}
516 return session.execute(query, vals)
518 __all__.append('get_binary_components')
520 ################################################################################
522 class BinaryACL(object):
523 def __init__(self, *args, **kwargs):
527 return '<BinaryACL %s>' % self.binary_acl_id
529 __all__.append('BinaryACL')
531 ################################################################################
533 class BinaryACLMap(object):
534 def __init__(self, *args, **kwargs):
538 return '<BinaryACLMap %s>' % self.binary_acl_map_id
540 __all__.append('BinaryACLMap')
542 ################################################################################
547 ArchiveDir "%(archivepath)s";
548 OverrideDir "%(overridedir)s";
549 CacheDir "%(cachedir)s";
554 Packages::Compress ". bzip2 gzip";
555 Sources::Compress ". bzip2 gzip";
560 bindirectory "incoming"
565 BinOverride "override.sid.all3";
566 BinCacheDB "packages-accepted.db";
568 FileList "%(filelist)s";
571 Packages::Extensions ".deb .udeb";
574 bindirectory "incoming/"
577 BinOverride "override.sid.all3";
578 SrcOverride "override.sid.all3.src";
579 FileList "%(filelist)s";
583 class BuildQueue(object):
584 def __init__(self, *args, **kwargs):
588 return '<BuildQueue %s>' % self.queue_name
590 def write_metadata(self, starttime, force=False):
591 # Do we write out metafiles?
592 if not (force or self.generate_metadata):
595 session = DBConn().session().object_session(self)
597 fl_fd = fl_name = ac_fd = ac_name = None
# All architectures except the pseudo-arch 'source'.
599 arches = " ".join([ a.arch_string for a in session.query(Architecture).all() if a.arch_string != 'source' ])
600 startdir = os.getcwd()
603 # Grab files we want to include
604 newer = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) > starttime).all()
605 # Write file list with newer files
606 (fl_fd, fl_name) = mkstemp()
608 os.write(fl_fd, '%s\n' % n.fullpath)
613 # Write minimal apt.conf
614 # TODO: Remove hardcoding from template
615 (ac_fd, ac_name) = mkstemp()
616 os.write(ac_fd, MINIMAL_APT_CONF % {'archivepath': self.path,
618 'cachedir': cnf["Dir::Cache"],
619 'overridedir': cnf["Dir::Override"],
623 # Run apt-ftparchive generate
624 os.chdir(os.path.dirname(ac_name))
# NOTE(review): os.system() builds shell command lines by string
# interpolation; values come from local config/DB but quoting should be
# reviewed — prefer subprocess with an argument list.
625 os.system('apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate %s' % os.path.basename(ac_name))
627 # Run apt-ftparchive release
628 # TODO: Eww - fix this
629 bname = os.path.basename(self.path)
633 # We have to remove the Release file otherwise it'll be included in the
636 os.unlink(os.path.join(bname, 'Release'))
640 os.system("""apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="%s" -o APT::FTPArchive::Release::Label="%s" -o APT::FTPArchive::Release::Description="%s" -o APT::FTPArchive::Release::Architectures="%s" release %s > Release""" % (self.origin, self.label, self.releasedescription, arches, bname))
642 # Crude hack with open and append, but this whole section is and should be redone.
643 if self.notautomatic:
644 release=open("Release", "a")
# NOTE(review): no trailing '\n' is written after "NotAutomatic: yes" —
# TODO confirm whether the Release file is expected to end in a newline.
645 release.write("NotAutomatic: yes")
650 keyring = "--secret-keyring \"%s\"" % cnf["Dinstall::SigningKeyring"]
651 if cnf.has_key("Dinstall::SigningPubKeyring"):
652 keyring += " --keyring \"%s\"" % cnf["Dinstall::SigningPubKeyring"]
# NOTE(review): the trailing '"""' closes the string and concatenates an
# empty literal — harmless, but almost certainly a typo for a single '"'.
654 os.system("gpg %s --no-options --batch --no-tty --armour --default-key %s --detach-sign -o Release.gpg Release""" % (keyring, self.signingkey))
656 # Move the files if we got this far
657 os.rename('Release', os.path.join(bname, 'Release'))
659 os.rename('Release.gpg', os.path.join(bname, 'Release.gpg'))
661 # Clean up any left behind files
688 def clean_and_update(self, starttime, Logger, dryrun=False):
689 """WARNING: This routine commits for you"""
690 session = DBConn().session().object_session(self)
692 if self.generate_metadata and not dryrun:
693 self.write_metadata(starttime)
695 # Grab files older than our execution time
696 older = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()
702 Logger.log(["I: Would have removed %s from the queue" % o.fullpath])
704 Logger.log(["I: Removing %s from the queue" % o.fullpath])
705 os.unlink(o.fullpath)
708 # If it wasn't there, don't worry
709 if e.errno == ENOENT:
712 # TODO: Replace with proper logging call
713 Logger.log(["E: Could not remove %s" % o.fullpath])
720 for f in os.listdir(self.path):
721 if f.startswith('Packages') or f.startswith('Source') or f.startswith('Release') or f.startswith('advisory'):
725 r = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter_by(filename = f).one()
726 except NoResultFound:
727 fp = os.path.join(self.path, f)
729 Logger.log(["I: Would remove unused link %s" % fp])
731 Logger.log(["I: Removing unused link %s" % fp])
735 Logger.log(["E: Failed to unlink unreferenced file %s" % r.fullpath])
737 def add_file_from_pool(self, poolfile):
738 """Copies a file into the pool. Assumes that the PoolFile object is
739 attached to the same SQLAlchemy session as the Queue object is.
741 The caller is responsible for committing after calling this function."""
742 poolfile_basename = poolfile.filename[poolfile.filename.rindex(os.sep)+1:]
744 # Check if we have a file of this name or this ID already
745 for f in self.queuefiles:
746 if f.fileid is not None and f.fileid == poolfile.file_id or \
747 f.poolfile.filename == poolfile_basename:
748 # In this case, update the BuildQueueFile entry so we
749 # don't remove it too early
750 f.lastused = datetime.now()
751 DBConn().session().object_session(poolfile).add(f)
754 # Prepare BuildQueueFile object
755 qf = BuildQueueFile()
756 qf.build_queue_id = self.queue_id
757 qf.lastused = datetime.now()
758 qf.filename = poolfile_basename
760 targetpath = poolfile.fullpath
761 queuepath = os.path.join(self.path, poolfile_basename)
765 # We need to copy instead of symlink
767 utils.copy(targetpath, queuepath)
768 # NULL in the fileid field implies a copy
771 os.symlink(targetpath, queuepath)
772 qf.fileid = poolfile.file_id
776 # Get the same session as the PoolFile is using and add the qf to it
777 DBConn().session().object_session(poolfile).add(qf)
782 __all__.append('BuildQueue')
785 def get_build_queue(queuename, session=None):
787 Returns BuildQueue object for given C{queue name}, creating it if it does not
790 @type queuename: string
791 @param queuename: The name of the queue
793 @type session: Session
794 @param session: Optional SQLA session object (a temporary one will be
795 generated if not supplied)
798 @return: BuildQueue object for the given queue
801 q = session.query(BuildQueue).filter_by(queue_name=queuename)
805 except NoResultFound:
808 __all__.append('get_build_queue')
810 ################################################################################
812 class BuildQueueFile(object):
813 def __init__(self, *args, **kwargs):
817 return '<BuildQueueFile %s (%s)>' % (self.filename, self.build_queue_id)
821 return os.path.join(self.buildqueue.path, self.filename)
824 __all__.append('BuildQueueFile')
826 ################################################################################
828 class ChangePendingBinary(object):
829 def __init__(self, *args, **kwargs):
833 return '<ChangePendingBinary %s>' % self.change_pending_binary_id
835 __all__.append('ChangePendingBinary')
837 ################################################################################
839 class ChangePendingFile(object):
840 def __init__(self, *args, **kwargs):
844 return '<ChangePendingFile %s>' % self.change_pending_file_id
846 __all__.append('ChangePendingFile')
848 ################################################################################
850 class ChangePendingSource(object):
851 def __init__(self, *args, **kwargs):
855 return '<ChangePendingSource %s>' % self.change_pending_source_id
857 __all__.append('ChangePendingSource')
859 ################################################################################
861 class Component(ORMObject):
862 def __init__(self, component_name = None):
863 self.component_name = component_name
865 def __eq__(self, val):
866 if isinstance(val, str):
867 return (self.component_name == val)
868 # This signals to use the normal comparison operator
869 return NotImplemented
871 def __ne__(self, val):
872 if isinstance(val, str):
873 return (self.component_name != val)
874 # This signals to use the normal comparison operator
875 return NotImplemented
877 def properties(self):
878 return ['component_name', 'component_id', 'description', 'location', \
881 def not_null_constraints(self):
882 return ['component_name']
885 __all__.append('Component')
888 def get_component(component, session=None):
890 Returns database id for given C{component}.
892 @type component: string
893 @param component: The name of the override type
896 @return: the database id for the given component
899 component = component.lower()
901 q = session.query(Component).filter_by(component_name=component)
905 except NoResultFound:
908 __all__.append('get_component')
910 ################################################################################
912 class DBConfig(object):
913 def __init__(self, *args, **kwargs):
917 return '<DBConfig %s>' % self.name
919 __all__.append('DBConfig')
921 ################################################################################
924 def get_or_set_contents_file_id(filename, session=None):
926 Returns database id for given filename.
928 If no matching file is found, a row is inserted.
930 @type filename: string
931 @param filename: The filename
932 @type session: SQLAlchemy
933 @param session: Optional SQL session object (a temporary one will be
934 generated if not supplied). If not passed, a commit will be performed at
935 the end of the function, otherwise the caller is responsible for committing.
938 @return: the database id for the given filename
941 q = session.query(ContentFilename).filter_by(filename=filename)
944 ret = q.one().cafilename_id
945 except NoResultFound:
946 cf = ContentFilename()
947 cf.filename = filename
949 session.commit_or_flush()
950 ret = cf.cafilename_id
954 __all__.append('get_or_set_contents_file_id')
957 def get_contents(suite, overridetype, section=None, session=None):
959 Returns contents for a suite / overridetype combination, limiting
960 to a section if not None.
963 @param suite: Suite object
965 @type overridetype: OverrideType
966 @param overridetype: OverrideType object
968 @type section: Section
969 @param section: Optional section object to limit results to
971 @type session: SQLAlchemy
972 @param session: Optional SQL session object (a temporary one will be
973 generated if not supplied)
976 @return: ResultsProxy object set up to return tuples of (filename, section,
980 # find me all of the contents for a given suite
981 contents_q = """SELECT (p.path||'/'||n.file) AS fn,
985 FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
986 JOIN content_file_names n ON (c.filename=n.id)
987 JOIN binaries b ON (b.id=c.binary_pkg)
988 JOIN override o ON (o.package=b.package)
989 JOIN section s ON (s.id=o.section)
990 WHERE o.suite = :suiteid AND o.type = :overridetypeid
991 AND b.type=:overridetypename"""
993 vals = {'suiteid': suite.suite_id,
994 'overridetypeid': overridetype.overridetype_id,
995 'overridetypename': overridetype.overridetype}
997 if section is not None:
998 contents_q += " AND s.id = :sectionid"
999 vals['sectionid'] = section.section_id
1001 contents_q += " ORDER BY fn"
1003 return session.execute(contents_q, vals)
1005 __all__.append('get_contents')
1007 ################################################################################
1009 class ContentFilepath(object):
1010 def __init__(self, *args, **kwargs):
1014 return '<ContentFilepath %s>' % self.filepath
1016 __all__.append('ContentFilepath')
1019 def get_or_set_contents_path_id(filepath, session=None):
1021 Returns database id for given path.
1023 If no matching path is found, a row is inserted.
1025 @type filepath: string
1026 @param filepath: The filepath
1028 @type session: SQLAlchemy
1029 @param session: Optional SQL session object (a temporary one will be
1030 generated if not supplied). If not passed, a commit will be performed at
1031 the end of the function, otherwise the caller is responsible for committing.
1034 @return: the database id for the given path
1037 q = session.query(ContentFilepath).filter_by(filepath=filepath)
1040 ret = q.one().cafilepath_id
1041 except NoResultFound:
1042 cf = ContentFilepath()
1043 cf.filepath = filepath
1045 session.commit_or_flush()
1046 ret = cf.cafilepath_id
1050 __all__.append('get_or_set_contents_path_id')
1052 ################################################################################
1054 class ContentAssociation(object):
1055 def __init__(self, *args, **kwargs):
1059 return '<ContentAssociation %s>' % self.ca_id
1061 __all__.append('ContentAssociation')
1063 def insert_content_paths(binary_id, fullpaths, session=None):
1065 Make sure given path is associated with given binary id
1067 @type binary_id: int
1068 @param binary_id: the id of the binary
1069 @type fullpaths: list
1070 @param fullpaths: the list of paths of the file being associated with the binary
1071 @type session: SQLAlchemy session
1072 @param session: Optional SQLAlchemy session. If this is passed, the caller
1073 is responsible for ensuring a transaction has begun and committing the
1074 results or rolling back based on the result code. If not passed, a commit
1075 will be performed at the end of the function, otherwise the caller is
1076 responsible for committing.
1078 @return: True upon success
1081 privatetrans = False
1083 session = DBConn().session()
# Generator yielding one bind-parameter dict per path.
1088 def generate_path_dicts():
1089 for fullpath in fullpaths:
# NOTE(review): strips a leading './' so paths are stored relative —
# presumably callers pass dpkg-style './path' entries; confirm.
1090 if fullpath.startswith( './' ):
1091 fullpath = fullpath[2:]
1093 yield {'filename':fullpath, 'id': binary_id }
1095 for d in generate_path_dicts():
1096 session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
1105 traceback.print_exc()
1107 # Only rollback if we set up the session ourselves
1114 __all__.append('insert_content_paths')
1116 ################################################################################
1118 class DSCFile(object):
1119 def __init__(self, *args, **kwargs):
1123 return '<DSCFile %s>' % self.dscfile_id
1125 __all__.append('DSCFile')
1128 def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
1130 Returns a list of DSCFiles which may be empty
1132 @type dscfile_id: int (optional)
1133 @param dscfile_id: the dscfile_id of the DSCFiles to find
1135 @type source_id: int (optional)
1136 @param source_id: the source id related to the DSCFiles to find
1138 @type poolfile_id: int (optional)
1139 @param poolfile_id: the poolfile id related to the DSCFiles to find
1142 @return: Possibly empty list of DSCFiles
1145 q = session.query(DSCFile)
1147 if dscfile_id is not None:
1148 q = q.filter_by(dscfile_id=dscfile_id)
1150 if source_id is not None:
1151 q = q.filter_by(source_id=source_id)
1153 if poolfile_id is not None:
1154 q = q.filter_by(poolfile_id=poolfile_id)
1158 __all__.append('get_dscfiles')
1160 ################################################################################
1162 class PoolFile(ORMObject):
1163 def __init__(self, filename = None, location = None, filesize = -1, \
1165 self.filename = filename
1166 self.location = location
1167 self.filesize = filesize
1168 self.md5sum = md5sum
1172 return os.path.join(self.location.path, self.filename)
1174 def is_valid(self, filesize = -1, md5sum = None):\
1175 return self.filesize == filesize and self.md5sum == md5sum
1177 def properties(self):
1178 return ['filename', 'file_id', 'filesize', 'md5sum', 'sha1sum', \
1179 'sha256sum', 'location', 'source', 'binary', 'last_used']
1181 def not_null_constraints(self):
1182 return ['filename', 'md5sum', 'location']
1184 __all__.append('PoolFile')
1187 def check_poolfile(filename, filesize, md5sum, location_id, session=None):
1190 (ValidFileFound [boolean], PoolFile object or None)
1192 @type filename: string
1193 @param filename: the filename of the file to check against the DB
1196 @param filesize: the size of the file to check against the DB
1198 @type md5sum: string
1199 @param md5sum: the md5sum of the file to check against the DB
1201 @type location_id: int
1202 @param location_id: the id of the location to look in
1205 @return: Tuple of length 2.
1206 - If valid pool file found: (C{True}, C{PoolFile object})
1207 - If valid pool file not found:
1208 - (C{False}, C{None}) if no file found
1209 - (C{False}, C{PoolFile object}) if file found with size/md5sum mismatch
1212 poolfile = session.query(Location).get(location_id). \
1213 files.filter_by(filename=filename).first()
1215 if poolfile and poolfile.is_valid(filesize = filesize, md5sum = md5sum):
1218 return (valid, poolfile)
1220 __all__.append('check_poolfile')
1222 # TODO: the implementation can trivially be inlined at the place where the
1223 # function is called
1225 def get_poolfile_by_id(file_id, session=None):
1227 Returns a PoolFile objects or None for the given id
1230 @param file_id: the id of the file to look for
1232 @rtype: PoolFile or None
1233 @return: either the PoolFile object or None
1236 return session.query(PoolFile).get(file_id)
1238 __all__.append('get_poolfile_by_id')
1241 def get_poolfile_like_name(filename, session=None):
1243 Returns an array of PoolFile objects which are like the given name
1245 @type filename: string
1246 @param filename: the filename of the file to check against the DB
1249 @return: array of PoolFile objects
1252 # TODO: There must be a way of properly using bind parameters with %FOO%
1253 q = session.query(PoolFile).filter(PoolFile.filename.like('%%/%s' % filename))
1257 __all__.append('get_poolfile_like_name')
@session_wrapper
def add_poolfile(filename, datadict, location_id, session=None):
    """
    Add a new file to the pool

    @type filename: string
    @param filename: filename

    @type datadict: dict
    @param datadict: dict with needed data (size, md5sum, sha1sum, sha256sum)

    @type location_id: int
    @param location_id: database id of the location

    @rtype: PoolFile
    @return: the PoolFile object created
    """
    poolfile = PoolFile()
    poolfile.filename = filename
    poolfile.filesize = datadict["size"]
    poolfile.md5sum = datadict["md5sum"]
    poolfile.sha1sum = datadict["sha1sum"]
    poolfile.sha256sum = datadict["sha256sum"]
    poolfile.location_id = location_id

    session.add(poolfile)
    # Flush to get a file id (NB: This is not a commit)
    session.flush()

    return poolfile
1290 __all__.append('add_poolfile')
1292 ################################################################################
class Fingerprint(ORMObject):
    # One OpenPGP fingerprint row; related keyring/uid objects come from the
    # mapper configured elsewhere in this file -- TODO confirm.
    def __init__(self, fingerprint = None):
        self.fingerprint = fingerprint

    def properties(self):
        # Attribute names used by ORMObject's generic representation.
        # NOTE(review): the listing reviewed here elides the continuation of
        # this list, so the trailing entries are unknown from this view.
        return ['fingerprint', 'fingerprint_id', 'keyring', 'uid', \

    def not_null_constraints(self):
        # Columns that must be populated before insert -- used by ORMObject's
        # validation, presumably; verify against the base class.
        return ['fingerprint']
1305 __all__.append('Fingerprint')
@session_wrapper
def get_fingerprint(fpr, session=None):
    """
    Returns Fingerprint object for given fpr.

    @type fpr: string
    @param fpr: The fpr to find / add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).

    @rtype: Fingerprint
    @return: the Fingerprint object for the given fpr or None
    """

    q = session.query(Fingerprint).filter_by(fingerprint=fpr)

    try:
        ret = q.one()
    except NoResultFound:
        ret = None

    return ret
1332 __all__.append('get_fingerprint')
@session_wrapper
def get_or_set_fingerprint(fpr, session=None):
    """
    Returns Fingerprint object for given fpr.

    If no matching fpr is found, a row is inserted.

    @type fpr: string
    @param fpr: The fpr to find / add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for committing.
    A flush will be performed either way.

    @rtype: Fingerprint
    @return: the Fingerprint object for the given fpr
    """

    q = session.query(Fingerprint).filter_by(fingerprint=fpr)

    try:
        ret = q.one()
    except NoResultFound:
        fingerprint = Fingerprint()
        fingerprint.fingerprint = fpr
        session.add(fingerprint)
        session.commit_or_flush()
        ret = fingerprint

    return ret
1367 __all__.append('get_or_set_fingerprint')
1369 ################################################################################
# Helper routine for Keyring class
def get_ldap_name(entry):
    """
    Build a display name from an LDAP entry by joining its cn/mn/sn
    attributes, skipping empty values and the "-" placeholder.
    """
    name = []
    for k in ["cn", "mn", "sn"]:
        # LDAP attribute values arrive as lists; default to a single
        # empty string when the attribute is absent.
        ret = entry.get(k, [""])
        if ret and ret[0] != "" and ret[0] != "-":
            name.append(ret[0])
    return " ".join(name)
1380 ################################################################################
class Keyring(object):
    # Shell command template used to list a keyring's keys in gpg's
    # machine-readable (--with-colons) format; %s is the keyring path.
    gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
                     " --with-colons --fingerprint --fingerprint"

    # NOTE(review): the listing reviewed here elides lines in several of the
    # methods below; "[elided in listing]" markers show where.

    def __init__(self, *args, **kwargs):
        # [elided in listing: __init__ body and the __repr__ def line]

        return '<Keyring %s>' % self.keyring_name

    def de_escape_gpg_str(self, txt):
        # Undo gpg's "\xNN" escaping: after the capturing split, the
        # odd-indexed fragments are exactly the "\xNN" escape sequences.
        esclist = re.split(r'(\\x..)', txt)
        for x in range(1,len(esclist),2):
            esclist[x] = "%c" % (int(esclist[x][2:],16))
        return "".join(esclist)

    def parse_address(self, uid):
        """parses uid and returns a tuple of real name and email address"""
        (name, address) = email.Utils.parseaddr(uid)
        # Strip any parenthesised comment from the real-name part.
        name = re.sub(r"\s*[(].*[)]", "", name)
        name = self.de_escape_gpg_str(name)
        # [elided in listing]
        return (name, address)

    def load_keys(self, keyring):
        # Populate self.keys and self.fpr_lookup by parsing gpg's colon
        # output for the given keyring file.
        if not self.keyring_id:
            raise Exception('Must be initialized with database information')

        k = os.popen(self.gpg_invocation % keyring, "r")
        # [elided in listing: loop state initialisation]

        for line in k.xreadlines():
            field = line.split(":")
            if field[0] == "pub":
                # [elided in listing: current key id extraction]
                (name, addr) = self.parse_address(field[9])
                # [elided in listing]
                self.keys[key]["email"] = addr
                self.keys[key]["name"] = name
                self.keys[key]["fingerprints"] = []
                # [elided in listing]
            elif key and field[0] == "sub" and len(field) >= 12:
                # Subkey record: the capability field tells us whether the
                # subkey can sign.
                signingkey = ("s" in field[11])
            elif key and field[0] == "uid":
                (name, addr) = self.parse_address(field[9])
                # Keep the first uid that carries a plausible email address.
                if "email" not in self.keys[key] and "@" in addr:
                    self.keys[key]["email"] = addr
                    self.keys[key]["name"] = name
            elif signingkey and field[0] == "fpr":
                # Fingerprint of a signing-capable key: index it both ways.
                self.keys[key]["fingerprints"].append(field[9])
                self.fpr_lookup[field[9]] = key

    def import_users_from_ldap(self, session):
        # Look up accounts with key fingerprints in the configured LDAP
        # server and associate them with the keys loaded above.
        # [elided in listing: setup, presumably Config() -- TODO confirm]
        LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
        LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]

        l = ldap.open(LDAPServer)
        # Anonymous simple bind.
        l.simple_bind_s("","")
        Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
               "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
               ["uid", "keyfingerprint", "cn", "mn", "sn"])

        ldap_fin_uid_id = {}
        # [elided in listing: accumulators and the loop over Attrs entries]
        uid = entry["uid"][0]
        name = get_ldap_name(entry)
        fingerprints = entry["keyFingerPrint"]
        # [elided in listing]
        for f in fingerprints:
            key = self.fpr_lookup.get(f, None)
            if key not in self.keys:
                # [elided in listing]
            self.keys[key]["uid"] = uid
            # [elided in listing]
            keyid = get_or_set_uid(uid, session).uid_id
            byuid[keyid] = (uid, name)
            byname[uid] = (keyid, name)

        return (byname, byuid)

    def generate_users_from_keyring(self, format, session):
        # Derive uid maps from the keys' email addresses, using the given
        # format string to build the uid value.
        # [elided in listing: accumulators]
        for x in self.keys.keys():
            if "email" not in self.keys[x]:
                # [elided in listing]
                self.keys[x]["uid"] = format % "invalid-uid"
            # [elided in listing: else branch]
            uid = format % self.keys[x]["email"]
            keyid = get_or_set_uid(uid, session).uid_id
            byuid[keyid] = (uid, self.keys[x]["name"])
            byname[uid] = (keyid, self.keys[x]["name"])
            self.keys[x]["uid"] = uid

        # [elided in listing: guard for the invalid-uid fallback entry]
        uid = format % "invalid-uid"
        keyid = get_or_set_uid(uid, session).uid_id
        byuid[keyid] = (uid, "ungeneratable user id")
        byname[uid] = (keyid, "ungeneratable user id")

        return (byname, byuid)
1502 __all__.append('Keyring')
@session_wrapper
def get_keyring(keyring, session=None):
    """
    If C{keyring} does not have an entry in the C{keyrings} table yet, return None
    If C{keyring} already has an entry, simply return the existing Keyring

    @type keyring: string
    @param keyring: the keyring name

    @rtype: Keyring
    @return: the Keyring object for this keyring
    """

    q = session.query(Keyring).filter_by(keyring_name=keyring)

    try:
        ret = q.one()
    except NoResultFound:
        ret = None

    return ret
1524 __all__.append('get_keyring')
1526 ################################################################################
class KeyringACLMap(object):
    """Mapped row linking a keyring to an ACL entry."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<KeyringACLMap %s>' % self.keyring_acl_map_id
1535 __all__.append('KeyringACLMap')
1537 ################################################################################
class DBChange(object):
    # One uploaded .changes file as stored in the database.
    def __init__(self, *args, **kwargs):
        # [elided in listing: __init__ body and the __repr__ def line]

        return '<DBChange %s>' % self.changesname

    def clean_from_queue(self):
        # Detach this changes entry from the policy queue that holds it.
        session = DBConn().session().object_session(self)

        # Remove changes_pool_files entries
        # [elided in listing]

        # Remove changes_pending_files references
        # [elided in listing]

        # Clear out of queue
        self.in_queue = None
        self.approved_for_id = None
1559 __all__.append('DBChange')
@session_wrapper
def get_dbchange(filename, session=None):
    """
    returns DBChange object for given C{filename}.

    @type filename: string
    @param filename: the name of the file

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: DBChange
    @return: DBChange object for the given filename (C{None} if not present)
    """

    q = session.query(DBChange).filter_by(changesname=filename)

    try:
        ret = q.one()
    except NoResultFound:
        ret = None

    return ret
1584 __all__.append('get_dbchange')
1586 ################################################################################
# TODO: Why do we have a separate Location class? Can't it be fully integrated
# into class Component?
class Location(ORMObject):
    def __init__(self, path = None, component = None):
        self.path = path
        self.component = component
        # the column 'type' should go away, see comment at mapper
        self.archive_type = 'pool'

    def properties(self):
        # Attribute names used by ORMObject's generic representation.
        return ['path', 'archive_type', 'component', 'files_count']

    def not_null_constraints(self):
        # Columns that must be populated before insert.
        return ['path', 'archive_type']
1603 __all__.append('Location')
@session_wrapper
def get_location(location, component=None, archive=None, session=None):
    """
    Returns Location object for the given combination of location, component
    and archive.

    @type location: string
    @param location: the path of the location, e.g. I{/srv/ftp-master.debian.org/ftp/pool/}

    @type component: string
    @param component: the component name (if None, no restriction applied)

    @type archive: string
    @param archive: the archive name (if None, no restriction applied)

    @rtype: Location / None
    @return: Either a Location object or None if one can't be found
    """

    q = session.query(Location).filter_by(path=location)

    if archive is not None:
        q = q.join(Archive).filter_by(archive_name=archive)

    if component is not None:
        q = q.join(Component).filter_by(component_name=component)

    try:
        ret = q.one()
    except NoResultFound:
        ret = None

    return ret
1637 __all__.append('get_location')
1639 ################################################################################
class Maintainer(ORMObject):
    def __init__(self, name = None):
        self.name = name

    def properties(self):
        # Attribute names used by ORMObject's generic representation.
        return ['name', 'maintainer_id']

    def not_null_constraints(self):
        return ['name']

    def get_split_maintainer(self):
        # Returns the (name, rfc822, rfc2047, email) split produced by
        # fix_maintainer; empty strings when no name is set.
        if not hasattr(self, 'name') or self.name is None:
            return ('', '', '', '')

        return fix_maintainer(self.name.strip())
1657 __all__.append('Maintainer')
@session_wrapper
def get_or_set_maintainer(name, session=None):
    """
    Returns Maintainer object for given maintainer name.

    If no matching maintainer name is found, a row is inserted.

    @type name: string
    @param name: The maintainer name to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for committing.
    A flush will be performed either way.

    @rtype: Maintainer
    @return: the Maintainer object for the given maintainer
    """

    q = session.query(Maintainer).filter_by(name=name)

    try:
        ret = q.one()
    except NoResultFound:
        maintainer = Maintainer()
        maintainer.name = name
        session.add(maintainer)
        session.commit_or_flush()
        ret = maintainer

    return ret
1691 __all__.append('get_or_set_maintainer')
@session_wrapper
def get_maintainer(maintainer_id, session=None):
    """
    Return the Maintainer object behind C{maintainer_id} or None if that
    maintainer_id is invalid.

    @type maintainer_id: int
    @param maintainer_id: the id of the maintainer

    @rtype: Maintainer
    @return: the Maintainer with this C{maintainer_id}
    """

    return session.query(Maintainer).get(maintainer_id)
1708 __all__.append('get_maintainer')
1710 ################################################################################
class NewComment(object):
    """A ftpmaster comment attached to a package/version in the NEW queue."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)
1719 __all__.append('NewComment')
@session_wrapper
def has_new_comment(package, version, session=None):
    """
    Returns true if the given combination of C{package}, C{version} has a comment.

    @type package: string
    @param package: name of the package

    @type version: string
    @param version: package version

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: boolean
    @return: true/false
    """

    q = session.query(NewComment)
    q = q.filter_by(package=package)
    q = q.filter_by(version=version)

    return bool(q.count() > 0)
1746 __all__.append('has_new_comment')
@session_wrapper
def get_new_comments(package=None, version=None, comment_id=None, session=None):
    """
    Returns (possibly empty) list of NewComment objects for the given
    parameters

    @type package: string (optional)
    @param package: name of the package

    @type version: string (optional)
    @param version: package version

    @type comment_id: int (optional)
    @param comment_id: An id of a comment

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of NewComment objects will be returned
    """

    q = session.query(NewComment)
    if package is not None: q = q.filter_by(package=package)
    if version is not None: q = q.filter_by(version=version)
    if comment_id is not None: q = q.filter_by(comment_id=comment_id)

    return q.all()
1778 __all__.append('get_new_comments')
1780 ################################################################################
class Override(object):
    """A per-suite override entry for a package."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<Override %s (%s)>' % (self.package, self.suite_id)
1789 __all__.append('Override')
@session_wrapper
def get_override(package, suite=None, component=None, overridetype=None, session=None):
    """
    Returns Override object for the given parameters

    @type package: string
    @param package: The name of the package

    @type suite: string, list or None
    @param suite: The name of the suite (or suites if a list) to limit to.  If
                  None, don't limit.  Defaults to None.

    @type component: string, list or None
    @param component: The name of the component (or components if a list) to
                      limit to.  If None, don't limit.  Defaults to None.

    @type overridetype: string, list or None
    @param overridetype: The name of the overridetype (or overridetypes if a list) to
                         limit to.  If None, don't limit.  Defaults to None.

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of Override objects will be returned
    """

    q = session.query(Override)
    q = q.filter_by(package=package)

    # Each filter accepts a scalar or a list; normalise to a list first.
    if suite is not None:
        if not isinstance(suite, list):
            suite = [suite]
        q = q.join(Suite).filter(Suite.suite_name.in_(suite))

    if component is not None:
        if not isinstance(component, list):
            component = [component]
        q = q.join(Component).filter(Component.component_name.in_(component))

    if overridetype is not None:
        if not isinstance(overridetype, list):
            overridetype = [overridetype]
        q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype))

    return q.all()
1836 __all__.append('get_override')
1839 ################################################################################
class OverrideType(object):
    """An override type (e.g. deb, udeb, dsc)."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<OverrideType %s>' % self.overridetype
1848 __all__.append('OverrideType')
@session_wrapper
def get_override_type(override_type, session=None):
    """
    Returns OverrideType object for given C{override type}.

    @type override_type: string
    @param override_type: The name of the override type

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: OverrideType
    @return: the OverrideType object for the given override type (None if
    not found)
    """

    q = session.query(OverrideType).filter_by(overridetype=override_type)

    try:
        ret = q.one()
    except NoResultFound:
        ret = None

    return ret
1873 __all__.append('get_override_type')
1875 ################################################################################
class DebContents(object):
    """A path contained in a binary (deb) package."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # Fixed typo in the repr tag (was 'DebConetnts').
        return '<DebContents %s: %s>' % (self.package.package,self.file)
1884 __all__.append('DebContents')
class UdebContents(object):
    """A path contained in a udeb package."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # Fixed typo in the repr tag (was 'UdebConetnts').
        return '<UdebContents %s: %s>' % (self.package.package,self.file)
1894 __all__.append('UdebContents')
class PendingBinContents(object):
    """A path pending association with a binary package (pre-accept)."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<PendingBinContents %s>' % self.contents_id
1903 __all__.append('PendingBinContents')
def insert_pending_content_paths(package,
    # [elided in listing: remaining parameters -- per the docstring below,
    #  fullpaths and an optional session]
    """
    Make sure given paths are temporarily associated with given
    package.

    @type package: dict
    @param package: the package to associate with should have been read in from the binary control file
    @type fullpaths: list
    @param fullpaths: the list of paths of the file being associated with the binary
    @type session: SQLAlchemy session
    @param session: Optional SQLAlchemy session.  If this is passed, the caller
    is responsible for ensuring a transaction has begun and committing the
    results or rolling back based on the result code.  If not passed, a commit
    will be performed at the end of the function

    @return: True upon success, False if there is a problem
    """

    # True when this function created its own session (and therefore owns
    # commit/rollback).
    privatetrans = False
    # [elided in listing: guard around private session creation]
    session = DBConn().session()
    # [elided in listing]

    arch = get_architecture(package['Architecture'], session)
    arch_id = arch.arch_id

    # Remove any already existing recorded files for this package
    q = session.query(PendingBinContents)
    q = q.filter_by(package=package['Package'])
    q = q.filter_by(version=package['Version'])
    q = q.filter_by(architecture=arch_id)
    # [elided in listing: deletion of the matched rows]

    for fullpath in fullpaths:
        # Normalise paths: drop a leading "./".
        if fullpath.startswith( "./" ):
            fullpath = fullpath[2:]

        pca = PendingBinContents()
        pca.package = package['Package']
        pca.version = package['Version']
        # [elided in listing: pca.file assignment]
        pca.architecture = arch_id

        # [elided in listing: udeb/deb discrimination]
        pca.type = 8 # gross
        # [elided in listing]
        pca.type = 7 # also gross
        # [elided in listing: session.add(pca)]

    # Only commit if we set up the session ourself
    # [elided in listing: commit/close and success return]
    except Exception, e:
        traceback.print_exc()

        # Only rollback if we set up the session ourself
        # [elided in listing: rollback/close and failure return]
1978 __all__.append('insert_pending_content_paths')
1980 ################################################################################
class PolicyQueue(object):
    """A policy queue (e.g. NEW) that uploads pass through."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<PolicyQueue %s>' % self.queue_name
1989 __all__.append('PolicyQueue')
@session_wrapper
def get_policy_queue(queuename, session=None):
    """
    Returns PolicyQueue object for given C{queue name}

    @type queuename: string
    @param queuename: The name of the queue

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: PolicyQueue
    @return: PolicyQueue object for the given queue
    """

    q = session.query(PolicyQueue).filter_by(queue_name=queuename)

    try:
        ret = q.one()
    except NoResultFound:
        ret = None

    return ret
2014 __all__.append('get_policy_queue')
@session_wrapper
def get_policy_queue_from_path(pathname, session=None):
    """
    Returns PolicyQueue object for given C{path name}

    @type pathname: string
    @param pathname: The path

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: PolicyQueue
    @return: PolicyQueue object for the given queue
    """

    q = session.query(PolicyQueue).filter_by(path=pathname)

    try:
        ret = q.one()
    except NoResultFound:
        ret = None

    return ret
2039 __all__.append('get_policy_queue_from_path')
2041 ################################################################################
class Priority(object):
    """A package priority; compares equal to its priority name string."""
    def __init__(self, *args, **kwargs):
        pass

    def __eq__(self, val):
        if isinstance(val, str):
            return (self.priority == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.priority != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __repr__(self):
        return '<Priority %s (%s)>' % (self.priority, self.priority_id)
2062 __all__.append('Priority')
@session_wrapper
def get_priority(priority, session=None):
    """
    Returns Priority object for given C{priority name}.

    @type priority: string
    @param priority: The name of the priority

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Priority
    @return: Priority object for the given priority
    """

    q = session.query(Priority).filter_by(priority=priority)

    try:
        ret = q.one()
    except NoResultFound:
        ret = None

    return ret
2087 __all__.append('get_priority')
@session_wrapper
def get_priorities(session=None):
    """
    Returns dictionary of priority names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of priority names -> id mappings
    """

    ret = {}
    q = session.query(Priority)
    for x in q.all():
        ret[x.priority] = x.priority_id

    return ret
2109 __all__.append('get_priorities')
2111 ################################################################################
class Section(object):
    """A package section; compares equal to its section name string."""
    def __init__(self, *args, **kwargs):
        pass

    def __eq__(self, val):
        if isinstance(val, str):
            return (self.section == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.section != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __repr__(self):
        return '<Section %s>' % self.section
2132 __all__.append('Section')
@session_wrapper
def get_section(section, session=None):
    """
    Returns Section object for given C{section name}.

    @type section: string
    @param section: The name of the section

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Section
    @return: Section object for the given section name
    """

    q = session.query(Section).filter_by(section=section)

    try:
        ret = q.one()
    except NoResultFound:
        ret = None

    return ret
2157 __all__.append('get_section')
@session_wrapper
def get_sections(session=None):
    """
    Returns dictionary of section names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of section names -> id mappings
    """

    ret = {}
    q = session.query(Section)
    for x in q.all():
        ret[x.section] = x.section_id

    return ret
2179 __all__.append('get_sections')
2181 ################################################################################
class DBSource(ORMObject):
    """A source package version as stored in the database."""
    def __init__(self, source = None, version = None, maintainer = None, \
        changedby = None, poolfile = None, install_date = None):
        self.source = source
        self.version = version
        self.maintainer = maintainer
        self.changedby = changedby
        self.poolfile = poolfile
        self.install_date = install_date

    def properties(self):
        # Attribute names used by ORMObject's generic representation.
        return ['source', 'source_id', 'maintainer', 'changedby', \
            'fingerprint', 'poolfile', 'version', 'suites_count', \
            'install_date', 'binaries_count']

    def not_null_constraints(self):
        # Columns that must be populated before insert.
        # Fixed: 'install_date' was listed twice.
        return ['source', 'version', 'install_date', 'maintainer', \
            'changedby', 'poolfile']
2202 __all__.append('DBSource')
def source_exists(source, source_version, suites = ["any"], session=None):
    """
    Ensure that source exists somewhere in the archive for the binary
    upload being processed.
      1. exact match     => 1.0-3
      2. bin-only NMU    => 1.0-3+b1 , 1.0-3.1+b1

    @type source: string
    @param source: source name

    @type source_version: string
    @param source_version: expected source version

    @type suites: list
    @param suites: list of suites to check in, default I{any}
    NOTE(review): mutable default argument; the visible code does not mutate
    it, but None + local default would be safer -- TODO confirm callers.

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: int
    @return: returns 1 if a source with expected version is found, otherwise 0
    """

    # [elided in listing: configuration/result initialisation]

    from daklib.regexes import re_bin_only_nmu
    # Strip a bin-NMU suffix to also match the original source version.
    orig_source_version = re_bin_only_nmu.sub('', source_version)

    for suite in suites:
        q = session.query(DBSource).filter_by(source=source). \
            filter(DBSource.version.in_([source_version, orig_source_version]))
        # [elided in listing: the "any" special case guard]
        # source must exist in suite X, or in some other suite that's
        # mapped to X, recursively... silent-maps are counted too,
        # unreleased-maps aren't.
        maps = cnf.ValueList("SuiteMappings")[:]
        # [elided in listing]
        maps = [ m.split() for m in maps ]
        maps = [ (x[1], x[2]) for x in maps
                 if x[0] == "map" or x[0] == "silent-map" ]
        # [elided in listing: seed list of suites and expansion loop header]
        if x[1] in s and x[0] not in s:
            # [elided in listing: append the mapped suite]

        q = q.filter(DBSource.suites.any(Suite.suite_name.in_(s)))
        # [elided in listing: match check / continue]

    # No source found so return not ok
    # [elided in listing: result bookkeeping and final return]
2263 __all__.append('source_exists')
@session_wrapper
def get_suites_source_in(source, session=None):
    """
    Returns list of Suite objects which given C{source} name is in

    @type source: str
    @param source: DBSource package name to search for

    @rtype: list
    @return: list of Suite objects for the given source
    """

    return session.query(Suite).filter(Suite.sources.any(source=source)).all()
2279 __all__.append('get_suites_source_in')
@session_wrapper
def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=None):
    """
    Returns list of DBSource objects for given C{source} name and other parameters

    @type source: str
    @param source: DBSource package name to search for

    @type version: str or None
    @param version: DBSource version name to search for or None if not applicable

    @type dm_upload_allowed: bool
    @param dm_upload_allowed: If None, no effect.  If True or False, only
    return packages with that dm_upload_allowed setting

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of DBSource objects for the given name (may be empty)
    """

    q = session.query(DBSource).filter_by(source=source)

    if version is not None:
        q = q.filter_by(version=version)

    if dm_upload_allowed is not None:
        q = q.filter_by(dm_upload_allowed=dm_upload_allowed)

    return q.all()
2314 __all__.append('get_sources_from_name')
2316 # FIXME: This function fails badly if it finds more than 1 source package and
2317 # its implementation is trivial enough to be inlined.
@session_wrapper
def get_source_in_suite(source, suite, session=None):
    """
    Returns a DBSource object for a combination of C{source} and C{suite}.

      - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
      - B{suite} - a suite name, eg. I{unstable}

    @type source: string
    @param source: source package name

    @type suite: string
    @param suite: the suite name

    @rtype: string
    @return: the version for I{source} in I{suite}
    """

    q = get_suite(suite, session).get_sources(source)
    try:
        return q.one()
    except NoResultFound:
        return None
2343 __all__.append('get_source_in_suite')
2345 ################################################################################
def add_dsc_to_db(u, filename, session=None):
    """
    Record a source (.dsc) upload in the database: the source row, its pool
    files, dsc_files entries and src_uploaders.  Returns
    (source, dsc_component, dsc_location_id, pfs).

    NOTE(review): the listing reviewed here elides several lines; markers
    below show where.
    """
    entry = u.pkg.files[filename]
    # [elided in listing: construction of 'source' and the 'pfs' list]
    source.source = u.pkg.dsc["source"]
    source.version = u.pkg.dsc["version"] # NB: not files[file]["version"], that has no epoch
    source.maintainer_id = get_or_set_maintainer(u.pkg.dsc["maintainer"], session).maintainer_id
    source.changedby_id = get_or_set_maintainer(u.pkg.changes["changed-by"], session).maintainer_id
    source.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    source.install_date = datetime.now().date()

    dsc_component = entry["component"]
    dsc_location_id = entry["location id"]

    source.dm_upload_allowed = (u.pkg.dsc.get("dm-upload-allowed", '') == "yes")

    # Set up a new poolfile if necessary
    if not entry.has_key("files id") or not entry["files id"]:
        filename = entry["pool name"] + filename
        poolfile = add_poolfile(filename, entry, dsc_location_id, session)
        # [elided in listing]
        pfs.append(poolfile)
        entry["files id"] = poolfile.file_id

    source.poolfile_id = entry["files id"]
    # [elided in listing: presumably session.add(source) + flush -- confirm]

    suite_names = u.pkg.changes["distribution"].keys()
    source.suites = session.query(Suite). \
        filter(Suite.suite_name.in_(suite_names)).all()

    # Add the source files to the DB (files and dsc_files)
    # [elided in listing: construction of 'dscfile']
    dscfile.source_id = source.source_id
    dscfile.poolfile_id = entry["files id"]
    session.add(dscfile)

    for dsc_file, dentry in u.pkg.dsc_files.items():
        # [elided in listing: construction of 'df']
        df.source_id = source.source_id

        # If the .orig tarball is already in the pool, it's
        # files id is stored in dsc_files by check_dsc().
        files_id = dentry.get("files id", None)

        # Find the entry in the files hash
        # TODO: Bail out here properly
        # [elided in listing: 'dfentry' lookup body of this loop]
        for f, e in u.pkg.files.items():

        if files_id is None:
            filename = dfentry["pool name"] + dsc_file

            (found, obj) = check_poolfile(filename, dentry["size"], dentry["md5sum"], dsc_location_id)
            # FIXME: needs to check for -1/-2 and or handle exception
            if found and obj is not None:
                files_id = obj.file_id
            # [elided in listing]

            # If still not found, add it
            if files_id is None:
                # HACK: Force sha1sum etc into dentry
                dentry["sha1sum"] = dfentry["sha1sum"]
                dentry["sha256sum"] = dfentry["sha256sum"]
                poolfile = add_poolfile(filename, dentry, dsc_location_id, session)
                pfs.append(poolfile)
                files_id = poolfile.file_id
        # [elided in listing: presumably the else branch for an existing id]
        poolfile = get_poolfile_by_id(files_id, session)
        if poolfile is None:
            utils.fubar("INTERNAL ERROR. Found no poolfile with id %d" % files_id)
        pfs.append(poolfile)

        df.poolfile_id = files_id
        # [elided in listing: session.add(df)]

    # Add the src_uploaders to the DB
    uploader_ids = [source.maintainer_id]
    if u.pkg.dsc.has_key("uploaders"):
        for up in u.pkg.dsc["uploaders"].replace(">, ", ">\t").split("\t"):
            # [elided in listing]
            uploader_ids.append(get_or_set_maintainer(up, session).maintainer_id)

    # [elided in listing: 'added_ids' initialisation]
    for up_id in uploader_ids:
        if added_ids.has_key(up_id):
            # [elided in listing]
            utils.warn("Already saw uploader %s for source %s" % (up_id, source.source))
        # [elided in listing: dedup bookkeeping and 'su' construction]
        su.maintainer_id = up_id
        su.source_id = source.source_id
        # [elided in listing: session.add(su)]

    return source, dsc_component, dsc_location_id, pfs
2453 __all__.append('add_dsc_to_db')
def add_deb_to_db(u, filename, session=None):
    """
    Contrary to what you might expect, this routine deals with both
    debs and udebs.  That info is in 'dbtype', whilst 'type' is
    'deb' for both of them

    NOTE(review): the listing reviewed here elides several lines; markers
    below show where.
    """
    entry = u.pkg.files[filename]

    # [elided in listing: construction of 'bin']
    bin.package = entry["package"]
    bin.version = entry["version"]
    bin.maintainer_id = get_or_set_maintainer(entry["maintainer"], session).maintainer_id
    bin.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    bin.arch_id = get_architecture(entry["architecture"], session).arch_id
    bin.binarytype = entry["dbtype"]

    # Resolve the pool location/file for this deb.
    filename = entry["pool name"] + filename
    fullpath = os.path.join(cnf["Dir::Pool"], filename)
    if not entry.get("location id", None):
        entry["location id"] = get_location(cnf["Dir::Pool"], entry["component"], session=session).location_id

    if entry.get("files id", None):
        # NOTE(review): bin.poolfile_id is read here before being assigned
        # on the next line -- looks suspicious; confirm intended order.
        poolfile = get_poolfile_by_id(bin.poolfile_id)
        bin.poolfile_id = entry["files id"]
    # [elided in listing: else branch]
    poolfile = add_poolfile(filename, entry, entry["location id"], session)
    bin.poolfile_id = entry["files id"] = poolfile.file_id

    # Look up the unique source package this binary was built from.
    bin_sources = get_sources_from_name(entry["source package"], entry["source version"], session=session)
    if len(bin_sources) != 1:
        raise NoSourceFieldError, "Unable to find a unique source id for %s (%s), %s, file %s, type %s, signed by %s" % \
                                  (bin.package, bin.version, entry["architecture"],
                                   filename, bin.binarytype, u.pkg.changes["fingerprint"])

    bin.source_id = bin_sources[0].source_id

    # Add and flush object so it has an ID
    # [elided in listing]

    suite_names = u.pkg.changes["distribution"].keys()
    bin.suites = session.query(Suite). \
        filter(Suite.suite_name.in_(suite_names)).all()

    # [elided in listing]

    # Deal with contents - disabled for now
    #contents = copy_temporary_contents(bin.package, bin.version, bin.architecture.arch_string, os.path.basename(filename), None, session)
    #    print "REJECT\nCould not determine contents of package %s" % bin.package
    #    session.rollback()
    #    raise MissingContents, "No contents stored for package %s, and couldn't determine contents of %s" % (bin.package, filename)
2513 __all__.append('add_deb_to_db')
2515 ################################################################################
class SourceACL(object):
    """An ACL entry controlling source uploads."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<SourceACL %s>' % self.source_acl_id
2524 __all__.append('SourceACL')
2526 ################################################################################
class SrcFormat(object):
    """A source package format (e.g. 1.0, 3.0 (quilt))."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<SrcFormat %s>' % (self.format_name)
2535 __all__.append('SrcFormat')
2537 ################################################################################
class SrcUploader(object):
    """A maintainer allowed to upload a given source package."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<SrcUploader %s>' % self.uploader_id
2546 __all__.append('SrcUploader')
2548 ################################################################################
# (display name, attribute name) pairs used by Suite's details dump.
# NOTE(review): the listing reviewed here appears to elide one entry
# between 'Origin' and 'Description' -- TODO confirm against history.
SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
                 ('SuiteID', 'suite_id'),
                 ('Version', 'version'),
                 ('Origin', 'origin'),
                 ('Description', 'description'),
                 ('Untouchable', 'untouchable'),
                 ('Announce', 'announce'),
                 ('Codename', 'codename'),
                 ('OverrideCodename', 'overridecodename'),
                 ('ValidTime', 'validtime'),
                 ('Priority', 'priority'),
                 ('NotAutomatic', 'notautomatic'),
                 ('CopyChanges', 'copychanges'),
                 ('OverrideSuite', 'overridesuite')]
2566 # Why the heck don't we have any UNIQUE constraints in table suite?
2567 # TODO: Add UNIQUE constraints for appropriate columns.
# ORM class for table 'suite'.  Inherits ORMObject's generic repr/validation
# machinery (driven by properties()/not_null_constraints() below).
2568 class Suite(ORMObject):
2569 def __init__(self, suite_name = None, version = None):
2570 self.suite_name = suite_name
2571 self.version = version
# Attributes shown by ORMObject's generic repr/str.
2573 def properties(self):
2574 return ['suite_name', 'version', 'sources_count', 'binaries_count']
# Attributes the ORMObject validator requires to be non-NULL before insert.
2576 def not_null_constraints(self):
2577 return ['suite_name', 'version']
# Allow comparing a Suite directly against a plain suite-name string.
2579 def __eq__(self, val):
2580 if isinstance(val, str):
2581 return (self.suite_name == val)
2582 # This signals to use the normal comparison operator
2583 return NotImplemented
2585 def __ne__(self, val):
2586 if isinstance(val, str):
2587 return (self.suite_name != val)
2588 # This signals to use the normal comparison operator
2589 return NotImplemented
# Pretty-printing loop: one "Label: value" line per SUITE_FIELDS entry.
# NOTE(review): the enclosing method header (presumably __str__ or details)
# and the list initialisation are elided in this listing.
2593 for disp, field in SUITE_FIELDS:
2594 val = getattr(self, field, None)
2596 ret.append("%s: %s" % (disp, val))
2598 return "\n".join(ret)
2600 def get_architectures(self, skipsrc=False, skipall=False):
2602 Returns list of Architecture objects
2604 @type skipsrc: boolean
2605 @param skipsrc: Whether to skip returning the 'source' architecture entry
2608 @type skipall: boolean
2609 @param skipall: Whether to skip returning the 'all' architecture entry
2613 @return: list of Architecture objects for the given name (may be empty)
# Query this suite's architectures via the suite_architectures association,
# optionally filtering out the pseudo-architectures 'source' and 'all'.
2616 q = object_session(self).query(Architecture).with_parent(self)
2618 q = q.filter(Architecture.arch_string != 'source')
2620 q = q.filter(Architecture.arch_string != 'all')
2621 return q.order_by(Architecture.arch_string).all()
2623 def get_sources(self, source):
2625 Returns a query object representing DBSource that is part of C{suite}.
2627 - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
2629 @type source: string
2630 @param source: source package name
2632 @rtype: sqlalchemy.orm.query.Query
2633 @return: a query of DBSource
# NOTE(review): the filter chain continues on a line elided in this listing
# (presumably restricting the query to this suite — confirm against upstream).
2637 session = object_session(self)
2638 return session.query(DBSource).filter_by(source = source). \
2641 __all__.append('Suite')
2644 def get_suite(suite, session=None):
2646 Returns Suite object for given C{suite name}.
2649 @param suite: The name of the suite
2651 @type session: Session
2652 @param session: Optional SQLA session object (a temporary one will be
2653 generated if not supplied)
2656 @return: Suite object for the requested suite name (None if not present)
# Per the docstring above, a missing suite yields None via the NoResultFound
# handler; the session-creation and return lines are elided in this listing.
2659 q = session.query(Suite).filter_by(suite_name=suite)
2663 except NoResultFound:
2666 __all__.append('get_suite')
2668 ################################################################################
2670 # TODO: should be removed because the implementation is too trivial
2672 def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
2674 Returns list of Architecture objects for given C{suite} name
2677 @param suite: Suite name to search for
2679 @type skipsrc: boolean
2680 @param skipsrc: Whether to skip returning the 'source' architecture entry
2683 @type skipall: boolean
2684 @param skipall: Whether to skip returning the 'all' architecture entry
2687 @type session: Session
2688 @param session: Optional SQL session object (a temporary one will be
2689 generated if not supplied)
2692 @return: list of Architecture objects for the given name (may be empty)
# Thin convenience wrapper: delegates to Suite.get_architectures.
# NOTE(review): raises AttributeError if get_suite returns None for an
# unknown suite name — confirm callers only pass valid suites.
2695 return get_suite(suite, session).get_architectures(skipsrc, skipall)
2697 __all__.append('get_suite_architectures')
2699 ################################################################################
# Trivial ORM class for the suite <-> src_format association table
# 'suite_src_formats'; mapped in DBConn.__setupmappers.
2701 class SuiteSrcFormat(object):
2702 def __init__(self, *args, **kwargs):
2706 return '<SuiteSrcFormat (%s, %s)>' % (self.suite_id, self.src_format_id)
2708 __all__.append('SuiteSrcFormat')
2711 def get_suite_src_formats(suite, session=None):
2713 Returns list of allowed SrcFormat for C{suite}.
2716 @param suite: Suite name to search for
2718 @type session: Session
2719 @param session: Optional SQL session object (a temporary one will be
2720 generated if not supplied)
2723 @return: the list of allowed source formats for I{suite}
# Join src_format -> suite_src_formats -> suite and order by format name.
# NOTE(review): the final return (presumably q.all()) is elided in this
# listing.
2726 q = session.query(SrcFormat)
2727 q = q.join(SuiteSrcFormat)
2728 q = q.join(Suite).filter_by(suite_name=suite)
2729 q = q.order_by('format_name')
2733 __all__.append('get_suite_src_formats')
2735 ################################################################################
# ORM class for table 'uid' (an OpenPGP key's user id).  Like Suite, it can
# be compared directly against a plain string.
2737 class Uid(ORMObject):
2738 def __init__(self, uid = None, name = None):
# Allow comparing a Uid object against a plain uid string.
2742 def __eq__(self, val):
2743 if isinstance(val, str):
2744 return (self.uid == val)
2745 # This signals to use the normal comparison operator
2746 return NotImplemented
2748 def __ne__(self, val):
2749 if isinstance(val, str):
2750 return (self.uid != val)
2751 # This signals to use the normal comparison operator
2752 return NotImplemented
# Attributes shown by ORMObject's generic repr/str.
2754 def properties(self):
2755 return ['uid', 'name', 'fingerprint']
# NOTE(review): the return list of not_null_constraints is elided here.
2757 def not_null_constraints(self):
2760 __all__.append('Uid')
2763 def get_or_set_uid(uidname, session=None):
2765 Returns uid object for given uidname.
2767 If no matching uidname is found, a row is inserted.
2769 @type uidname: string
2770 @param uidname: The uid to add
2772 @type session: SQLAlchemy
2773 @param session: Optional SQL session object (a temporary one will be
2774 generated if not supplied). If not passed, a commit will be performed at
2775 the end of the function, otherwise the caller is responsible for committing.
2778 @return: the uid object for the given uidname
# Look up first; on NoResultFound insert a new Uid row (insertion lines are
# elided in this listing), then commit-or-flush per the contract above.
2781 q = session.query(Uid).filter_by(uid=uidname)
2785 except NoResultFound:
2789 session.commit_or_flush()
2794 __all__.append('get_or_set_uid')
# Return the Uid that owns the given fingerprint string (join via the
# Fingerprint table).  NOTE(review): the try/return lines and the
# NoResultFound handler body are elided in this listing — presumably
# returns None when no match, as the sibling lookups above do; confirm.
2797 def get_uid_from_fingerprint(fpr, session=None):
2798 q = session.query(Uid)
2799 q = q.join(Fingerprint).filter_by(fingerprint=fpr)
2803 except NoResultFound:
2806 __all__.append('get_uid_from_fingerprint')
2808 ################################################################################
# Trivial ORM class for table 'upload_blocks' (blocks uploads of a source
# package); mapped in DBConn.__setupmappers with fingerprint/uid backrefs.
2810 class UploadBlock(object):
2811 def __init__(self, *args, **kwargs):
2815 return '<UploadBlock %s (%s)>' % (self.source, self.upload_block_id)
2817 __all__.append('UploadBlock')
2819 ################################################################################
# Borg-pattern ("shared state") database connection object: every instance
# shares __shared_state (see __init__), so schema reflection and mapper
# setup run only once per process.
2821 class DBConn(object):
2823 database module init.
2827 def __init__(self, *args, **kwargs):
# All instances alias the same attribute dict -> shared singleton state.
2828 self.__dict__ = self.__shared_state
# First construction performs the one-time setup; 'debug' kwarg turns on
# SQL echo in __createconn.  (has_key is a Python 2 idiom.)
2830 if not getattr(self, 'initialised', False):
2831 self.initialised = True
2832 self.debug = kwargs.has_key('debug')
# Reflect the database schema into SQLAlchemy Table objects, stored as
# self.tbl_<name> / self.view_<name>.  NOTE(review): several table-name
# entries in the tuples below are elided in this listing.
2835 def __setuptables(self):
2836 tables_with_primary = (
2847 'changes_pending_binaries',
2848 'changes_pending_files',
2849 'changes_pending_source',
2859 'pending_bin_contents',
2871 # The following tables have primary keys but sqlalchemy
2872 # version 0.5 fails to reflect them correctly with database
2873 # versions before upgrade #41.
2875 #'build_queue_files',
2878 tables_no_primary = (
2880 'changes_pending_files_map',
2881 'changes_pending_source_files',
2882 'changes_pool_files',
2885 'suite_architectures',
2886 'suite_src_formats',
2887 'suite_build_queue_copy',
2889 # see the comment above
2891 'build_queue_files',
2895 'almost_obsolete_all_associations',
2896 'almost_obsolete_src_associations',
2897 'any_associations_source',
2898 'bin_assoc_by_arch',
2899 'bin_associations_binaries',
2900 'binaries_suite_arch',
2901 'binfiles_suite_component_arch',
2904 'newest_all_associations',
2905 'newest_any_associations',
2907 'newest_src_association',
2908 'obsolete_all_associations',
2909 'obsolete_any_associations',
2910 'obsolete_any_by_all_associations',
2911 'obsolete_src_associations',
2913 'src_associations_bin',
2914 'src_associations_src',
2915 'suite_arch_by_name',
2918 # Sqlalchemy version 0.5 fails to reflect the SERIAL type
2919 # correctly and that is why we have to use a workaround. It can
2920 # be removed as soon as we switch to version 0.6.
2921 for table_name in tables_with_primary:
2922 table = Table(table_name, self.db_meta, \
2923 Column('id', Integer, primary_key = True), \
2924 autoload=True, useexisting=True)
2925 setattr(self, 'tbl_%s' % table_name, table)
2927 for table_name in tables_no_primary:
2928 table = Table(table_name, self.db_meta, autoload=True)
2929 setattr(self, 'tbl_%s' % table_name, table)
2931 for view_name in views:
2932 view = Table(view_name, self.db_meta, autoload=True)
2933 setattr(self, 'view_%s' % view_name, view)
# Wire the ORM classes defined earlier in this module to the reflected
# tables (classic SQLAlchemy 0.5 mapper() style; 'validator' is a
# MapperExtension defined elsewhere in this file).
2935 def __setupmappers(self):
2936 mapper(Architecture, self.tbl_architecture,
2937 properties = dict(arch_id = self.tbl_architecture.c.id,
2938 suites = relation(Suite, secondary=self.tbl_suite_architectures,
2939 order_by='suite_name',
2940 backref=backref('architectures', order_by='arch_string'))),
2941 extension = validator)
2943 mapper(Archive, self.tbl_archive,
2944 properties = dict(archive_id = self.tbl_archive.c.id,
2945 archive_name = self.tbl_archive.c.name))
2947 mapper(PendingBinContents, self.tbl_pending_bin_contents,
2948 properties = dict(contents_id =self.tbl_pending_bin_contents.c.id,
2949 filename = self.tbl_pending_bin_contents.c.filename,
2950 package = self.tbl_pending_bin_contents.c.package,
2951 version = self.tbl_pending_bin_contents.c.version,
2952 arch = self.tbl_pending_bin_contents.c.arch,
2953 otype = self.tbl_pending_bin_contents.c.type))
2955 mapper(DebContents, self.tbl_deb_contents,
2956 properties = dict(binary_id=self.tbl_deb_contents.c.binary_id,
2957 package=self.tbl_deb_contents.c.package,
2958 suite=self.tbl_deb_contents.c.suite,
2959 arch=self.tbl_deb_contents.c.arch,
2960 section=self.tbl_deb_contents.c.section,
2961 filename=self.tbl_deb_contents.c.filename))
2963 mapper(UdebContents, self.tbl_udeb_contents,
2964 properties = dict(binary_id=self.tbl_udeb_contents.c.binary_id,
2965 package=self.tbl_udeb_contents.c.package,
2966 suite=self.tbl_udeb_contents.c.suite,
2967 arch=self.tbl_udeb_contents.c.arch,
2968 section=self.tbl_udeb_contents.c.section,
2969 filename=self.tbl_udeb_contents.c.filename))
2971 mapper(BuildQueue, self.tbl_build_queue,
2972 properties = dict(queue_id = self.tbl_build_queue.c.id))
2974 mapper(BuildQueueFile, self.tbl_build_queue_files,
2975 properties = dict(buildqueue = relation(BuildQueue, backref='queuefiles'),
2976 poolfile = relation(PoolFile, backref='buildqueueinstances')))
2978 mapper(DBBinary, self.tbl_binaries,
2979 properties = dict(binary_id = self.tbl_binaries.c.id,
2980 package = self.tbl_binaries.c.package,
2981 version = self.tbl_binaries.c.version,
2982 maintainer_id = self.tbl_binaries.c.maintainer,
2983 maintainer = relation(Maintainer),
2984 source_id = self.tbl_binaries.c.source,
2985 source = relation(DBSource, backref='binaries'),
2986 arch_id = self.tbl_binaries.c.architecture,
2987 architecture = relation(Architecture),
2988 poolfile_id = self.tbl_binaries.c.file,
2989 poolfile = relation(PoolFile, backref=backref('binary', uselist = False)),
2990 binarytype = self.tbl_binaries.c.type,
2991 fingerprint_id = self.tbl_binaries.c.sig_fpr,
2992 fingerprint = relation(Fingerprint),
2993 install_date = self.tbl_binaries.c.install_date,
2994 suites = relation(Suite, secondary=self.tbl_bin_associations,
2995 backref=backref('binaries', lazy='dynamic'))),
2996 extension = validator)
2998 mapper(BinaryACL, self.tbl_binary_acl,
2999 properties = dict(binary_acl_id = self.tbl_binary_acl.c.id))
3001 mapper(BinaryACLMap, self.tbl_binary_acl_map,
3002 properties = dict(binary_acl_map_id = self.tbl_binary_acl_map.c.id,
3003 fingerprint = relation(Fingerprint, backref="binary_acl_map"),
3004 architecture = relation(Architecture)))
3006 mapper(Component, self.tbl_component,
3007 properties = dict(component_id = self.tbl_component.c.id,
3008 component_name = self.tbl_component.c.name),
3009 extension = validator)
3011 mapper(DBConfig, self.tbl_config,
3012 properties = dict(config_id = self.tbl_config.c.id))
3014 mapper(DSCFile, self.tbl_dsc_files,
3015 properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
3016 source_id = self.tbl_dsc_files.c.source,
3017 source = relation(DBSource),
3018 poolfile_id = self.tbl_dsc_files.c.file,
3019 poolfile = relation(PoolFile)))
3021 mapper(PoolFile, self.tbl_files,
3022 properties = dict(file_id = self.tbl_files.c.id,
3023 filesize = self.tbl_files.c.size,
3024 location_id = self.tbl_files.c.location,
3025 location = relation(Location,
3026 # using lazy='dynamic' in the back
3027 # reference because we have A LOT of
3028 # files in one location
3029 backref=backref('files', lazy='dynamic'))),
3030 extension = validator)
3032 mapper(Fingerprint, self.tbl_fingerprint,
3033 properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
3034 uid_id = self.tbl_fingerprint.c.uid,
3035 uid = relation(Uid),
3036 keyring_id = self.tbl_fingerprint.c.keyring,
3037 keyring = relation(Keyring),
3038 source_acl = relation(SourceACL),
3039 binary_acl = relation(BinaryACL)),
3040 extension = validator)
3042 mapper(Keyring, self.tbl_keyrings,
3043 properties = dict(keyring_name = self.tbl_keyrings.c.name,
3044 keyring_id = self.tbl_keyrings.c.id))
3046 mapper(DBChange, self.tbl_changes,
3047 properties = dict(change_id = self.tbl_changes.c.id,
3048 poolfiles = relation(PoolFile,
3049 secondary=self.tbl_changes_pool_files,
3050 backref="changeslinks"),
3051 seen = self.tbl_changes.c.seen,
3052 source = self.tbl_changes.c.source,
3053 binaries = self.tbl_changes.c.binaries,
3054 architecture = self.tbl_changes.c.architecture,
3055 distribution = self.tbl_changes.c.distribution,
3056 urgency = self.tbl_changes.c.urgency,
3057 maintainer = self.tbl_changes.c.maintainer,
3058 changedby = self.tbl_changes.c.changedby,
3059 date = self.tbl_changes.c.date,
3060 version = self.tbl_changes.c.version,
3061 files = relation(ChangePendingFile,
3062 secondary=self.tbl_changes_pending_files_map,
3063 backref="changesfile"),
3064 in_queue_id = self.tbl_changes.c.in_queue,
3065 in_queue = relation(PolicyQueue,
3066 primaryjoin=(self.tbl_changes.c.in_queue==self.tbl_policy_queue.c.id)),
3067 approved_for_id = self.tbl_changes.c.approved_for))
3069 mapper(ChangePendingBinary, self.tbl_changes_pending_binaries,
3070 properties = dict(change_pending_binary_id = self.tbl_changes_pending_binaries.c.id))
3072 mapper(ChangePendingFile, self.tbl_changes_pending_files,
3073 properties = dict(change_pending_file_id = self.tbl_changes_pending_files.c.id,
3074 filename = self.tbl_changes_pending_files.c.filename,
3075 size = self.tbl_changes_pending_files.c.size,
3076 md5sum = self.tbl_changes_pending_files.c.md5sum,
3077 sha1sum = self.tbl_changes_pending_files.c.sha1sum,
3078 sha256sum = self.tbl_changes_pending_files.c.sha256sum))
3080 mapper(ChangePendingSource, self.tbl_changes_pending_source,
3081 properties = dict(change_pending_source_id = self.tbl_changes_pending_source.c.id,
3082 change = relation(DBChange),
# Two relations to Maintainer from one table need explicit primaryjoins
# to disambiguate (maintainer vs. changedby), here and in Maintainer below.
3083 maintainer = relation(Maintainer,
3084 primaryjoin=(self.tbl_changes_pending_source.c.maintainer_id==self.tbl_maintainer.c.id)),
3085 changedby = relation(Maintainer,
3086 primaryjoin=(self.tbl_changes_pending_source.c.changedby_id==self.tbl_maintainer.c.id)),
3087 fingerprint = relation(Fingerprint),
3088 source_files = relation(ChangePendingFile,
3089 secondary=self.tbl_changes_pending_source_files,
3090 backref="pending_sources")))
3093 mapper(KeyringACLMap, self.tbl_keyring_acl_map,
3094 properties = dict(keyring_acl_map_id = self.tbl_keyring_acl_map.c.id,
3095 keyring = relation(Keyring, backref="keyring_acl_map"),
3096 architecture = relation(Architecture)))
3098 mapper(Location, self.tbl_location,
3099 properties = dict(location_id = self.tbl_location.c.id,
3100 component_id = self.tbl_location.c.component,
3101 component = relation(Component, \
3102 backref=backref('location', uselist = False)),
3103 archive_id = self.tbl_location.c.archive,
3104 archive = relation(Archive),
3105 # FIXME: the 'type' column is old cruft and
3106 # should be removed in the future.
3107 archive_type = self.tbl_location.c.type),
3108 extension = validator)
3110 mapper(Maintainer, self.tbl_maintainer,
3111 properties = dict(maintainer_id = self.tbl_maintainer.c.id,
3112 maintains_sources = relation(DBSource, backref='maintainer',
3113 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.maintainer)),
3114 changed_sources = relation(DBSource, backref='changedby',
3115 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.changedby))),
3116 extension = validator)
3118 mapper(NewComment, self.tbl_new_comments,
3119 properties = dict(comment_id = self.tbl_new_comments.c.id))
3121 mapper(Override, self.tbl_override,
3122 properties = dict(suite_id = self.tbl_override.c.suite,
3123 suite = relation(Suite),
3124 package = self.tbl_override.c.package,
3125 component_id = self.tbl_override.c.component,
3126 component = relation(Component),
3127 priority_id = self.tbl_override.c.priority,
3128 priority = relation(Priority),
3129 section_id = self.tbl_override.c.section,
3130 section = relation(Section),
3131 overridetype_id = self.tbl_override.c.type,
3132 overridetype = relation(OverrideType)))
3134 mapper(OverrideType, self.tbl_override_type,
3135 properties = dict(overridetype = self.tbl_override_type.c.type,
3136 overridetype_id = self.tbl_override_type.c.id))
3138 mapper(PolicyQueue, self.tbl_policy_queue,
3139 properties = dict(policy_queue_id = self.tbl_policy_queue.c.id))
3141 mapper(Priority, self.tbl_priority,
3142 properties = dict(priority_id = self.tbl_priority.c.id))
3144 mapper(Section, self.tbl_section,
3145 properties = dict(section_id = self.tbl_section.c.id,
3146 section=self.tbl_section.c.section))
3148 mapper(DBSource, self.tbl_source,
3149 properties = dict(source_id = self.tbl_source.c.id,
3150 version = self.tbl_source.c.version,
3151 maintainer_id = self.tbl_source.c.maintainer,
3152 poolfile_id = self.tbl_source.c.file,
3153 poolfile = relation(PoolFile, backref=backref('source', uselist = False)),
3154 fingerprint_id = self.tbl_source.c.sig_fpr,
3155 fingerprint = relation(Fingerprint),
3156 changedby_id = self.tbl_source.c.changedby,
3157 srcfiles = relation(DSCFile,
3158 primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
3159 suites = relation(Suite, secondary=self.tbl_src_associations,
3160 backref=backref('sources', lazy='dynamic')),
3161 srcuploaders = relation(SrcUploader)),
3162 extension = validator)
3164 mapper(SourceACL, self.tbl_source_acl,
3165 properties = dict(source_acl_id = self.tbl_source_acl.c.id))
3167 mapper(SrcFormat, self.tbl_src_format,
3168 properties = dict(src_format_id = self.tbl_src_format.c.id,
3169 format_name = self.tbl_src_format.c.format_name))
3171 mapper(SrcUploader, self.tbl_src_uploaders,
3172 properties = dict(uploader_id = self.tbl_src_uploaders.c.id,
3173 source_id = self.tbl_src_uploaders.c.source,
3174 source = relation(DBSource,
3175 primaryjoin=(self.tbl_src_uploaders.c.source==self.tbl_source.c.id)),
3176 maintainer_id = self.tbl_src_uploaders.c.maintainer,
3177 maintainer = relation(Maintainer,
3178 primaryjoin=(self.tbl_src_uploaders.c.maintainer==self.tbl_maintainer.c.id))))
3180 mapper(Suite, self.tbl_suite,
3181 properties = dict(suite_id = self.tbl_suite.c.id,
3182 policy_queue = relation(PolicyQueue),
3183 copy_queues = relation(BuildQueue,
3184 secondary=self.tbl_suite_build_queue_copy)),
3185 extension = validator)
3187 mapper(SuiteSrcFormat, self.tbl_suite_src_formats,
3188 properties = dict(suite_id = self.tbl_suite_src_formats.c.suite,
3189 suite = relation(Suite, backref='suitesrcformats'),
3190 src_format_id = self.tbl_suite_src_formats.c.src_format,
3191 src_format = relation(SrcFormat)))
3193 mapper(Uid, self.tbl_uid,
3194 properties = dict(uid_id = self.tbl_uid.c.id,
3195 fingerprint = relation(Fingerprint)),
3196 extension = validator)
3198 mapper(UploadBlock, self.tbl_upload_blocks,
3199 properties = dict(upload_block_id = self.tbl_upload_blocks.c.id,
3200 fingerprint = relation(Fingerprint, backref="uploadblocks"),
3201 uid = relation(Uid, backref="uploadblocks")))
3203 ## Connection functions
# Build the postgres connection string from Config and create the engine,
# metadata and sessionmaker, then run reflection and mapper setup.
# NOTE(review): the branch condition between the two connstr forms (TCP via
# DB::Host vs. local socket) is elided in this listing — confirm upstream.
3204 def __createconn(self):
3205 from config import Config
3209 connstr = "postgres://%s" % cnf["DB::Host"]
3210 if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
3211 connstr += ":%s" % cnf["DB::Port"]
3212 connstr += "/%s" % cnf["DB::Name"]
3215 connstr = "postgres:///%s" % cnf["DB::Name"]
3216 if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
3217 connstr += "?port=%s" % cnf["DB::Port"]
# 'echo=self.debug' makes SQLAlchemy log every statement when debug is on.
3219 self.db_pg = create_engine(connstr, echo=self.debug)
3220 self.db_meta = MetaData()
3221 self.db_meta.bind = self.db_pg
3222 self.db_smaker = sessionmaker(bind=self.db_pg,
3226 self.__setuptables()
3227 self.__setupmappers()
# NOTE(review): the enclosing method header (presumably a session() factory)
# is elided in this listing; this returns a new Session from the sessionmaker.
3230 return self.db_smaker()
3232 __all__.append('DBConn')