5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
47 import simplejson as json
49 from datetime import datetime, timedelta
50 from errno import ENOENT
51 from tempfile import mkstemp, mkdtemp
53 from inspect import getargspec
56 from sqlalchemy import create_engine, Table, MetaData, Column, Integer
57 from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, \
58 backref, MapperExtension, EXT_CONTINUE
59 from sqlalchemy import types as sqltypes
61 # Don't remove this, we re-export the exceptions to scripts which import us
62 from sqlalchemy.exc import *
63 from sqlalchemy.orm.exc import NoResultFound
65 # Only import Config until Queue stuff is changed to store its config
67 from config import Config
68 from textutils import fix_maintainer
69 from dak_exceptions import DBUpdateError, NoSourceFieldError
71 # suppress some deprecation warnings in squeeze related to sqlalchemy
73 warnings.filterwarnings('ignore', \
74 "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'.*", \
76 # TODO: sqlalchemy needs some extra configuration to correctly reflect
77 # the ind_deb_contents_* indexes - we ignore the warnings at the moment
78 warnings.filterwarnings("ignore", 'Predicate of partial index', SAWarning)
81 ################################################################################
83 # Patch in support for the debversion field type so that it works during
87 # that is for sqlalchemy 0.6
88 UserDefinedType = sqltypes.UserDefinedType
90 # this one for sqlalchemy 0.5
91 UserDefinedType = sqltypes.TypeEngine
93 class DebVersion(UserDefinedType):
94 def get_col_spec(self):
97 def bind_processor(self, dialect):
100 # ' = None' is needed for sqlalchemy 0.5:
101 def result_processor(self, dialect, coltype = None):
104 sa_major_version = sqlalchemy.__version__[0:3]
105 if sa_major_version in ["0.5", "0.6"]:
106 from sqlalchemy.databases import postgres
107 postgres.ischema_names['debversion'] = DebVersion
109 raise Exception("dak only ported to SQLA versions 0.5 and 0.6. See daklib/dbconn.py")
111 ################################################################################
113 __all__ = ['IntegrityError', 'SQLAlchemyError', 'DebVersion']
115 ################################################################################
117 def session_wrapper(fn):
119 Wrapper around common ".., session=None):" handling. If the wrapped
120 function is called without passing 'session', we create a local one
121 and destroy it when the function ends.
123 Also attaches a commit_or_flush method to the session; if we created a
124 local session, this is a synonym for session.commit(), otherwise it is a
125 synonym for session.flush().
128 def wrapped(*args, **kwargs):
129 private_transaction = False
131 # Find the session object
132 session = kwargs.get('session')
135 if len(args) <= len(getargspec(fn)[0]) - 1:
136 # No session specified as last argument or in kwargs
137 private_transaction = True
138 session = kwargs['session'] = DBConn().session()
140 # Session is last argument in args
144 session = args[-1] = DBConn().session()
145 private_transaction = True
147 if private_transaction:
148 session.commit_or_flush = session.commit
150 session.commit_or_flush = session.flush
153 return fn(*args, **kwargs)
155 if private_transaction:
156 # We created a session; close it.
159 wrapped.__doc__ = fn.__doc__
160 wrapped.func_name = fn.func_name
164 __all__.append('session_wrapper')
166 ################################################################################
168 class ORMObject(object):
170 ORMObject is a base class for all ORM classes mapped by SQLalchemy. All
171 derived classes must implement the properties() method.
174 def properties(self):
176 This method should be implemented by all derived classes and returns a
177 list of the important properties. The properties 'created' and
178 'modified' will be added automatically. A suffix '_count' should be
179 added to properties that are lists or query objects. The most important
180 property name should be returned as the first element in the list
181 because it is used by repr().
187 Returns a JSON representation of the object based on the properties
188 returned from the properties() method.
191 # add created and modified
192 all_properties = self.properties() + ['created', 'modified']
193 for property in all_properties:
194 # check for list or query
195 if property[-6:] == '_count':
196 real_property = property[:-6]
197 if not hasattr(self, real_property):
199 value = getattr(self, real_property)
200 if hasattr(value, '__len__'):
203 elif hasattr(value, 'count'):
205 value = value.count()
207 raise KeyError('Do not understand property %s.' % property)
209 if not hasattr(self, property):
212 value = getattr(self, property)
216 elif isinstance(value, ORMObject):
217 # use repr() for ORMObject types
220 # we want a string for all other types because json cannot
223 data[property] = value
224 return json.dumps(data)
228 Returns the name of the class.
230 return type(self).__name__
234 Returns a short string representation of the object using the first
235 element from the properties() method.
237 primary_property = self.properties()[0]
238 value = getattr(self, primary_property)
239 return '<%s %s>' % (self.classname(), str(value))
243 Returns a human readable form of the object using the properties()
246 return '<%s %s>' % (self.classname(), self.json())
248 def not_null_constraints(self):
250 Returns a list of properties that must be not NULL. Derived classes
251 should override this method if needed.
255 validation_message = \
256 "Validation failed because property '%s' must not be empty in object\n%s"
260 This function validates the not NULL constraints as returned by
261 not_null_constraints(). It raises the DBUpdateError exception if
264 for property in self.not_null_constraints():
265 # TODO: It is a bit awkward that the mapper configuration allow
266 # directly setting the numeric _id columns. We should get rid of it
268 if hasattr(self, property + '_id') and \
269 getattr(self, property + '_id') is not None:
271 if not hasattr(self, property) or getattr(self, property) is None:
272 raise DBUpdateError(self.validation_message % \
273 (property, str(self)))
277 def get(cls, primary_key, session = None):
279 This is a support function that allows getting an object by its primary
282 Architecture.get(3[, session])
284 instead of the more verbose
286 session.query(Architecture).get(3)
288 return session.query(cls).get(primary_key)
290 __all__.append('ORMObject')
292 ################################################################################
class Validator(MapperExtension):
    '''
    This class calls the validate() method for each instance for the
    'before_update' and 'before_insert' events. A global object validator is
    used for configuring the individual mappers.
    '''

    def before_update(self, mapper, connection, instance):
        # Run the object's own not-NULL validation before the row is written.
        instance.validate()
        return EXT_CONTINUE

    def before_insert(self, mapper, connection, instance):
        # Same check before a new row is inserted.
        instance.validate()
        return EXT_CONTINUE

# Shared extension instance handed to the individual mapper() calls.
validator = Validator()
311 ################################################################################
class Architecture(ORMObject):
    """ORM class for a single row of the 'architecture' table."""

    def __init__(self, arch_string = None, description = None):
        self.arch_string = arch_string
        self.description = description

    def __eq__(self, val):
        # Allow comparing an Architecture directly against a plain name.
        if not isinstance(val, str):
            # Tell Python to fall back to the normal comparison operator.
            return NotImplemented
        return self.arch_string == val

    def __ne__(self, val):
        if not isinstance(val, str):
            return NotImplemented
        return self.arch_string != val

    def properties(self):
        # 'arch_string' comes first because ORMObject.__repr__() uses it.
        return ['arch_string', 'arch_id', 'suites_count']

    def not_null_constraints(self):
        return ['arch_string']
336 __all__.append('Architecture')
def get_architecture(architecture, session=None):
    """
    Returns database id for given C{architecture}.

    @type architecture: string
    @param architecture: The name of the architecture

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Architecture
    @return: Architecture object for the given arch (None if not present)
    """
    # NOTE(review): in dak this helper is normally decorated with
    # @session_wrapper so a temporary session is created when none is
    # passed -- confirm the decorator was not lost in this copy.
    q = session.query(Architecture).filter_by(arch_string=architecture)
    try:
        return q.one()
    except NoResultFound:
        return None
361 __all__.append('get_architecture')
363 # TODO: should be removed because the implementation is too trivial
def get_architecture_suites(architecture, session=None):
    """
    Returns list of Suite objects for given C{architecture} name

    @type architecture: str
    @param architecture: Architecture name to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of Suite objects for the given name (may be empty)
    """
    arch = get_architecture(architecture, session)
    if arch is None:
        # Fix: an unknown architecture used to raise AttributeError on
        # None; honour the documented contract and return an empty list.
        return []
    return arch.suites
382 __all__.append('get_architecture_suites')
384 ################################################################################
class Archive(object):
    """Trivial ORM mapping for one row of the 'archive' table."""

    def __init__(self, *args, **kwargs):
        # Attributes are filled in by the SQLAlchemy mapper.
        pass

    def __repr__(self):
        return '<Archive %s>' % self.archive_name
393 __all__.append('Archive')
def get_archive(archive, session=None):
    """
    returns database id for given C{archive}.

    @type archive: string
    @param archive: the name of the archive

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Archive
    @return: Archive object for the given name (None if not present)
    """
    # Archive names are matched case-insensitively; the DB stores lower case.
    archive = archive.lower()

    # NOTE(review): normally wrapped by @session_wrapper in dak -- confirm
    # the decorator was not lost in this copy.
    q = session.query(Archive).filter_by(archive_name=archive)
    try:
        return q.one()
    except NoResultFound:
        return None
420 __all__.append('get_archive')
422 ################################################################################
class BinContents(object):
    """Association between a binary package and one contents filename."""

    def __init__(self, *args, **kwargs):
        # Attributes are filled in by the SQLAlchemy mapper.
        pass

    def __repr__(self):
        return '<BinContents (%s, %s)>' % (self.binary, self.filename)
431 __all__.append('BinContents')
433 ################################################################################
435 class DBBinary(ORMObject):
436 def __init__(self, package = None, source = None, version = None, \
437 maintainer = None, architecture = None, poolfile = None, \
439 self.package = package
441 self.version = version
442 self.maintainer = maintainer
443 self.architecture = architecture
444 self.poolfile = poolfile
445 self.binarytype = binarytype
447 def properties(self):
448 return ['package', 'version', 'maintainer', 'source', 'architecture', \
449 'poolfile', 'binarytype', 'fingerprint', 'install_date', \
450 'suites_count', 'binary_id']
452 def not_null_constraints(self):
453 return ['package', 'version', 'maintainer', 'source', 'poolfile', \
456 __all__.append('DBBinary')
459 def get_suites_binary_in(package, session=None):
461 Returns list of Suite objects which given C{package} name is in
464 @param package: DBBinary package name to search for
467 @return: list of Suite objects for the given package
470 return session.query(Suite).filter(Suite.binaries.any(DBBinary.package == package)).all()
472 __all__.append('get_suites_binary_in')
def get_binary_from_name_suite(package, suitename, session=None):
    ### For dak examine-package
    ### XXX: Doesn't use object API yet
    """
    Returns (package, version, component, suite_name) rows for C{package}
    in the suites matched by the C{suitename} fragment.

    @type package: string
    @param package: binary package name (passed as a bind parameter, so it
    is safe against SQL injection)

    @type suitename: string
    @param suitename: a trusted SQL operator fragment applied to
    su.suite_name (e.g. "= 'unstable'"); it is interpolated verbatim into
    the query and MUST NOT come from untrusted input
    """
    sql = """SELECT DISTINCT(b.package), b.version, c.name, su.suite_name
             FROM binaries b, files fi, location l, component c, bin_associations ba, suite su
             WHERE b.package = :package
               AND b.file = fi.id
               AND fi.location = l.id
               AND l.component = c.id
               AND ba.bin = b.id
               AND ba.suite = su.id
               AND su.suite_name %(suitename)s
          ORDER BY b.version DESC""" % {'suitename': suitename}

    # Fix: the package name used to be %-interpolated directly into the SQL
    # string (SQL injection risk); it is now passed as a bind parameter.
    return session.execute(sql, {'package': package})
492 __all__.append('get_binary_from_name_suite')
def get_binary_components(package, suitename, arch, session=None):
    """
    Returns the component names that C{package} appears in for C{suitename}
    and C{arch} (or 'all'). Used to spot packages that have moved from one
    component to another.
    """
    # Check for packages that have moved from one component to another
    # All user-supplied values go in as bind parameters.
    query = """SELECT c.name FROM binaries b, bin_associations ba, suite s, location l, component c, architecture a, files f
    WHERE b.package=:package AND s.suite_name=:suitename
      AND (a.arch_string = :arch OR a.arch_string = 'all')
      AND ba.bin = b.id AND ba.suite = s.id AND b.architecture = a.id
      AND f.location = l.id
      AND l.component = c.id
      AND b.file = f.id"""

    vals = {'package': package, 'suitename': suitename, 'arch': arch}

    return session.execute(query, vals)
509 __all__.append('get_binary_components')
511 ################################################################################
class BinaryACL(object):
    """Trivial ORM mapping for one row of the 'binary_acl' table."""

    def __init__(self, *args, **kwargs):
        # Attributes are filled in by the SQLAlchemy mapper.
        pass

    def __repr__(self):
        return '<BinaryACL %s>' % self.binary_acl_id
520 __all__.append('BinaryACL')
522 ################################################################################
class BinaryACLMap(object):
    """Trivial ORM mapping for one row of the 'binary_acl_map' table."""

    def __init__(self, *args, **kwargs):
        # Attributes are filled in by the SQLAlchemy mapper.
        pass

    def __repr__(self):
        return '<BinaryACLMap %s>' % self.binary_acl_map_id
531 __all__.append('BinaryACLMap')
533 ################################################################################
538 ArchiveDir "%(archivepath)s";
539 OverrideDir "%(overridedir)s";
540 CacheDir "%(cachedir)s";
545 Packages::Compress ". bzip2 gzip";
546 Sources::Compress ". bzip2 gzip";
551 bindirectory "incoming"
556 BinOverride "override.sid.all3";
557 BinCacheDB "packages-accepted.db";
559 FileList "%(filelist)s";
562 Packages::Extensions ".deb .udeb";
565 bindirectory "incoming/"
568 BinOverride "override.sid.all3";
569 SrcOverride "override.sid.all3.src";
570 FileList "%(filelist)s";
574 class BuildQueue(object):
575 def __init__(self, *args, **kwargs):
579 return '<BuildQueue %s>' % self.queue_name
581 def write_metadata(self, starttime, force=False):
582 # Do we write out metafiles?
583 if not (force or self.generate_metadata):
586 session = DBConn().session().object_session(self)
588 fl_fd = fl_name = ac_fd = ac_name = None
590 arches = " ".join([ a.arch_string for a in session.query(Architecture).all() if a.arch_string != 'source' ])
591 startdir = os.getcwd()
594 # Grab files we want to include
595 newer = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) > starttime).all()
596 # Write file list with newer files
597 (fl_fd, fl_name) = mkstemp()
599 os.write(fl_fd, '%s\n' % n.fullpath)
604 # Write minimal apt.conf
605 # TODO: Remove hardcoding from template
606 (ac_fd, ac_name) = mkstemp()
607 os.write(ac_fd, MINIMAL_APT_CONF % {'archivepath': self.path,
609 'cachedir': cnf["Dir::Cache"],
610 'overridedir': cnf["Dir::Override"],
614 # Run apt-ftparchive generate
615 os.chdir(os.path.dirname(ac_name))
616 os.system('apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate %s' % os.path.basename(ac_name))
618 # Run apt-ftparchive release
619 # TODO: Eww - fix this
620 bname = os.path.basename(self.path)
624 # We have to remove the Release file otherwise it'll be included in the
627 os.unlink(os.path.join(bname, 'Release'))
631 os.system("""apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="%s" -o APT::FTPArchive::Release::Label="%s" -o APT::FTPArchive::Release::Description="%s" -o APT::FTPArchive::Release::Architectures="%s" release %s > Release""" % (self.origin, self.label, self.releasedescription, arches, bname))
633 # Crude hack with open and append, but this whole section is and should be redone.
634 if self.notautomatic:
635 release=open("Release", "a")
636 release.write("NotAutomatic: yes")
641 keyring = "--secret-keyring \"%s\"" % cnf["Dinstall::SigningKeyring"]
642 if cnf.has_key("Dinstall::SigningPubKeyring"):
643 keyring += " --keyring \"%s\"" % cnf["Dinstall::SigningPubKeyring"]
645 os.system("gpg %s --no-options --batch --no-tty --armour --default-key %s --detach-sign -o Release.gpg Release""" % (keyring, self.signingkey))
647 # Move the files if we got this far
648 os.rename('Release', os.path.join(bname, 'Release'))
650 os.rename('Release.gpg', os.path.join(bname, 'Release.gpg'))
652 # Clean up any left behind files
679 def clean_and_update(self, starttime, Logger, dryrun=False):
680 """WARNING: This routine commits for you"""
681 session = DBConn().session().object_session(self)
683 if self.generate_metadata and not dryrun:
684 self.write_metadata(starttime)
686 # Grab files older than our execution time
687 older = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()
693 Logger.log(["I: Would have removed %s from the queue" % o.fullpath])
695 Logger.log(["I: Removing %s from the queue" % o.fullpath])
696 os.unlink(o.fullpath)
699 # If it wasn't there, don't worry
700 if e.errno == ENOENT:
703 # TODO: Replace with proper logging call
704 Logger.log(["E: Could not remove %s" % o.fullpath])
711 for f in os.listdir(self.path):
712 if f.startswith('Packages') or f.startswith('Source') or f.startswith('Release') or f.startswith('advisory'):
716 r = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter_by(filename = f).one()
717 except NoResultFound:
718 fp = os.path.join(self.path, f)
720 Logger.log(["I: Would remove unused link %s" % fp])
722 Logger.log(["I: Removing unused link %s" % fp])
726 Logger.log(["E: Failed to unlink unreferenced file %s" % r.fullpath])
728 def add_file_from_pool(self, poolfile):
729 """Copies a file into the pool. Assumes that the PoolFile object is
730 attached to the same SQLAlchemy session as the Queue object is.
732 The caller is responsible for committing after calling this function."""
733 poolfile_basename = poolfile.filename[poolfile.filename.rindex(os.sep)+1:]
735 # Check if we have a file of this name or this ID already
736 for f in self.queuefiles:
737 if f.fileid is not None and f.fileid == poolfile.file_id or \
738 f.poolfile.filename == poolfile_basename:
739 # In this case, update the BuildQueueFile entry so we
740 # don't remove it too early
741 f.lastused = datetime.now()
742 DBConn().session().object_session(poolfile).add(f)
745 # Prepare BuildQueueFile object
746 qf = BuildQueueFile()
747 qf.build_queue_id = self.queue_id
748 qf.lastused = datetime.now()
749 qf.filename = poolfile_basename
751 targetpath = poolfile.fullpath
752 queuepath = os.path.join(self.path, poolfile_basename)
756 # We need to copy instead of symlink
758 utils.copy(targetpath, queuepath)
759 # NULL in the fileid field implies a copy
762 os.symlink(targetpath, queuepath)
763 qf.fileid = poolfile.file_id
767 # Get the same session as the PoolFile is using and add the qf to it
768 DBConn().session().object_session(poolfile).add(qf)
773 __all__.append('BuildQueue')
776 def get_build_queue(queuename, session=None):
778 Returns BuildQueue object for given C{queue name}, creating it if it does not
781 @type queuename: string
782 @param queuename: The name of the queue
784 @type session: Session
785 @param session: Optional SQLA session object (a temporary one will be
786 generated if not supplied)
789 @return: BuildQueue object for the given queue
792 q = session.query(BuildQueue).filter_by(queue_name=queuename)
796 except NoResultFound:
799 __all__.append('get_build_queue')
801 ################################################################################
class BuildQueueFile(object):
    """One file living in a build queue directory."""

    def __init__(self, *args, **kwargs):
        # Attributes are filled in by the SQLAlchemy mapper.
        pass

    def __repr__(self):
        return '<BuildQueueFile %s (%s)>' % (self.filename, self.build_queue_id)

    @property
    def fullpath(self):
        # Absolute path of this file inside its build queue directory.
        return os.path.join(self.buildqueue.path, self.filename)
815 __all__.append('BuildQueueFile')
817 ################################################################################
819 class ChangePendingBinary(object):
820 def __init__(self, *args, **kwargs):
824 return '<ChangePendingBinary %s>' % self.change_pending_binary_id
826 __all__.append('ChangePendingBinary')
828 ################################################################################
830 class ChangePendingFile(object):
831 def __init__(self, *args, **kwargs):
835 return '<ChangePendingFile %s>' % self.change_pending_file_id
837 __all__.append('ChangePendingFile')
839 ################################################################################
841 class ChangePendingSource(object):
842 def __init__(self, *args, **kwargs):
846 return '<ChangePendingSource %s>' % self.change_pending_source_id
848 __all__.append('ChangePendingSource')
850 ################################################################################
852 class Component(ORMObject):
853 def __init__(self, component_name = None):
854 self.component_name = component_name
856 def __eq__(self, val):
857 if isinstance(val, str):
858 return (self.component_name == val)
859 # This signals to use the normal comparison operator
860 return NotImplemented
862 def __ne__(self, val):
863 if isinstance(val, str):
864 return (self.component_name != val)
865 # This signals to use the normal comparison operator
866 return NotImplemented
868 def properties(self):
869 return ['component_name', 'component_id', 'description', 'location', \
872 def not_null_constraints(self):
873 return ['component_name']
876 __all__.append('Component')
def get_component(component, session=None):
    """
    Returns database id for given C{component}.

    @type component: string
    @param component: The name of the component

    @rtype: int
    @return: the database id for the given component
    """
    # Component names are matched case-insensitively; the DB stores lower case.
    component = component.lower()

    # NOTE(review): normally wrapped by @session_wrapper in dak -- confirm
    # the decorator was not lost in this copy.
    q = session.query(Component).filter_by(component_name=component)
    try:
        return q.one()
    except NoResultFound:
        return None
899 __all__.append('get_component')
901 ################################################################################
class DBConfig(object):
    """Trivial ORM mapping for one row of the 'config' table."""

    def __init__(self, *args, **kwargs):
        # Attributes are filled in by the SQLAlchemy mapper.
        pass

    def __repr__(self):
        return '<DBConfig %s>' % self.name
910 __all__.append('DBConfig')
912 ################################################################################
915 def get_or_set_contents_file_id(filename, session=None):
917 Returns database id for given filename.
919 If no matching file is found, a row is inserted.
921 @type filename: string
922 @param filename: The filename
923 @type session: SQLAlchemy
924 @param session: Optional SQL session object (a temporary one will be
925 generated if not supplied). If not passed, a commit will be performed at
926 the end of the function, otherwise the caller is responsible for commiting.
929 @return: the database id for the given component
932 q = session.query(ContentFilename).filter_by(filename=filename)
935 ret = q.one().cafilename_id
936 except NoResultFound:
937 cf = ContentFilename()
938 cf.filename = filename
940 session.commit_or_flush()
941 ret = cf.cafilename_id
945 __all__.append('get_or_set_contents_file_id')
948 def get_contents(suite, overridetype, section=None, session=None):
950 Returns contents for a suite / overridetype combination, limiting
951 to a section if not None.
954 @param suite: Suite object
956 @type overridetype: OverrideType
957 @param overridetype: OverrideType object
959 @type section: Section
960 @param section: Optional section object to limit results to
962 @type session: SQLAlchemy
963 @param session: Optional SQL session object (a temporary one will be
964 generated if not supplied)
967 @return: ResultsProxy object set up to return tuples of (filename, section,
971 # find me all of the contents for a given suite
972 contents_q = """SELECT (p.path||'/'||n.file) AS fn,
976 FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
977 JOIN content_file_names n ON (c.filename=n.id)
978 JOIN binaries b ON (b.id=c.binary_pkg)
979 JOIN override o ON (o.package=b.package)
980 JOIN section s ON (s.id=o.section)
981 WHERE o.suite = :suiteid AND o.type = :overridetypeid
982 AND b.type=:overridetypename"""
984 vals = {'suiteid': suite.suite_id,
985 'overridetypeid': overridetype.overridetype_id,
986 'overridetypename': overridetype.overridetype}
988 if section is not None:
989 contents_q += " AND s.id = :sectionid"
990 vals['sectionid'] = section.section_id
992 contents_q += " ORDER BY fn"
994 return session.execute(contents_q, vals)
996 __all__.append('get_contents')
998 ################################################################################
1000 class ContentFilepath(object):
1001 def __init__(self, *args, **kwargs):
1005 return '<ContentFilepath %s>' % self.filepath
1007 __all__.append('ContentFilepath')
1010 def get_or_set_contents_path_id(filepath, session=None):
1012 Returns database id for given path.
1014 If no matching file is found, a row is inserted.
1016 @type filepath: string
1017 @param filepath: The filepath
1019 @type session: SQLAlchemy
1020 @param session: Optional SQL session object (a temporary one will be
1021 generated if not supplied). If not passed, a commit will be performed at
1022 the end of the function, otherwise the caller is responsible for commiting.
1025 @return: the database id for the given path
1028 q = session.query(ContentFilepath).filter_by(filepath=filepath)
1031 ret = q.one().cafilepath_id
1032 except NoResultFound:
1033 cf = ContentFilepath()
1034 cf.filepath = filepath
1036 session.commit_or_flush()
1037 ret = cf.cafilepath_id
1041 __all__.append('get_or_set_contents_path_id')
1043 ################################################################################
1045 class ContentAssociation(object):
1046 def __init__(self, *args, **kwargs):
1050 return '<ContentAssociation %s>' % self.ca_id
1052 __all__.append('ContentAssociation')
1054 def insert_content_paths(binary_id, fullpaths, session=None):
1056 Make sure given path is associated with given binary id
1058 @type binary_id: int
1059 @param binary_id: the id of the binary
1060 @type fullpaths: list
1061 @param fullpaths: the list of paths of the file being associated with the binary
1062 @type session: SQLAlchemy session
1063 @param session: Optional SQLAlchemy session. If this is passed, the caller
1064 is responsible for ensuring a transaction has begun and committing the
1065 results or rolling back based on the result code. If not passed, a commit
1066 will be performed at the end of the function, otherwise the caller is
1067 responsible for commiting.
1069 @return: True upon success
1072 privatetrans = False
1074 session = DBConn().session()
1079 def generate_path_dicts():
1080 for fullpath in fullpaths:
1081 if fullpath.startswith( './' ):
1082 fullpath = fullpath[2:]
1084 yield {'filename':fullpath, 'id': binary_id }
1086 for d in generate_path_dicts():
1087 session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
1096 traceback.print_exc()
1098 # Only rollback if we set up the session ourself
1105 __all__.append('insert_content_paths')
1107 ################################################################################
1109 class DSCFile(object):
1110 def __init__(self, *args, **kwargs):
1114 return '<DSCFile %s>' % self.dscfile_id
1116 __all__.append('DSCFile')
1119 def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
1121 Returns a list of DSCFiles which may be empty
1123 @type dscfile_id: int (optional)
1124 @param dscfile_id: the dscfile_id of the DSCFiles to find
1126 @type source_id: int (optional)
1127 @param source_id: the source id related to the DSCFiles to find
1129 @type poolfile_id: int (optional)
1130 @param poolfile_id: the poolfile id related to the DSCFiles to find
1133 @return: Possibly empty list of DSCFiles
1136 q = session.query(DSCFile)
1138 if dscfile_id is not None:
1139 q = q.filter_by(dscfile_id=dscfile_id)
1141 if source_id is not None:
1142 q = q.filter_by(source_id=source_id)
1144 if poolfile_id is not None:
1145 q = q.filter_by(poolfile_id=poolfile_id)
1149 __all__.append('get_dscfiles')
1151 ################################################################################
class PoolFile(ORMObject):
    """ORM class for one file stored in the pool ('files' table)."""

    def __init__(self, filename = None, location = None, filesize = -1, \
        md5sum = None):
        self.filename = filename
        self.location = location
        self.filesize = filesize
        self.md5sum = md5sum

    @property
    def fullpath(self):
        # Absolute path: the location's base path plus the pool filename.
        return os.path.join(self.location.path, self.filename)

    def is_valid(self, filesize = -1, md5sum = None):
        # Cheap sanity check of size and checksum against stored values.
        return self.filesize == filesize and self.md5sum == md5sum

    def properties(self):
        # 'filename' comes first because ORMObject.__repr__() uses it.
        return ['filename', 'file_id', 'filesize', 'md5sum', 'sha1sum', \
            'sha256sum', 'location', 'source', 'binary', 'last_used']

    def not_null_constraints(self):
        return ['filename', 'md5sum', 'location']
1175 __all__.append('PoolFile')
def check_poolfile(filename, filesize, md5sum, location_id, session=None):
    """
    Returns a tuple:
    (ValidFileFound [boolean], PoolFile object or None)

    @type filename: string
    @param filename: the filename of the file to check against the DB

    @type filesize: int
    @param filesize: the size of the file to check against the DB

    @type md5sum: string
    @param md5sum: the md5sum of the file to check against the DB

    @type location_id: int
    @param location_id: the id of the location to look in

    @rtype: tuple
    @return: Tuple of length 2.
                 - If valid pool file found: (C{True}, C{PoolFile object})
                 - If valid pool file not found:
                     - (C{False}, C{None}) if no file found
                     - (C{False}, C{PoolFile object}) if file found with size/md5sum mismatch
    """
    # NOTE(review): normally wrapped by @session_wrapper in dak -- confirm
    # the decorator was not lost in this copy.
    location = session.query(Location).get(location_id)
    poolfile = location.files.filter_by(filename=filename).first()
    valid = bool(poolfile and poolfile.is_valid(filesize = filesize, md5sum = md5sum))
    return (valid, poolfile)
1211 __all__.append('check_poolfile')
1213 # TODO: the implementation can trivially be inlined at the place where the
1214 # function is called
1216 def get_poolfile_by_id(file_id, session=None):
1218 Returns a PoolFile objects or None for the given id
1221 @param file_id: the id of the file to look for
1223 @rtype: PoolFile or None
1224 @return: either the PoolFile object or None
1227 return session.query(PoolFile).get(file_id)
1229 __all__.append('get_poolfile_by_id')
1232 def get_poolfile_like_name(filename, session=None):
1234 Returns an array of PoolFile objects which are like the given name
1236 @type filename: string
1237 @param filename: the filename of the file to check against the DB
1240 @return: array of PoolFile objects
1243 # TODO: There must be a way of properly using bind parameters with %FOO%
1244 q = session.query(PoolFile).filter(PoolFile.filename.like('%%/%s' % filename))
1248 __all__.append('get_poolfile_like_name')
def add_poolfile(filename, datadict, location_id, session=None):
    """
    Add a new file to the pool

    @type filename: string
    @param filename: filename

    @type datadict: dict
    @param datadict: dict with needed data

    @type location_id: int
    @param location_id: database id of the location

    @rtype: PoolFile
    @return: the PoolFile object created
    """
    poolfile = PoolFile()
    poolfile.filename = filename
    poolfile.filesize = datadict["size"]
    poolfile.md5sum = datadict["md5sum"]
    poolfile.sha1sum = datadict["sha1sum"]
    poolfile.sha256sum = datadict["sha256sum"]
    poolfile.location_id = location_id

    session.add(poolfile)
    # Flush to get a file id (NB: This is not a commit)

__all__.append('add_poolfile')

################################################################################
class Fingerprint(ORMObject):
    """ORM class for one PGP key fingerprint row."""

    def __init__(self, fingerprint = None):
        self.fingerprint = fingerprint

    def properties(self):
        # Attribute names exposed through ORMObject's generic support.
        # NOTE(review): the continuation of this list is not shown in this
        # excerpt.
        return ['fingerprint', 'fingerprint_id', 'keyring', 'uid', \

    def not_null_constraints(self):
        # Columns that must never be NULL for a valid row.
        return ['fingerprint']

__all__.append('Fingerprint')
def get_fingerprint(fpr, session=None):
    """
    Returns Fingerprint object for given fpr.

    @type fpr: string
    @param fpr: The fpr to find / add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).

    @rtype: Fingerprint
    @return: the Fingerprint object for the given fpr or None
    """
    q = session.query(Fingerprint).filter_by(fingerprint=fpr)

    # NOTE(review): the try/.one() lines preceding this handler are not
    # shown in this excerpt.
    except NoResultFound:

__all__.append('get_fingerprint')
def get_or_set_fingerprint(fpr, session=None):
    """
    Returns Fingerprint object for given fpr.

    If no matching fpr is found, a row is inserted.

    @type fpr: string
    @param fpr: The fpr to find / add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.
    A flush will be performed either way.

    @rtype: Fingerprint
    @return: the Fingerprint object for the given fpr
    """
    q = session.query(Fingerprint).filter_by(fingerprint=fpr)

    except NoResultFound:
        # No row yet: insert one; commit_or_flush makes the new id visible
        # without necessarily committing the caller's transaction.
        fingerprint = Fingerprint()
        fingerprint.fingerprint = fpr
        session.add(fingerprint)
        session.commit_or_flush()

__all__.append('get_or_set_fingerprint')

################################################################################
# Helper routine for Keyring class
def get_ldap_name(entry):
    """Build a display name by joining the cn/mn/sn parts of an LDAP entry."""
    for k in ["cn", "mn", "sn"]:
        # NOTE(review): the lines fetching 'ret' for key k and appending to
        # 'name' are not shown in this excerpt. Empty values and the "-"
        # placeholder are skipped.
        if ret and ret[0] != "" and ret[0] != "-":
    return " ".join(name)

################################################################################
class Keyring(object):
    """Represents one GPG keyring: parses key listings and derives user ids."""

    # gpg command template; the single %s is substituted with the keyring path.
    gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
                     " --with-colons --fingerprint --fingerprint"

    def __init__(self, *args, **kwargs):
        # NOTE(review): this return belongs to a __repr__ whose 'def' line is
        # not shown in this excerpt.
        return '<Keyring %s>' % self.keyring_name

    def de_escape_gpg_str(self, txt):
        # Decode the \xNN escape sequences gpg emits in uid fields: every odd
        # element of the split is an escape token, replaced by its character.
        esclist = re.split(r'(\\x..)', txt)
        for x in range(1,len(esclist),2):
            esclist[x] = "%c" % (int(esclist[x][2:],16))
        return "".join(esclist)

    def parse_address(self, uid):
        """parses uid and returns a tuple of real name and email address"""
        (name, address) = email.Utils.parseaddr(uid)
        # Drop any parenthesised comment from the name, then undo gpg escaping.
        name = re.sub(r"\s*[(].*[)]", "", name)
        name = self.de_escape_gpg_str(name)
        return (name, address)

    def load_keys(self, keyring):
        # Populate self.keys / self.fpr_lookup from gpg's --with-colons output.
        if not self.keyring_id:
            raise Exception('Must be initialized with database information')

        k = os.popen(self.gpg_invocation % keyring, "r")

        for line in k.xreadlines():
            field = line.split(":")
            if field[0] == "pub":
                # NOTE(review): the assignment of 'key' for this pub record is
                # not shown in this excerpt.
                (name, addr) = self.parse_address(field[9])
                self.keys[key]["email"] = addr
                self.keys[key]["name"] = name
                self.keys[key]["fingerprints"] = []
            elif key and field[0] == "sub" and len(field) >= 12:
                # Subkey capability field: 's' means usable for signing.
                signingkey = ("s" in field[11])
            elif key and field[0] == "uid":
                (name, addr) = self.parse_address(field[9])
                # Prefer the first uid that actually carries an email address.
                if "email" not in self.keys[key] and "@" in addr:
                    self.keys[key]["email"] = addr
                    self.keys[key]["name"] = name
            elif signingkey and field[0] == "fpr":
                # Record fingerprint both per-key and in the reverse lookup.
                self.keys[key]["fingerprints"].append(field[9])
                self.fpr_lookup[field[9]] = key

    def import_users_from_ldap(self, session):
        # Match keyring fingerprints against the configured LDAP directory and
        # attach the LDAP uid/name to the corresponding keys.
        LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
        LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]

        l = ldap.open(LDAPServer)
        l.simple_bind_s("","")
        Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
               "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
               ["uid", "keyfingerprint", "cn", "mn", "sn"])

        ldap_fin_uid_id = {}

        # NOTE(review): the loop over the LDAP results that binds 'entry' and
        # initialises 'byuid'/'byname' is not fully shown in this excerpt.
        uid = entry["uid"][0]
        name = get_ldap_name(entry)
        fingerprints = entry["keyFingerPrint"]

        for f in fingerprints:
            key = self.fpr_lookup.get(f, None)
            if key not in self.keys:
            self.keys[key]["uid"] = uid

            keyid = get_or_set_uid(uid, session).uid_id
            byuid[keyid] = (uid, name)
            byname[uid] = (keyid, name)

        return (byname, byuid)

    def generate_users_from_keyring(self, format, session):
        # Derive a uid for every key from its email address using 'format'
        # (a %-template); keys without email get the "invalid-uid" marker.
        for x in self.keys.keys():
            if "email" not in self.keys[x]:
                self.keys[x]["uid"] = format % "invalid-uid"
            # NOTE(review): the branch structure around the following lines is
            # not fully shown in this excerpt.
            uid = format % self.keys[x]["email"]
            keyid = get_or_set_uid(uid, session).uid_id
            byuid[keyid] = (uid, self.keys[x]["name"])
            byname[uid] = (keyid, self.keys[x]["name"])
            self.keys[x]["uid"] = uid

            uid = format % "invalid-uid"
            keyid = get_or_set_uid(uid, session).uid_id
            byuid[keyid] = (uid, "ungeneratable user id")
            byname[uid] = (keyid, "ungeneratable user id")

        return (byname, byuid)

__all__.append('Keyring')
def get_keyring(keyring, session=None):
    """
    If C{keyring} does not have an entry in the C{keyrings} table yet, return None
    If C{keyring} already has an entry, simply return the existing Keyring

    @type keyring: string
    @param keyring: the keyring name

    @rtype: Keyring
    @return: the Keyring object for this keyring
    """
    q = session.query(Keyring).filter_by(keyring_name=keyring)

    # NOTE(review): the try/.one() lines preceding this handler are not
    # shown in this excerpt.
    except NoResultFound:

__all__.append('get_keyring')

################################################################################
class KeyringACLMap(object):
    """Mapping row between a keyring and the ACL granted to its keys."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): this return belongs to a __repr__ whose 'def' line is
        # not shown in this excerpt.
        return '<KeyringACLMap %s>' % self.keyring_acl_map_id

__all__.append('KeyringACLMap')

################################################################################
class DBChange(object):
    """ORM class for one .changes file known to the database."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): this return belongs to a __repr__ whose 'def' line is
        # not shown in this excerpt.
        return '<DBChange %s>' % self.changesname

    def clean_from_queue(self):
        """Detach this change from its policy queue and drop file references."""
        session = DBConn().session().object_session(self)

        # Remove changes_pool_files entries

        # Remove changes_pending_files references

        # Clear out of queue
        self.in_queue = None
        self.approved_for_id = None

__all__.append('DBChange')
def get_dbchange(filename, session=None):
    """
    returns DBChange object for given C{filename}.

    @type filename: string
    @param filename: the name of the file

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: DBChange
    @return: DBChange object for the given filename (C{None} if not present)
    """
    q = session.query(DBChange).filter_by(changesname=filename)

    # NOTE(review): the try/.one() lines preceding this handler are not
    # shown in this excerpt.
    except NoResultFound:

__all__.append('get_dbchange')

################################################################################
# TODO: Why do we have a separate Location class? Can't it be fully integrated
# into class Component?
class Location(ORMObject):
    """ORM class for a directory tree files are stored under (e.g. the pool)."""
    def __init__(self, path = None, component = None):
        # NOTE(review): the assignment of 'path' is not shown in this excerpt.
        self.component = component
        # the column 'type' should go away, see comment at mapper
        self.archive_type = 'pool'

    def properties(self):
        # Attribute names exposed through ORMObject's generic support.
        return ['path', 'archive_type', 'component', 'files_count']

    def not_null_constraints(self):
        # Columns that must never be NULL for a valid row.
        return ['path', 'archive_type']

__all__.append('Location')
def get_location(location, component=None, archive=None, session=None):
    """
    Returns Location object for the given combination of location, component
    and archive.

    @type location: string
    @param location: the path of the location, e.g. I{/srv/ftp-master.debian.org/ftp/pool/}

    @type component: string
    @param component: the component name (if None, no restriction applied)

    @type archive: string
    @param archive: the archive name (if None, no restriction applied)

    @rtype: Location / None
    @return: Either a Location object or None if one can't be found
    """
    q = session.query(Location).filter_by(path=location)

    if archive is not None:
        q = q.join(Archive).filter_by(archive_name=archive)

    if component is not None:
        q = q.join(Component).filter_by(component_name=component)

    # NOTE(review): the try/.one() lines preceding this handler are not
    # shown in this excerpt.
    except NoResultFound:

__all__.append('get_location')

################################################################################
class Maintainer(ORMObject):
    """ORM class for one maintainer ("Name <email>") string."""
    def __init__(self, name = None):
        # NOTE(review): the body assigning 'name' is not shown in this excerpt.

    def properties(self):
        # Attribute names exposed through ORMObject's generic support.
        return ['name', 'maintainer_id']

    def not_null_constraints(self):
        # NOTE(review): the return list is not shown in this excerpt.

    def get_split_maintainer(self):
        # Split the stored string into its parts; empty parts when unset.
        if not hasattr(self, 'name') or self.name is None:
            return ('', '', '', '')

        return fix_maintainer(self.name.strip())

__all__.append('Maintainer')
def get_or_set_maintainer(name, session=None):
    """
    Returns Maintainer object for given maintainer name.

    If no matching maintainer name is found, a row is inserted.

    @type name: string
    @param name: The maintainer name to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.
    A flush will be performed either way.

    @rtype: Maintainer
    @return: the Maintainer object for the given maintainer
    """
    q = session.query(Maintainer).filter_by(name=name)

    except NoResultFound:
        # No row yet: insert one; commit_or_flush makes the new id visible
        # without necessarily committing the caller's transaction.
        maintainer = Maintainer()
        maintainer.name = name
        session.add(maintainer)
        session.commit_or_flush()

__all__.append('get_or_set_maintainer')
def get_maintainer(maintainer_id, session=None):
    """
    Return the name of the maintainer behind C{maintainer_id} or None if that
    maintainer_id is invalid.

    @type maintainer_id: int
    @param maintainer_id: the id of the maintainer

    @rtype: Maintainer
    @return: the Maintainer with this C{maintainer_id}
    """
    return session.query(Maintainer).get(maintainer_id)

__all__.append('get_maintainer')

################################################################################
class NewComment(object):
    """A review comment attached to a package/version in the NEW queue."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): this return belongs to a __repr__ whose 'def' line is
        # not shown in this excerpt.
        return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)

__all__.append('NewComment')
def has_new_comment(package, version, session=None):
    """
    Returns true if the given combination of C{package}, C{version} has a comment.

    @type package: string
    @param package: name of the package

    @type version: string
    @param version: package version

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: boolean
    @return: true/false
    """
    q = session.query(NewComment)
    q = q.filter_by(package=package)
    q = q.filter_by(version=version)

    return bool(q.count() > 0)

__all__.append('has_new_comment')
def get_new_comments(package=None, version=None, comment_id=None, session=None):
    """
    Returns (possibly empty) list of NewComment objects for the given
    parameters.

    @type package: string (optional)
    @param package: name of the package

    @type version: string (optional)
    @param version: package version

    @type comment_id: int (optional)
    @param comment_id: An id of a comment

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of NewComment objects will be returned
    """
    # Each filter is applied only when the corresponding argument is given.
    q = session.query(NewComment)
    if package is not None: q = q.filter_by(package=package)
    if version is not None: q = q.filter_by(version=version)
    if comment_id is not None: q = q.filter_by(comment_id=comment_id)

__all__.append('get_new_comments')

################################################################################
class Override(object):
    """ORM class for one override entry for a package in a suite."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): this return belongs to a __repr__ whose 'def' line is
        # not shown in this excerpt.
        return '<Override %s (%s)>' % (self.package, self.suite_id)

__all__.append('Override')
def get_override(package, suite=None, component=None, overridetype=None, session=None):
    """
    Returns Override object for the given parameters

    @type package: string
    @param package: The name of the package

    @type suite: string, list or None
    @param suite: The name of the suite (or suites if a list) to limit to. If
    None, don't limit. Defaults to None.

    @type component: string, list or None
    @param component: The name of the component (or components if a list) to
    limit to. If None, don't limit. Defaults to None.

    @type overridetype: string, list or None
    @param overridetype: The name of the overridetype (or overridetypes if a list) to
    limit to. If None, don't limit. Defaults to None.

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of Override objects will be returned
    """
    q = session.query(Override)
    q = q.filter_by(package=package)

    # Scalar arguments are normalised to one-element lists so a single
    # IN-filter handles both cases.
    if suite is not None:
        if not isinstance(suite, list): suite = [suite]
        q = q.join(Suite).filter(Suite.suite_name.in_(suite))

    if component is not None:
        if not isinstance(component, list): component = [component]
        q = q.join(Component).filter(Component.component_name.in_(component))

    if overridetype is not None:
        if not isinstance(overridetype, list): overridetype = [overridetype]
        q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype))

__all__.append('get_override')

################################################################################
class OverrideType(object):
    """ORM class for one override type name."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): this return belongs to a __repr__ whose 'def' line is
        # not shown in this excerpt.
        return '<OverrideType %s>' % self.overridetype

__all__.append('OverrideType')
def get_override_type(override_type, session=None):
    """
    Returns OverrideType object for given C{override type}.

    @type override_type: string
    @param override_type: The name of the override type

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: OverrideType
    @return: the OverrideType object for the given override type
    """
    q = session.query(OverrideType).filter_by(overridetype=override_type)

    # NOTE(review): the try/.one() lines preceding this handler are not
    # shown in this excerpt.
    except NoResultFound:

__all__.append('get_override_type')

################################################################################
class DebContents(object):
    """One (package, file) contents row for a deb package."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): this return belongs to a __repr__ whose 'def' line is
        # not shown; "DebConetnts" is a typo in the repr text ("DebContents").
        return '<DebConetnts %s: %s>' % (self.package.package,self.file)

__all__.append('DebContents')
class UdebContents(object):
    """One (package, file) contents row for a udeb package."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): this return belongs to a __repr__ whose 'def' line is
        # not shown; "UdebConetnts" is a typo in the repr text ("UdebContents").
        return '<UdebConetnts %s: %s>' % (self.package.package,self.file)

__all__.append('UdebContents')
class PendingBinContents(object):
    """Contents rows recorded for a binary that is not yet accepted."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): this return belongs to a __repr__ whose 'def' line is
        # not shown in this excerpt.
        return '<PendingBinContents %s>' % self.contents_id

__all__.append('PendingBinContents')
def insert_pending_content_paths(package,
    """
    Make sure given paths are temporarily associated with given
    package.

    @type package: dict
    @param package: the package to associate with should have been read in from the binary control file
    @type fullpaths: list
    @param fullpaths: the list of paths of the file being associated with the binary
    @type session: SQLAlchemy session
    @param session: Optional SQLAlchemy session. If this is passed, the caller
    is responsible for ensuring a transaction has begun and committing the
    results or rolling back based on the result code. If not passed, a commit
    will be performed at the end of the function

    @return: True upon success, False if there is a problem
    """
    privatetrans = False
    session = DBConn().session()

    arch = get_architecture(package['Architecture'], session)
    arch_id = arch.arch_id

    # Remove any already existing recorded files for this package
    q = session.query(PendingBinContents)
    q = q.filter_by(package=package['Package'])
    q = q.filter_by(version=package['Version'])
    q = q.filter_by(architecture=arch_id)

    for fullpath in fullpaths:
        # Normalise "./path" to "path" before recording.
        if fullpath.startswith( "./" ):
            fullpath = fullpath[2:]

        pca = PendingBinContents()
        pca.package = package['Package']
        pca.version = package['Version']
        pca.architecture = arch_id
        # NOTE(review): the branch selecting between types 8 and 7 is not
        # shown in this excerpt.
        pca.type = 8 # gross
        pca.type = 7 # also gross

    # Only commit if we set up the session ourself

    except Exception, e:
        traceback.print_exc()

    # Only rollback if we set up the session ourself

__all__.append('insert_pending_content_paths')

################################################################################
class PolicyQueue(object):
    """ORM class for one policy queue uploads can be held in."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): this return belongs to a __repr__ whose 'def' line is
        # not shown in this excerpt.
        return '<PolicyQueue %s>' % self.queue_name

__all__.append('PolicyQueue')
def get_policy_queue(queuename, session=None):
    """
    Returns PolicyQueue object for given C{queue name}

    @type queuename: string
    @param queuename: The name of the queue

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: PolicyQueue
    @return: PolicyQueue object for the given queue
    """
    q = session.query(PolicyQueue).filter_by(queue_name=queuename)

    # NOTE(review): the try/.one() lines preceding this handler are not
    # shown in this excerpt.
    except NoResultFound:

__all__.append('get_policy_queue')
def get_policy_queue_from_path(pathname, session=None):
    """
    Returns PolicyQueue object for given C{path name}

    @type pathname: string
    @param pathname: The path

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: PolicyQueue
    @return: PolicyQueue object for the given queue
    """
    q = session.query(PolicyQueue).filter_by(path=pathname)

    # NOTE(review): the try/.one() lines preceding this handler are not
    # shown in this excerpt.
    except NoResultFound:

__all__.append('get_policy_queue_from_path')

################################################################################
class Priority(object):
    """ORM class for one entry of the priority table."""
    def __init__(self, *args, **kwargs):

    def __eq__(self, val):
        # Allow comparing a Priority directly against its name string.
        if isinstance(val, str):
            return (self.priority == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.priority != val)
        # This signals to use the normal comparison operator
        return NotImplemented

        # NOTE(review): this return belongs to a __repr__ whose 'def' line is
        # not shown in this excerpt.
        return '<Priority %s (%s)>' % (self.priority, self.priority_id)

__all__.append('Priority')
def get_priority(priority, session=None):
    """
    Returns Priority object for given C{priority name}.

    @type priority: string
    @param priority: The name of the priority

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Priority
    @return: Priority object for the given priority
    """
    q = session.query(Priority).filter_by(priority=priority)

    # NOTE(review): the try/.one() lines preceding this handler are not
    # shown in this excerpt.
    except NoResultFound:

__all__.append('get_priority')
def get_priorities(session=None):
    """
    Returns dictionary of priority names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of priority names -> id mappings
    """
    q = session.query(Priority)
    # NOTE(review): the 'ret' initialisation and the enclosing 'for x in q'
    # loop are not shown in this excerpt.
        ret[x.priority] = x.priority_id

__all__.append('get_priorities')

################################################################################
class Section(object):
    """ORM class for one entry of the section table."""
    def __init__(self, *args, **kwargs):

    def __eq__(self, val):
        # Allow comparing a Section directly against its name string.
        if isinstance(val, str):
            return (self.section == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.section != val)
        # This signals to use the normal comparison operator
        return NotImplemented

        # NOTE(review): this return belongs to a __repr__ whose 'def' line is
        # not shown in this excerpt.
        return '<Section %s>' % self.section

__all__.append('Section')
def get_section(section, session=None):
    """
    Returns Section object for given C{section name}.

    @type section: string
    @param section: The name of the section

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Section
    @return: Section object for the given section name
    """
    q = session.query(Section).filter_by(section=section)

    # NOTE(review): the try/.one() lines preceding this handler are not
    # shown in this excerpt.
    except NoResultFound:

__all__.append('get_section')
def get_sections(session=None):
    """
    Returns dictionary of section names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of section names -> id mappings
    """
    q = session.query(Section)
    # NOTE(review): the 'ret' initialisation and the enclosing 'for x in q'
    # loop are not shown in this excerpt.
        ret[x.section] = x.section_id

__all__.append('get_sections')

################################################################################
class DBSource(ORMObject):
    """
    ORM class for one row of the source table (an accepted source package).
    """

    def __init__(self, source = None, version = None, maintainer = None, \
        changedby = None, poolfile = None, install_date = None):
        self.source = source
        self.version = version
        self.maintainer = maintainer
        self.changedby = changedby
        self.poolfile = poolfile
        self.install_date = install_date

    def properties(self):
        # Attribute names exposed through ORMObject's generic support
        # (repr/json etc.).
        return ['source', 'source_id', 'maintainer', 'changedby', \
            'fingerprint', 'poolfile', 'version', 'suites_count', \
            'install_date', 'binaries_count']

    def not_null_constraints(self):
        # Columns that must never be NULL for a valid row.
        # Fix: 'install_date' was previously listed twice; each column only
        # needs to be validated once.
        return ['source', 'version', 'install_date', 'maintainer', \
            'changedby', 'poolfile']

__all__.append('DBSource')
def source_exists(source, source_version, suites = ["any"], session=None):
    """
    Ensure that source exists somewhere in the archive for the binary
    upload being processed.
    1. exact match => 1.0-3
    2. bin-only NMU => 1.0-3+b1 , 1.0-3.1+b1

    @type source: string
    @param source: source name

    @type source_version: string
    @param source_version: expected source version

    @type suites: list
    @param suites: list of suites to check in, default I{any}

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: int
    @return: returns 1 if a source with expected version is found, otherwise 0
    """
    # NOTE(review): 'suites' is a mutable default argument — confirm the body
    # never mutates it (the elided lines are not shown here).
    # Strip any binNMU suffix so 1.0-3+b1 also matches source 1.0-3.
    from daklib.regexes import re_bin_only_nmu
    orig_source_version = re_bin_only_nmu.sub('', source_version)

    for suite in suites:
        q = session.query(DBSource).filter_by(source=source). \
            filter(DBSource.version.in_([source_version, orig_source_version]))

        # source must exist in suite X, or in some other suite that's
        # mapped to X, recursively... silent-maps are counted too,
        # unreleased-maps aren't.
        maps = cnf.ValueList("SuiteMappings")[:]
        maps = [ m.split() for m in maps ]
        maps = [ (x[1], x[2]) for x in maps
                 if x[0] == "map" or x[0] == "silent-map" ]
        # NOTE(review): the loop expanding the suite set 's' via the map
        # pairs is only partially shown in this excerpt.
            if x[1] in s and x[0] not in s:
        q = q.filter(DBSource.suites.any(Suite.suite_name.in_(s)))

    # No source found so return not ok

__all__.append('source_exists')
def get_suites_source_in(source, session=None):
    """
    Returns list of Suite objects which given C{source} name is in

    @type source: str
    @param source: DBSource package name to search for

    @rtype: list
    @return: list of Suite objects for the given source
    """
    return session.query(Suite).filter(Suite.sources.any(source=source)).all()

__all__.append('get_suites_source_in')
def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=None):
    """
    Returns list of DBSource objects for given C{source} name and other parameters

    @type source: str
    @param source: DBSource package name to search for

    @type version: str or None
    @param version: DBSource version name to search for or None if not applicable

    @type dm_upload_allowed: bool
    @param dm_upload_allowed: If None, no effect. If True or False, only
    return packages with that dm_upload_allowed setting

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of DBSource objects for the given name (may be empty)
    """
    q = session.query(DBSource).filter_by(source=source)

    if version is not None:
        q = q.filter_by(version=version)

    if dm_upload_allowed is not None:
        q = q.filter_by(dm_upload_allowed=dm_upload_allowed)

__all__.append('get_sources_from_name')
# FIXME: This function fails badly if it finds more than 1 source package and
# its implementation is trivial enough to be inlined.
def get_source_in_suite(source, suite, session=None):
    """
    Returns a DBSource object for a combination of C{source} and C{suite}.

    - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
    - B{suite} - a suite name, eg. I{unstable}

    @type source: string
    @param source: source package name

    @type suite: string
    @param suite: the suite name

    @rtype: DBSource
    @return: the DBSource object for I{source} in I{suite}
    """
    q = get_suite(suite, session).get_sources(source)

    # NOTE(review): the try/.one() lines preceding this handler are not
    # shown in this excerpt.
    except NoResultFound:

__all__.append('get_source_in_suite')

################################################################################
def add_dsc_to_db(u, filename, session=None):
    """
    Record an accepted .dsc upload: creates the source row, pool files,
    dsc_files entries and src_uploaders.

    @return: (source, dsc_component, dsc_location_id, pfs)
    """
    entry = u.pkg.files[filename]
    # NOTE(review): creation of the 'source' object and the 'pfs' list is
    # not shown in this excerpt.
    source.source = u.pkg.dsc["source"]
    source.version = u.pkg.dsc["version"] # NB: not files[file]["version"], that has no epoch
    source.maintainer_id = get_or_set_maintainer(u.pkg.dsc["maintainer"], session).maintainer_id
    source.changedby_id = get_or_set_maintainer(u.pkg.changes["changed-by"], session).maintainer_id
    source.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    source.install_date = datetime.now().date()

    dsc_component = entry["component"]
    dsc_location_id = entry["location id"]

    source.dm_upload_allowed = (u.pkg.dsc.get("dm-upload-allowed", '') == "yes")

    # Set up a new poolfile if necessary
    if not entry.has_key("files id") or not entry["files id"]:
        filename = entry["pool name"] + filename
        poolfile = add_poolfile(filename, entry, dsc_location_id, session)
        pfs.append(poolfile)
        entry["files id"] = poolfile.file_id

    source.poolfile_id = entry["files id"]

    # Associate the source with every suite listed in the changes file.
    suite_names = u.pkg.changes["distribution"].keys()
    source.suites = session.query(Suite). \
        filter(Suite.suite_name.in_(suite_names)).all()

    # Add the source files to the DB (files and dsc_files)
    dscfile.source_id = source.source_id
    dscfile.poolfile_id = entry["files id"]
    session.add(dscfile)

    for dsc_file, dentry in u.pkg.dsc_files.items():
        df.source_id = source.source_id

        # If the .orig tarball is already in the pool, it's
        # files id is stored in dsc_files by check_dsc().
        files_id = dentry.get("files id", None)

        # Find the entry in the files hash
        # TODO: Bail out here properly
        for f, e in u.pkg.files.items():

        if files_id is None:
            filename = dfentry["pool name"] + dsc_file

            (found, obj) = check_poolfile(filename, dentry["size"], dentry["md5sum"], dsc_location_id)
            # FIXME: needs to check for -1/-2 and or handle exception
            if found and obj is not None:
                files_id = obj.file_id

        # If still not found, add it
        if files_id is None:
            # HACK: Force sha1sum etc into dentry
            dentry["sha1sum"] = dfentry["sha1sum"]
            dentry["sha256sum"] = dfentry["sha256sum"]
            poolfile = add_poolfile(filename, dentry, dsc_location_id, session)
            pfs.append(poolfile)
            files_id = poolfile.file_id

        poolfile = get_poolfile_by_id(files_id, session)
        if poolfile is None:
            utils.fubar("INTERNAL ERROR. Found no poolfile with id %d" % files_id)
        pfs.append(poolfile)

        df.poolfile_id = files_id

    # Add the src_uploaders to the DB
    uploader_ids = [source.maintainer_id]
    if u.pkg.dsc.has_key("uploaders"):
        for up in u.pkg.dsc["uploaders"].replace(">, ", ">\t").split("\t"):
            uploader_ids.append(get_or_set_maintainer(up, session).maintainer_id)

    for up_id in uploader_ids:
        if added_ids.has_key(up_id):
            # Skip duplicates, but warn so the upload can be inspected.
            utils.warn("Already saw uploader %s for source %s" % (up_id, source.source))

        su.maintainer_id = up_id
        su.source_id = source.source_id

    return source, dsc_component, dsc_location_id, pfs

__all__.append('add_dsc_to_db')
def add_deb_to_db(u, filename, session=None):
    """
    Contrary to what you might expect, this routine deals with both
    debs and udebs. That info is in 'dbtype', whilst 'type' is
    'deb' for both of them
    """
    entry = u.pkg.files[filename]
    # NOTE(review): creation of the 'bin' object is not shown in this excerpt.
    bin.package = entry["package"]
    bin.version = entry["version"]
    bin.maintainer_id = get_or_set_maintainer(entry["maintainer"], session).maintainer_id
    bin.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    bin.arch_id = get_architecture(entry["architecture"], session).arch_id
    bin.binarytype = entry["dbtype"]

    # Resolve the pool location for the file, defaulting to Dir::Pool.
    filename = entry["pool name"] + filename
    fullpath = os.path.join(cnf["Dir::Pool"], filename)
    if not entry.get("location id", None):
        entry["location id"] = get_location(cnf["Dir::Pool"], entry["component"], session=session).location_id

    # NOTE(review): the 'else' branch separating the next two pairs of lines
    # is not shown in this excerpt.
    if entry.get("files id", None):
        poolfile = get_poolfile_by_id(bin.poolfile_id)
        bin.poolfile_id = entry["files id"]
        poolfile = add_poolfile(filename, entry, entry["location id"], session)
        bin.poolfile_id = entry["files id"] = poolfile.file_id

    # A binary must map to exactly one source package.
    bin_sources = get_sources_from_name(entry["source package"], entry["source version"], session=session)
    if len(bin_sources) != 1:
        raise NoSourceFieldError, "Unable to find a unique source id for %s (%s), %s, file %s, type %s, signed by %s" % \
                                  (bin.package, bin.version, entry["architecture"],
                                   filename, bin.binarytype, u.pkg.changes["fingerprint"])

    bin.source_id = bin_sources[0].source_id

    # Add and flush object so it has an ID
    suite_names = u.pkg.changes["distribution"].keys()
    bin.suites = session.query(Suite). \
        filter(Suite.suite_name.in_(suite_names)).all()

    # Deal with contents - disabled for now
    #contents = copy_temporary_contents(bin.package, bin.version, bin.architecture.arch_string, os.path.basename(filename), None, session)
    # print "REJECT\nCould not determine contents of package %s" % bin.package
    # session.rollback()
    # raise MissingContents, "No contents stored for package %s, and couldn't determine contents of %s" % (bin.package, filename)

__all__.append('add_deb_to_db')

################################################################################
class SourceACL(object):
    """ORM class for one source ACL row."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): this return belongs to a __repr__ whose 'def' line is
        # not shown in this excerpt.
        return '<SourceACL %s>' % self.source_acl_id

__all__.append('SourceACL')

################################################################################
class SrcFormat(object):
    """ORM class for one source package format name."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): this return belongs to a __repr__ whose 'def' line is
        # not shown in this excerpt.
        return '<SrcFormat %s>' % (self.format_name)

__all__.append('SrcFormat')

################################################################################
class SrcUploader(object):
    """ORM class linking a source package to one of its uploaders."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): this return belongs to a __repr__ whose 'def' line is
        # not shown in this excerpt.
        return '<SrcUploader %s>' % self.uploader_id

__all__.append('SrcUploader')

################################################################################
# (display label, Suite attribute) pairs iterated when rendering a suite's
# details (see the SUITE_FIELDS loop in class Suite below).
# NOTE(review): at least one pair is not shown in this excerpt.
SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
                 ('SuiteID', 'suite_id'),
                 ('Version', 'version'),
                 ('Origin', 'origin'),
                 ('Description', 'description'),
                 ('Untouchable', 'untouchable'),
                 ('Announce', 'announce'),
                 ('Codename', 'codename'),
                 ('OverrideCodename', 'overridecodename'),
                 ('ValidTime', 'validtime'),
                 ('Priority', 'priority'),
                 ('NotAutomatic', 'notautomatic'),
                 ('CopyChanges', 'copychanges'),
                 ('OverrideSuite', 'overridesuite')]
2557 # Why the heck don't we have any UNIQUE constraints in table suite?
2558 # TODO: Add UNIQUE constraints for appropriate columns.
class Suite(ORMObject):
    """A distribution suite (e.g. 'unstable'); row of table suite.

    Compares equal to a plain string naming the suite (see __eq__/__ne__).
    """
    def __init__(self, suite_name = None, version = None):
        self.suite_name = suite_name
        self.version = version

    def properties(self):
        # Attributes included in ORMObject's generic string representation.
        return ['suite_name', 'version', 'sources_count', 'binaries_count']

    def not_null_constraints(self):
        # Columns that must be set before the row may be flushed.
        return ['suite_name', 'version']

    def __eq__(self, val):
        # Convenience: a Suite compares equal to its bare name string.
        if isinstance(val, str):
            return (self.suite_name == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.suite_name != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def details(self):
        """Return a multi-line "Field: value" summary built from SUITE_FIELDS.

        Fields whose attribute is unset (None) are omitted.
        """
        # NOTE(review): the method header was elided from the excerpt; the
        # name 'details' matches dak upstream — confirm against callers.
        ret = []
        for disp, field in SUITE_FIELDS:
            val = getattr(self, field, None)
            if val is not None:
                ret.append("%s: %s" % (disp, val))
        return "\n".join(ret)

    def get_architectures(self, skipsrc=False, skipall=False):
        """
        Returns list of Architecture objects

        @type skipsrc: boolean
        @param skipsrc: Whether to skip returning the 'source' architecture entry

        @type skipall: boolean
        @param skipall: Whether to skip returning the 'all' architecture entry

        @rtype: list
        @return: list of Architecture objects for the given name (may be empty)
        """
        q = object_session(self).query(Architecture).with_parent(self)
        # Apply the filters only when requested; as excerpted the guards had
        # been lost and the parameters were ignored.
        if skipsrc:
            q = q.filter(Architecture.arch_string != 'source')
        if skipall:
            q = q.filter(Architecture.arch_string != 'all')
        return q.order_by(Architecture.arch_string).all()

    def get_sources(self, source):
        """
        Returns a query object representing DBSource that is part of C{suite}.

        - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}

        @type source: string
        @param source: source package name

        @rtype: sqlalchemy.orm.query.Query
        @return: a query of DBSource
        """
        session = object_session(self)
        # NOTE(review): the continuation line was elided from the excerpt;
        # with_parent(self) restricts the query to this suite's sources —
        # confirm against upstream.
        return session.query(DBSource).filter_by(source = source). \
            with_parent(self)
2632 __all__.append('Suite')
def get_suite(suite, session=None):
    """
    Returns Suite object for given C{suite name}.

    @type suite: string
    @param suite: The name of the suite

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Suite
    @return: Suite object for the requested suite name (None if not present)
    """
    # NOTE(review): upstream decorates lookups like this with
    # @session_wrapper so a session is created when none is passed; the
    # decorator line is outside this excerpt.
    q = session.query(Suite).filter_by(suite_name=suite)

    try:
        return q.one()
    except NoResultFound:
        # Unknown suite name: signal with None rather than raising.
        return None
2657 __all__.append('get_suite')
2659 ################################################################################
2661 # TODO: should be removed because the implementation is too trivial
def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
    """
    Returns list of Architecture objects for given C{suite} name

    @type suite: str
    @param suite: Suite name to search for

    @type skipsrc: boolean
    @param skipsrc: Whether to skip returning the 'source' architecture entry

    @type skipall: boolean
    @param skipall: Whether to skip returning the 'all' architecture entry

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of Architecture objects for the given name (may be empty)
    """
    # Thin delegation to Suite.get_architectures().
    # NOTE(review): get_suite() can return None for an unknown suite, which
    # would raise AttributeError here — confirm callers pass valid names.
    return get_suite(suite, session).get_architectures(skipsrc, skipall)
2688 __all__.append('get_suite_architectures')
2690 ################################################################################
class SuiteSrcFormat(object):
    """Association row mapping a suite to an allowed source format.

    Attributes (C{suite_id}, C{src_format_id}) are attached by the
    SQLAlchemy mapper set up in DBConn.
    """
    def __init__(self, *args, **kwargs):
        # Row state is populated by SQLAlchemy, not here.
        pass

    def __repr__(self):
        return '<SuiteSrcFormat (%s, %s)>' % (self.suite_id, self.src_format_id)
2699 __all__.append('SuiteSrcFormat')
def get_suite_src_formats(suite, session=None):
    """
    Returns list of allowed SrcFormat for C{suite}.

    @type suite: str
    @param suite: Suite name to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: the list of allowed source formats for I{suite}
    """
    # Join through the suite<->src_format association table and order the
    # result by format name for stable output.
    q = session.query(SrcFormat)
    q = q.join(SuiteSrcFormat)
    q = q.join(Suite).filter_by(suite_name=suite)
    q = q.order_by('format_name')

    # As excerpted the function fell off the end and returned None; the
    # docstring promises a list.
    return q.all()
2724 __all__.append('get_suite_src_formats')
2726 ################################################################################
class Uid(ORMObject):
    """An OpenPGP key owner identity; row of table uid.

    Compares equal to a plain uid string (see __eq__/__ne__).
    """
    def __init__(self, uid = None, name = None):
        self.uid = uid
        self.name = name

    def __eq__(self, val):
        # Convenience: a Uid compares equal to its bare uid string.
        if isinstance(val, str):
            return (self.uid == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.uid != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def properties(self):
        # Attributes included in ORMObject's generic string representation.
        return ['uid', 'name', 'fingerprint']

    def not_null_constraints(self):
        # Only 'uid' is mandatory; 'name' may be unset.
        # NOTE(review): the return line was elided from the excerpt; ['uid']
        # matches dak upstream — confirm.
        return ['uid']
2751 __all__.append('Uid')
def get_or_set_uid(uidname, session=None):
    """
    Returns uid object for given uidname.

    If no matching uidname is found, a row is inserted.

    @type uidname: string
    @param uidname: The uid to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: Uid
    @return: the uid object for the given uidname
    """
    q = session.query(Uid).filter_by(uid=uidname)

    try:
        ret = q.one()
    except NoResultFound:
        # Not present yet: insert a new row and return it, as promised by
        # the docstring.  NOTE(review): this branch was largely elided from
        # the excerpt and is reconstructed — confirm against upstream.
        uid = Uid()
        uid.uid = uidname
        session.add(uid)
        session.commit_or_flush()
        ret = uid

    return ret
2785 __all__.append('get_or_set_uid')
def get_uid_from_fingerprint(fpr, session=None):
    """Return the Uid owning fingerprint C{fpr}, or None if unknown.

    @type fpr: string
    @param fpr: the fingerprint to look up

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)
    """
    q = session.query(Uid)
    q = q.join(Fingerprint).filter_by(fingerprint=fpr)

    try:
        return q.one()
    except NoResultFound:
        return None
2797 __all__.append('get_uid_from_fingerprint')
2799 ################################################################################
class UploadBlock(object):
    """A block preventing uploads of a source package; row of table upload_blocks.

    Attributes (e.g. C{source}, C{upload_block_id}) are attached by the
    SQLAlchemy mapper set up in DBConn.
    """
    def __init__(self, *args, **kwargs):
        # Row state is populated by SQLAlchemy, not here.
        pass

    def __repr__(self):
        return '<UploadBlock %s (%s)>' % (self.source, self.upload_block_id)
2808 __all__.append('UploadBlock')
2810 ################################################################################
class DBConn(object):
    """
    database module init.

    Borg-style singleton wrapping the SQLAlchemy engine, the reflected
    table metadata and the classical ORM mappers for the dak database.
    """
    def __init__(self, *args, **kwargs):
        # Borg pattern: every instance shares one state dict, so DBConn()
        # anywhere in the code yields the same connection state.
        # NOTE(review): the class-level __shared_state declaration and the
        # call that establishes the connection are elided from this excerpt.
        self.__dict__ = self.__shared_state

        if not getattr(self, 'initialised', False):
            self.initialised = True
            # Presence of any 'debug' keyword enables engine-level SQL echo
            # (see create_engine(..., echo=self.debug) below).
            self.debug = kwargs.has_key('debug')

    def __setuptables(self):
        # Tables whose SERIAL 'id' primary key we must declare explicitly;
        # see the sqlalchemy 0.5 workaround comment before the loop below.
        # NOTE(review): several table names and the tuple's closing
        # parenthesis are elided from this excerpt.
        tables_with_primary = (
            'changes_pending_binaries',
            'changes_pending_files',
            'changes_pending_source',
            'pending_bin_contents',
            # The following tables have primary keys but sqlalchemy
            # version 0.5 fails to reflect them correctly with database
            # versions before upgrade #41.
            #'build_queue_files',

        # Association/link tables reflected without declaring a primary key.
        # NOTE(review): some entries and the closing parenthesis are elided
        # from this excerpt.
        tables_no_primary = (
            'changes_pending_files_map',
            'changes_pending_source_files',
            'changes_pool_files',
            'suite_architectures',
            'suite_src_formats',
            'suite_build_queue_copy',
            # see the comment above
            'build_queue_files',

        # Database views, reflected for read access as view_* attributes.
        # NOTE(review): the opening "views = (" statement is elided from
        # this excerpt; the string literals below are its elements.
            'almost_obsolete_all_associations',
            'almost_obsolete_src_associations',
            'any_associations_source',
            'bin_assoc_by_arch',
            'bin_associations_binaries',
            'binaries_suite_arch',
            'binfiles_suite_component_arch',
            'newest_all_associations',
            'newest_any_associations',
            'newest_src_association',
            'obsolete_all_associations',
            'obsolete_any_associations',
            'obsolete_any_by_all_associations',
            'obsolete_src_associations',
            'src_associations_bin',
            'src_associations_src',
            'suite_arch_by_name',

        # Sqlalchemy version 0.5 fails to reflect the SERIAL type
        # correctly and that is why we have to use a workaround. It can
        # be removed as soon as we switch to version 0.6.
        for table_name in tables_with_primary:
            table = Table(table_name, self.db_meta, \
                Column('id', Integer, primary_key = True), \
                autoload=True, useexisting=True)
            setattr(self, 'tbl_%s' % table_name, table)

        for table_name in tables_no_primary:
            table = Table(table_name, self.db_meta, autoload=True)
            setattr(self, 'tbl_%s' % table_name, table)

        for view_name in views:
            view = Table(view_name, self.db_meta, autoload=True)
            setattr(self, 'view_%s' % view_name, view)

    def __setupmappers(self):
        # Classical SQLAlchemy mappings: bind each ORM class defined earlier
        # in this module to its reflected table, renaming columns and wiring
        # relations/backrefs.  'validator' is a MapperExtension defined
        # elsewhere in this module.
        mapper(Architecture, self.tbl_architecture,
            properties = dict(arch_id = self.tbl_architecture.c.id,
                              suites = relation(Suite, secondary=self.tbl_suite_architectures,
                                  order_by='suite_name',
                                  backref=backref('architectures', order_by='arch_string'))),
            extension = validator)

        mapper(Archive, self.tbl_archive,
            properties = dict(archive_id = self.tbl_archive.c.id,
                              archive_name = self.tbl_archive.c.name))

        mapper(PendingBinContents, self.tbl_pending_bin_contents,
            properties = dict(contents_id = self.tbl_pending_bin_contents.c.id,
                              filename = self.tbl_pending_bin_contents.c.filename,
                              package = self.tbl_pending_bin_contents.c.package,
                              version = self.tbl_pending_bin_contents.c.version,
                              arch = self.tbl_pending_bin_contents.c.arch,
                              otype = self.tbl_pending_bin_contents.c.type))

        mapper(DebContents, self.tbl_deb_contents,
            properties = dict(binary_id=self.tbl_deb_contents.c.binary_id,
                              package=self.tbl_deb_contents.c.package,
                              suite=self.tbl_deb_contents.c.suite,
                              arch=self.tbl_deb_contents.c.arch,
                              section=self.tbl_deb_contents.c.section,
                              filename=self.tbl_deb_contents.c.filename))

        mapper(UdebContents, self.tbl_udeb_contents,
            properties = dict(binary_id=self.tbl_udeb_contents.c.binary_id,
                              package=self.tbl_udeb_contents.c.package,
                              suite=self.tbl_udeb_contents.c.suite,
                              arch=self.tbl_udeb_contents.c.arch,
                              section=self.tbl_udeb_contents.c.section,
                              filename=self.tbl_udeb_contents.c.filename))

        mapper(BuildQueue, self.tbl_build_queue,
            properties = dict(queue_id = self.tbl_build_queue.c.id))

        mapper(BuildQueueFile, self.tbl_build_queue_files,
            properties = dict(buildqueue = relation(BuildQueue, backref='queuefiles'),
                              poolfile = relation(PoolFile, backref='buildqueueinstances')))

        mapper(DBBinary, self.tbl_binaries,
            properties = dict(binary_id = self.tbl_binaries.c.id,
                              package = self.tbl_binaries.c.package,
                              version = self.tbl_binaries.c.version,
                              maintainer_id = self.tbl_binaries.c.maintainer,
                              maintainer = relation(Maintainer),
                              source_id = self.tbl_binaries.c.source,
                              source = relation(DBSource, backref='binaries'),
                              arch_id = self.tbl_binaries.c.architecture,
                              architecture = relation(Architecture),
                              poolfile_id = self.tbl_binaries.c.file,
                              poolfile = relation(PoolFile, backref=backref('binary', uselist = False)),
                              binarytype = self.tbl_binaries.c.type,
                              fingerprint_id = self.tbl_binaries.c.sig_fpr,
                              fingerprint = relation(Fingerprint),
                              install_date = self.tbl_binaries.c.install_date,
                              suites = relation(Suite, secondary=self.tbl_bin_associations,
                                  backref=backref('binaries', lazy='dynamic'))),
            extension = validator)

        mapper(BinaryACL, self.tbl_binary_acl,
            properties = dict(binary_acl_id = self.tbl_binary_acl.c.id))

        mapper(BinaryACLMap, self.tbl_binary_acl_map,
            properties = dict(binary_acl_map_id = self.tbl_binary_acl_map.c.id,
                              fingerprint = relation(Fingerprint, backref="binary_acl_map"),
                              architecture = relation(Architecture)))

        mapper(Component, self.tbl_component,
            properties = dict(component_id = self.tbl_component.c.id,
                              component_name = self.tbl_component.c.name),
            extension = validator)

        mapper(DBConfig, self.tbl_config,
            properties = dict(config_id = self.tbl_config.c.id))

        mapper(DSCFile, self.tbl_dsc_files,
            properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
                              source_id = self.tbl_dsc_files.c.source,
                              source = relation(DBSource),
                              poolfile_id = self.tbl_dsc_files.c.file,
                              poolfile = relation(PoolFile)))

        mapper(PoolFile, self.tbl_files,
            properties = dict(file_id = self.tbl_files.c.id,
                              filesize = self.tbl_files.c.size,
                              location_id = self.tbl_files.c.location,
                              location = relation(Location,
                                  # using lazy='dynamic' in the back
                                  # reference because we have A LOT of
                                  # files in one location
                                  backref=backref('files', lazy='dynamic'))),
            extension = validator)

        mapper(Fingerprint, self.tbl_fingerprint,
            properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
                              uid_id = self.tbl_fingerprint.c.uid,
                              uid = relation(Uid),
                              keyring_id = self.tbl_fingerprint.c.keyring,
                              keyring = relation(Keyring),
                              source_acl = relation(SourceACL),
                              binary_acl = relation(BinaryACL)),
            extension = validator)

        mapper(Keyring, self.tbl_keyrings,
            properties = dict(keyring_name = self.tbl_keyrings.c.name,
                              keyring_id = self.tbl_keyrings.c.id))

        mapper(DBChange, self.tbl_changes,
            properties = dict(change_id = self.tbl_changes.c.id,
                              poolfiles = relation(PoolFile,
                                  secondary=self.tbl_changes_pool_files,
                                  backref="changeslinks"),
                              seen = self.tbl_changes.c.seen,
                              source = self.tbl_changes.c.source,
                              binaries = self.tbl_changes.c.binaries,
                              architecture = self.tbl_changes.c.architecture,
                              distribution = self.tbl_changes.c.distribution,
                              urgency = self.tbl_changes.c.urgency,
                              maintainer = self.tbl_changes.c.maintainer,
                              changedby = self.tbl_changes.c.changedby,
                              date = self.tbl_changes.c.date,
                              version = self.tbl_changes.c.version,
                              files = relation(ChangePendingFile,
                                  secondary=self.tbl_changes_pending_files_map,
                                  backref="changesfile"),
                              in_queue_id = self.tbl_changes.c.in_queue,
                              in_queue = relation(PolicyQueue,
                                  primaryjoin=(self.tbl_changes.c.in_queue==self.tbl_policy_queue.c.id)),
                              approved_for_id = self.tbl_changes.c.approved_for))

        mapper(ChangePendingBinary, self.tbl_changes_pending_binaries,
            properties = dict(change_pending_binary_id = self.tbl_changes_pending_binaries.c.id))

        mapper(ChangePendingFile, self.tbl_changes_pending_files,
            properties = dict(change_pending_file_id = self.tbl_changes_pending_files.c.id,
                              filename = self.tbl_changes_pending_files.c.filename,
                              size = self.tbl_changes_pending_files.c.size,
                              md5sum = self.tbl_changes_pending_files.c.md5sum,
                              sha1sum = self.tbl_changes_pending_files.c.sha1sum,
                              sha256sum = self.tbl_changes_pending_files.c.sha256sum))

        mapper(ChangePendingSource, self.tbl_changes_pending_source,
            properties = dict(change_pending_source_id = self.tbl_changes_pending_source.c.id,
                              change = relation(DBChange),
                              # Two relations to Maintainer need explicit join
                              # conditions to disambiguate the foreign keys.
                              maintainer = relation(Maintainer,
                                  primaryjoin=(self.tbl_changes_pending_source.c.maintainer_id==self.tbl_maintainer.c.id)),
                              changedby = relation(Maintainer,
                                  primaryjoin=(self.tbl_changes_pending_source.c.changedby_id==self.tbl_maintainer.c.id)),
                              fingerprint = relation(Fingerprint),
                              source_files = relation(ChangePendingFile,
                                  secondary=self.tbl_changes_pending_source_files,
                                  backref="pending_sources")))

        mapper(KeyringACLMap, self.tbl_keyring_acl_map,
            properties = dict(keyring_acl_map_id = self.tbl_keyring_acl_map.c.id,
                              keyring = relation(Keyring, backref="keyring_acl_map"),
                              architecture = relation(Architecture)))

        mapper(Location, self.tbl_location,
            properties = dict(location_id = self.tbl_location.c.id,
                              component_id = self.tbl_location.c.component,
                              component = relation(Component, \
                                  backref=backref('location', uselist = False)),
                              archive_id = self.tbl_location.c.archive,
                              archive = relation(Archive),
                              # FIXME: the 'type' column is old cruft and
                              # should be removed in the future.
                              archive_type = self.tbl_location.c.type),
            extension = validator)

        mapper(Maintainer, self.tbl_maintainer,
            properties = dict(maintainer_id = self.tbl_maintainer.c.id,
                              maintains_sources = relation(DBSource, backref='maintainer',
                                  primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.maintainer)),
                              changed_sources = relation(DBSource, backref='changedby',
                                  primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.changedby))),
            extension = validator)

        mapper(NewComment, self.tbl_new_comments,
            properties = dict(comment_id = self.tbl_new_comments.c.id))

        mapper(Override, self.tbl_override,
            properties = dict(suite_id = self.tbl_override.c.suite,
                              suite = relation(Suite),
                              package = self.tbl_override.c.package,
                              component_id = self.tbl_override.c.component,
                              component = relation(Component),
                              priority_id = self.tbl_override.c.priority,
                              priority = relation(Priority),
                              section_id = self.tbl_override.c.section,
                              section = relation(Section),
                              overridetype_id = self.tbl_override.c.type,
                              overridetype = relation(OverrideType)))

        mapper(OverrideType, self.tbl_override_type,
            properties = dict(overridetype = self.tbl_override_type.c.type,
                              overridetype_id = self.tbl_override_type.c.id))

        mapper(PolicyQueue, self.tbl_policy_queue,
            properties = dict(policy_queue_id = self.tbl_policy_queue.c.id))

        mapper(Priority, self.tbl_priority,
            properties = dict(priority_id = self.tbl_priority.c.id))

        mapper(Section, self.tbl_section,
            properties = dict(section_id = self.tbl_section.c.id,
                              section=self.tbl_section.c.section))

        mapper(DBSource, self.tbl_source,
            properties = dict(source_id = self.tbl_source.c.id,
                              version = self.tbl_source.c.version,
                              maintainer_id = self.tbl_source.c.maintainer,
                              poolfile_id = self.tbl_source.c.file,
                              poolfile = relation(PoolFile, backref=backref('source', uselist = False)),
                              fingerprint_id = self.tbl_source.c.sig_fpr,
                              fingerprint = relation(Fingerprint),
                              changedby_id = self.tbl_source.c.changedby,
                              srcfiles = relation(DSCFile,
                                  primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
                              suites = relation(Suite, secondary=self.tbl_src_associations,
                                  backref=backref('sources', lazy='dynamic')),
                              srcuploaders = relation(SrcUploader)),
            extension = validator)

        mapper(SourceACL, self.tbl_source_acl,
            properties = dict(source_acl_id = self.tbl_source_acl.c.id))

        mapper(SrcFormat, self.tbl_src_format,
            properties = dict(src_format_id = self.tbl_src_format.c.id,
                              format_name = self.tbl_src_format.c.format_name))

        mapper(SrcUploader, self.tbl_src_uploaders,
            properties = dict(uploader_id = self.tbl_src_uploaders.c.id,
                              source_id = self.tbl_src_uploaders.c.source,
                              source = relation(DBSource,
                                  primaryjoin=(self.tbl_src_uploaders.c.source==self.tbl_source.c.id)),
                              maintainer_id = self.tbl_src_uploaders.c.maintainer,
                              maintainer = relation(Maintainer,
                                  primaryjoin=(self.tbl_src_uploaders.c.maintainer==self.tbl_maintainer.c.id))))

        mapper(Suite, self.tbl_suite,
            properties = dict(suite_id = self.tbl_suite.c.id,
                              policy_queue = relation(PolicyQueue),
                              copy_queues = relation(BuildQueue,
                                  secondary=self.tbl_suite_build_queue_copy)),
            extension = validator)

        mapper(SuiteSrcFormat, self.tbl_suite_src_formats,
            properties = dict(suite_id = self.tbl_suite_src_formats.c.suite,
                              suite = relation(Suite, backref='suitesrcformats'),
                              src_format_id = self.tbl_suite_src_formats.c.src_format,
                              src_format = relation(SrcFormat)))

        mapper(Uid, self.tbl_uid,
            properties = dict(uid_id = self.tbl_uid.c.id,
                              fingerprint = relation(Fingerprint)),
            extension = validator)

        mapper(UploadBlock, self.tbl_upload_blocks,
            properties = dict(upload_block_id = self.tbl_upload_blocks.c.id,
                              fingerprint = relation(Fingerprint, backref="uploadblocks"),
                              uid = relation(Uid, backref="uploadblocks")))

    ## Connection functions
    def __createconn(self):
        from config import Config
        # NOTE(review): the "cnf = Config()" lookup and the if/else that
        # selects between the two connection-string forms below are elided
        # from this excerpt.
        # TCP/IP form: explicit host (and optional port) in the URL.
        connstr = "postgres://%s" % cnf["DB::Host"]
        if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
            connstr += ":%s" % cnf["DB::Port"]
        connstr += "/%s" % cnf["DB::Name"]

        # Unix-socket form: no host component, port passed as a parameter.
        connstr = "postgres:///%s" % cnf["DB::Name"]
        if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
            connstr += "?port=%s" % cnf["DB::Port"]

        # self.debug was set in __init__ from the 'debug' keyword.
        self.db_pg = create_engine(connstr, echo=self.debug)
        self.db_meta = MetaData()
        self.db_meta.bind = self.db_pg
        # NOTE(review): sessionmaker's remaining keyword arguments and the
        # closing parenthesis are elided from this excerpt.
        self.db_smaker = sessionmaker(bind=self.db_pg,

        self.__setuptables()
        self.__setupmappers()

    # NOTE(review): the header of the session accessor method is elided
    # from this excerpt; this return hands out a new Session from the
    # sessionmaker configured above.
        return self.db_smaker()
3223 __all__.append('DBConn')