5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
42 from datetime import datetime, timedelta
43 from errno import ENOENT
44 from tempfile import mkstemp, mkdtemp
46 from inspect import getargspec
49 from sqlalchemy import create_engine, Table, MetaData, Column, Integer
50 from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, backref
51 from sqlalchemy import types as sqltypes
53 # Don't remove this, we re-export the exceptions to scripts which import us
54 from sqlalchemy.exc import *
55 from sqlalchemy.orm.exc import NoResultFound
57 # Only import Config until Queue stuff is changed to store its config
59 from config import Config
60 from textutils import fix_maintainer
61 from dak_exceptions import NoSourceFieldError
63 # suppress some deprecation warnings in squeeze related to sqlalchemy
65 warnings.filterwarnings('ignore', \
66 "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'.*", \
68 # TODO: sqlalchemy needs some extra configuration to correctly reflect
69 # the ind_deb_contents_* indexes - we ignore the warnings at the moment
70 warnings.filterwarnings("ignore", 'Predicate of partial index', SAWarning)
73 ################################################################################
75 # Patch in support for the debversion field type so that it works during
79 # that is for sqlalchemy 0.6
80 UserDefinedType = sqltypes.UserDefinedType
82 # this one for sqlalchemy 0.5
83 UserDefinedType = sqltypes.TypeEngine
85 class DebVersion(UserDefinedType):
86 def get_col_spec(self):
89 def bind_processor(self, dialect):
92 # ' = None' is needed for sqlalchemy 0.5:
93 def result_processor(self, dialect, coltype = None):
96 sa_major_version = sqlalchemy.__version__[0:3]
97 if sa_major_version in ["0.5", "0.6"]:
98 from sqlalchemy.databases import postgres
99 postgres.ischema_names['debversion'] = DebVersion
101 raise Exception("dak only ported to SQLA versions 0.5 and 0.6. See daklib/dbconn.py")
103 ################################################################################
105 __all__ = ['IntegrityError', 'SQLAlchemyError', 'DebVersion']
107 ################################################################################
109 def session_wrapper(fn):
111 Wrapper around common ".., session=None):" handling. If the wrapped
112 function is called without passing 'session', we create a local one
113 and destroy it when the function ends.
115 Also attaches a commit_or_flush method to the session; if we created a
116 local session, this is a synonym for session.commit(), otherwise it is a
117 synonym for session.flush().
120 def wrapped(*args, **kwargs):
121 private_transaction = False
123 # Find the session object
124 session = kwargs.get('session')
127 if len(args) <= len(getargspec(fn)[0]) - 1:
128 # No session specified as last argument or in kwargs
129 private_transaction = True
130 session = kwargs['session'] = DBConn().session()
132 # Session is last argument in args
136 session = args[-1] = DBConn().session()
137 private_transaction = True
139 if private_transaction:
140 session.commit_or_flush = session.commit
142 session.commit_or_flush = session.flush
145 return fn(*args, **kwargs)
147 if private_transaction:
148 # We created a session; close it.
151 wrapped.__doc__ = fn.__doc__
152 wrapped.func_name = fn.func_name
156 __all__.append('session_wrapper')
158 ################################################################################
160 class ORMObject(object):
    ORMObject is a base class for all ORM classes mapped by SQLAlchemy. All
163 derived classes must implement the summary() method.
166 def properties(self):
168 This method should be implemented by all derived classes and returns a
169 list of the important properties. The properties 'created' and
170 'modified' will be added automatically. A suffix '_count' should be
171 added to properties that are lists or query objects. The most important
172 property name should be returned as the first element in the list
173 because it is used by repr().
179 Returns a JSON representation of the object based on the properties
180 returned from the properties() method.
183 # add created and modified
184 all_properties = self.properties() + ['created', 'modified']
185 for property in all_properties:
186 # check for list or query
187 if property[-6:] == '_count':
188 value = getattr(self, property[:-6])
189 if hasattr(value, '__len__'):
192 elif hasattr(value, 'count'):
194 value = value.count()
196 raise KeyError('Do not understand property %s.' % property)
199 value = getattr(self, property)
203 elif isinstance(value, ORMObject):
204 # use repr() for ORMObject types
207 # we want a string for all other types because json cannot
210 data[property] = value
211 return json.dumps(data)
215 Returns the name of the class.
217 return type(self).__name__
221 Returns a short string representation of the object using the first
222 element from the properties() method.
224 primary_property = self.properties()[0]
225 value = getattr(self, primary_property)
226 return '<%s %s>' % (self.classname(), str(value))
230 Returns a human readable form of the object using the properties()
233 return '<%s %s>' % (self.classname(), self.json())
235 __all__.append('ORMObject')
237 ################################################################################
class Architecture(ORMObject):
    """ORM class for a single architecture (e.g. 'amd64').

    Instances compare equal to the plain string holding their
    architecture name; comparisons against any other type fall back
    to the default object semantics.
    """

    def __init__(self, arch_string = None, description = None):
        self.arch_string = arch_string
        self.description = description

    def __eq__(self, val):
        # Allow direct comparison against a plain architecture name.
        if not isinstance(val, str):
            # Tell Python to fall back to the normal comparison operator.
            return NotImplemented
        return self.arch_string == val

    def __ne__(self, val):
        # Mirror __eq__ for string operands.
        if not isinstance(val, str):
            # Tell Python to fall back to the normal comparison operator.
            return NotImplemented
        return self.arch_string != val

    def properties(self):
        # 'arch_string' comes first: ORMObject.repr() uses the leading entry.
        return ['arch_string', 'arch_id', 'suites_count']
259 __all__.append('Architecture')
262 def get_architecture(architecture, session=None):
264 Returns database id for given C{architecture}.
266 @type architecture: string
267 @param architecture: The name of the architecture
269 @type session: Session
270 @param session: Optional SQLA session object (a temporary one will be
271 generated if not supplied)
274 @return: Architecture object for the given arch (None if not present)
277 q = session.query(Architecture).filter_by(arch_string=architecture)
281 except NoResultFound:
284 __all__.append('get_architecture')
286 # TODO: should be removed because the implementation is too trivial
def get_architecture_suites(architecture, session=None):
    """
    Returns list of Suite objects for given C{architecture} name

    @type architecture: str
    @param architecture: Architecture name to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of Suite objects for the given name (may be empty)
    """
    # Look the architecture up first, then hand back its suite list.
    arch = get_architecture(architecture, session)
    return arch.suites
305 __all__.append('get_architecture_suites')
307 ################################################################################
309 class Archive(object):
310 def __init__(self, *args, **kwargs):
314 return '<Archive %s>' % self.archive_name
316 __all__.append('Archive')
319 def get_archive(archive, session=None):
321 returns database id for given C{archive}.
323 @type archive: string
    @param archive: the name of the archive
326 @type session: Session
327 @param session: Optional SQLA session object (a temporary one will be
328 generated if not supplied)
331 @return: Archive object for the given name (None if not present)
334 archive = archive.lower()
336 q = session.query(Archive).filter_by(archive_name=archive)
340 except NoResultFound:
343 __all__.append('get_archive')
345 ################################################################################
347 class BinAssociation(object):
348 def __init__(self, *args, **kwargs):
352 return '<BinAssociation %s (%s, %s)>' % (self.ba_id, self.binary, self.suite)
354 __all__.append('BinAssociation')
356 ################################################################################
358 class BinContents(object):
359 def __init__(self, *args, **kwargs):
363 return '<BinContents (%s, %s)>' % (self.binary, self.filename)
365 __all__.append('BinContents')
367 ################################################################################
369 class DBBinary(object):
370 def __init__(self, *args, **kwargs):
374 return '<DBBinary %s (%s, %s)>' % (self.package, self.version, self.architecture)
376 __all__.append('DBBinary')
def get_suites_binary_in(package, session=None):
    """
    Returns list of Suite objects which given C{package} name is in

    @type package: str
    @param package: DBBinary package name to search for

    @rtype: list
    @return: list of Suite objects for the given package
    """
    # Walk Suite -> BinAssociation -> DBBinary and restrict to the package.
    q = session.query(Suite).join(BinAssociation).join(DBBinary)
    return q.filter_by(package=package).all()
392 __all__.append('get_suites_binary_in')
395 def get_binary_from_id(binary_id, session=None):
397 Returns DBBinary object for given C{id}
400 @param binary_id: Id of the required binary
402 @type session: Session
403 @param session: Optional SQLA session object (a temporary one will be
404 generated if not supplied)
407 @return: DBBinary object for the given binary (None if not present)
410 q = session.query(DBBinary).filter_by(binary_id=binary_id)
414 except NoResultFound:
417 __all__.append('get_binary_from_id')
420 def get_binaries_from_name(package, version=None, architecture=None, session=None):
422 Returns list of DBBinary objects for given C{package} name
425 @param package: DBBinary package name to search for
427 @type version: str or None
428 @param version: Version to search for (or None)
430 @type architecture: str, list or None
431 @param architecture: Architectures to limit to (or None if no limit)
433 @type session: Session
434 @param session: Optional SQL session object (a temporary one will be
435 generated if not supplied)
438 @return: list of DBBinary objects for the given name (may be empty)
441 q = session.query(DBBinary).filter_by(package=package)
443 if version is not None:
444 q = q.filter_by(version=version)
446 if architecture is not None:
447 if not isinstance(architecture, list):
448 architecture = [architecture]
449 q = q.join(Architecture).filter(Architecture.arch_string.in_(architecture))
455 __all__.append('get_binaries_from_name')
def get_binaries_from_source_id(source_id, session=None):
    """
    Returns list of DBBinary objects for given C{source_id}

    @type source_id: int
    @param source_id: source_id to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of DBBinary objects for the given name (may be empty)
    """
    q = session.query(DBBinary).filter_by(source_id=source_id)
    return q.all()
475 __all__.append('get_binaries_from_source_id')
478 def get_binary_from_name_suite(package, suitename, session=None):
479 ### For dak examine-package
480 ### XXX: Doesn't use object API yet
482 sql = """SELECT DISTINCT(b.package), b.version, c.name, su.suite_name
483 FROM binaries b, files fi, location l, component c, bin_associations ba, suite su
484 WHERE b.package='%(package)s'
486 AND fi.location = l.id
487 AND l.component = c.id
490 AND su.suite_name %(suitename)s
491 ORDER BY b.version DESC"""
493 return session.execute(sql % {'package': package, 'suitename': suitename})
495 __all__.append('get_binary_from_name_suite')
498 def get_binary_components(package, suitename, arch, session=None):
499 # Check for packages that have moved from one component to another
500 query = """SELECT c.name FROM binaries b, bin_associations ba, suite s, location l, component c, architecture a, files f
501 WHERE b.package=:package AND s.suite_name=:suitename
502 AND (a.arch_string = :arch OR a.arch_string = 'all')
503 AND ba.bin = b.id AND ba.suite = s.id AND b.architecture = a.id
504 AND f.location = l.id
505 AND l.component = c.id
508 vals = {'package': package, 'suitename': suitename, 'arch': arch}
510 return session.execute(query, vals)
512 __all__.append('get_binary_components')
514 ################################################################################
516 class BinaryACL(object):
517 def __init__(self, *args, **kwargs):
521 return '<BinaryACL %s>' % self.binary_acl_id
523 __all__.append('BinaryACL')
525 ################################################################################
527 class BinaryACLMap(object):
528 def __init__(self, *args, **kwargs):
532 return '<BinaryACLMap %s>' % self.binary_acl_map_id
534 __all__.append('BinaryACLMap')
536 ################################################################################
541 ArchiveDir "%(archivepath)s";
542 OverrideDir "%(overridedir)s";
543 CacheDir "%(cachedir)s";
548 Packages::Compress ". bzip2 gzip";
549 Sources::Compress ". bzip2 gzip";
554 bindirectory "incoming"
559 BinOverride "override.sid.all3";
560 BinCacheDB "packages-accepted.db";
562 FileList "%(filelist)s";
565 Packages::Extensions ".deb .udeb";
568 bindirectory "incoming/"
571 BinOverride "override.sid.all3";
572 SrcOverride "override.sid.all3.src";
573 FileList "%(filelist)s";
577 class BuildQueue(object):
578 def __init__(self, *args, **kwargs):
582 return '<BuildQueue %s>' % self.queue_name
584 def write_metadata(self, starttime, force=False):
585 # Do we write out metafiles?
586 if not (force or self.generate_metadata):
589 session = DBConn().session().object_session(self)
591 fl_fd = fl_name = ac_fd = ac_name = None
593 arches = " ".join([ a.arch_string for a in session.query(Architecture).all() if a.arch_string != 'source' ])
594 startdir = os.getcwd()
597 # Grab files we want to include
598 newer = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) > starttime).all()
599 # Write file list with newer files
600 (fl_fd, fl_name) = mkstemp()
602 os.write(fl_fd, '%s\n' % n.fullpath)
607 # Write minimal apt.conf
608 # TODO: Remove hardcoding from template
609 (ac_fd, ac_name) = mkstemp()
610 os.write(ac_fd, MINIMAL_APT_CONF % {'archivepath': self.path,
612 'cachedir': cnf["Dir::Cache"],
613 'overridedir': cnf["Dir::Override"],
617 # Run apt-ftparchive generate
618 os.chdir(os.path.dirname(ac_name))
619 os.system('apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate %s' % os.path.basename(ac_name))
621 # Run apt-ftparchive release
622 # TODO: Eww - fix this
623 bname = os.path.basename(self.path)
627 # We have to remove the Release file otherwise it'll be included in the
630 os.unlink(os.path.join(bname, 'Release'))
634 os.system("""apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="%s" -o APT::FTPArchive::Release::Label="%s" -o APT::FTPArchive::Release::Description="%s" -o APT::FTPArchive::Release::Architectures="%s" release %s > Release""" % (self.origin, self.label, self.releasedescription, arches, bname))
636 # Crude hack with open and append, but this whole section is and should be redone.
637 if self.notautomatic:
638 release=open("Release", "a")
639 release.write("NotAutomatic: yes")
644 keyring = "--secret-keyring \"%s\"" % cnf["Dinstall::SigningKeyring"]
645 if cnf.has_key("Dinstall::SigningPubKeyring"):
646 keyring += " --keyring \"%s\"" % cnf["Dinstall::SigningPubKeyring"]
648 os.system("gpg %s --no-options --batch --no-tty --armour --default-key %s --detach-sign -o Release.gpg Release""" % (keyring, self.signingkey))
650 # Move the files if we got this far
651 os.rename('Release', os.path.join(bname, 'Release'))
653 os.rename('Release.gpg', os.path.join(bname, 'Release.gpg'))
655 # Clean up any left behind files
682 def clean_and_update(self, starttime, Logger, dryrun=False):
683 """WARNING: This routine commits for you"""
684 session = DBConn().session().object_session(self)
686 if self.generate_metadata and not dryrun:
687 self.write_metadata(starttime)
689 # Grab files older than our execution time
690 older = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()
696 Logger.log(["I: Would have removed %s from the queue" % o.fullpath])
698 Logger.log(["I: Removing %s from the queue" % o.fullpath])
699 os.unlink(o.fullpath)
702 # If it wasn't there, don't worry
703 if e.errno == ENOENT:
706 # TODO: Replace with proper logging call
707 Logger.log(["E: Could not remove %s" % o.fullpath])
714 for f in os.listdir(self.path):
715 if f.startswith('Packages') or f.startswith('Source') or f.startswith('Release') or f.startswith('advisory'):
719 r = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter_by(filename = f).one()
720 except NoResultFound:
721 fp = os.path.join(self.path, f)
723 Logger.log(["I: Would remove unused link %s" % fp])
725 Logger.log(["I: Removing unused link %s" % fp])
729 Logger.log(["E: Failed to unlink unreferenced file %s" % r.fullpath])
731 def add_file_from_pool(self, poolfile):
732 """Copies a file into the pool. Assumes that the PoolFile object is
733 attached to the same SQLAlchemy session as the Queue object is.
735 The caller is responsible for committing after calling this function."""
736 poolfile_basename = poolfile.filename[poolfile.filename.rindex(os.sep)+1:]
738 # Check if we have a file of this name or this ID already
739 for f in self.queuefiles:
740 if f.fileid is not None and f.fileid == poolfile.file_id or \
741 f.poolfile.filename == poolfile_basename:
742 # In this case, update the BuildQueueFile entry so we
743 # don't remove it too early
744 f.lastused = datetime.now()
745 DBConn().session().object_session(poolfile).add(f)
748 # Prepare BuildQueueFile object
749 qf = BuildQueueFile()
750 qf.build_queue_id = self.queue_id
751 qf.lastused = datetime.now()
752 qf.filename = poolfile_basename
754 targetpath = poolfile.fullpath
755 queuepath = os.path.join(self.path, poolfile_basename)
759 # We need to copy instead of symlink
761 utils.copy(targetpath, queuepath)
762 # NULL in the fileid field implies a copy
765 os.symlink(targetpath, queuepath)
766 qf.fileid = poolfile.file_id
770 # Get the same session as the PoolFile is using and add the qf to it
771 DBConn().session().object_session(poolfile).add(qf)
776 __all__.append('BuildQueue')
779 def get_build_queue(queuename, session=None):
781 Returns BuildQueue object for given C{queue name}, creating it if it does not
784 @type queuename: string
785 @param queuename: The name of the queue
787 @type session: Session
788 @param session: Optional SQLA session object (a temporary one will be
789 generated if not supplied)
792 @return: BuildQueue object for the given queue
795 q = session.query(BuildQueue).filter_by(queue_name=queuename)
799 except NoResultFound:
802 __all__.append('get_build_queue')
804 ################################################################################
806 class BuildQueueFile(object):
807 def __init__(self, *args, **kwargs):
811 return '<BuildQueueFile %s (%s)>' % (self.filename, self.build_queue_id)
815 return os.path.join(self.buildqueue.path, self.filename)
818 __all__.append('BuildQueueFile')
820 ################################################################################
822 class ChangePendingBinary(object):
823 def __init__(self, *args, **kwargs):
827 return '<ChangePendingBinary %s>' % self.change_pending_binary_id
829 __all__.append('ChangePendingBinary')
831 ################################################################################
833 class ChangePendingFile(object):
834 def __init__(self, *args, **kwargs):
838 return '<ChangePendingFile %s>' % self.change_pending_file_id
840 __all__.append('ChangePendingFile')
842 ################################################################################
844 class ChangePendingSource(object):
845 def __init__(self, *args, **kwargs):
849 return '<ChangePendingSource %s>' % self.change_pending_source_id
851 __all__.append('ChangePendingSource')
853 ################################################################################
855 class Component(object):
856 def __init__(self, *args, **kwargs):
859 def __eq__(self, val):
860 if isinstance(val, str):
861 return (self.component_name == val)
862 # This signals to use the normal comparison operator
863 return NotImplemented
865 def __ne__(self, val):
866 if isinstance(val, str):
867 return (self.component_name != val)
868 # This signals to use the normal comparison operator
869 return NotImplemented
872 return '<Component %s>' % self.component_name
875 __all__.append('Component')
878 def get_component(component, session=None):
880 Returns database id for given C{component}.
882 @type component: string
883 @param component: The name of the override type
886 @return: the database id for the given component
889 component = component.lower()
891 q = session.query(Component).filter_by(component_name=component)
895 except NoResultFound:
898 __all__.append('get_component')
900 ################################################################################
902 class DBConfig(object):
903 def __init__(self, *args, **kwargs):
907 return '<DBConfig %s>' % self.name
909 __all__.append('DBConfig')
911 ################################################################################
914 def get_or_set_contents_file_id(filename, session=None):
916 Returns database id for given filename.
918 If no matching file is found, a row is inserted.
920 @type filename: string
921 @param filename: The filename
922 @type session: SQLAlchemy
923 @param session: Optional SQL session object (a temporary one will be
924 generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for committing.
928 @return: the database id for the given component
931 q = session.query(ContentFilename).filter_by(filename=filename)
934 ret = q.one().cafilename_id
935 except NoResultFound:
936 cf = ContentFilename()
937 cf.filename = filename
939 session.commit_or_flush()
940 ret = cf.cafilename_id
944 __all__.append('get_or_set_contents_file_id')
947 def get_contents(suite, overridetype, section=None, session=None):
949 Returns contents for a suite / overridetype combination, limiting
950 to a section if not None.
953 @param suite: Suite object
955 @type overridetype: OverrideType
956 @param overridetype: OverrideType object
958 @type section: Section
959 @param section: Optional section object to limit results to
961 @type session: SQLAlchemy
962 @param session: Optional SQL session object (a temporary one will be
963 generated if not supplied)
966 @return: ResultsProxy object set up to return tuples of (filename, section,
970 # find me all of the contents for a given suite
971 contents_q = """SELECT (p.path||'/'||n.file) AS fn,
975 FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
976 JOIN content_file_names n ON (c.filename=n.id)
977 JOIN binaries b ON (b.id=c.binary_pkg)
978 JOIN override o ON (o.package=b.package)
979 JOIN section s ON (s.id=o.section)
980 WHERE o.suite = :suiteid AND o.type = :overridetypeid
981 AND b.type=:overridetypename"""
983 vals = {'suiteid': suite.suite_id,
984 'overridetypeid': overridetype.overridetype_id,
985 'overridetypename': overridetype.overridetype}
987 if section is not None:
988 contents_q += " AND s.id = :sectionid"
989 vals['sectionid'] = section.section_id
991 contents_q += " ORDER BY fn"
993 return session.execute(contents_q, vals)
995 __all__.append('get_contents')
997 ################################################################################
999 class ContentFilepath(object):
1000 def __init__(self, *args, **kwargs):
1004 return '<ContentFilepath %s>' % self.filepath
1006 __all__.append('ContentFilepath')
1009 def get_or_set_contents_path_id(filepath, session=None):
1011 Returns database id for given path.
1013 If no matching file is found, a row is inserted.
1015 @type filepath: string
1016 @param filepath: The filepath
1018 @type session: SQLAlchemy
1019 @param session: Optional SQL session object (a temporary one will be
1020 generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for committing.
1024 @return: the database id for the given path
1027 q = session.query(ContentFilepath).filter_by(filepath=filepath)
1030 ret = q.one().cafilepath_id
1031 except NoResultFound:
1032 cf = ContentFilepath()
1033 cf.filepath = filepath
1035 session.commit_or_flush()
1036 ret = cf.cafilepath_id
1040 __all__.append('get_or_set_contents_path_id')
1042 ################################################################################
1044 class ContentAssociation(object):
1045 def __init__(self, *args, **kwargs):
1049 return '<ContentAssociation %s>' % self.ca_id
1051 __all__.append('ContentAssociation')
1053 def insert_content_paths(binary_id, fullpaths, session=None):
1055 Make sure given path is associated with given binary id
1057 @type binary_id: int
1058 @param binary_id: the id of the binary
1059 @type fullpaths: list
1060 @param fullpaths: the list of paths of the file being associated with the binary
1061 @type session: SQLAlchemy session
1062 @param session: Optional SQLAlchemy session. If this is passed, the caller
1063 is responsible for ensuring a transaction has begun and committing the
1064 results or rolling back based on the result code. If not passed, a commit
1065 will be performed at the end of the function, otherwise the caller is
    responsible for committing.
1068 @return: True upon success
1071 privatetrans = False
1073 session = DBConn().session()
1078 def generate_path_dicts():
1079 for fullpath in fullpaths:
1080 if fullpath.startswith( './' ):
1081 fullpath = fullpath[2:]
1083 yield {'filename':fullpath, 'id': binary_id }
1085 for d in generate_path_dicts():
1086 session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
1095 traceback.print_exc()
1097 # Only rollback if we set up the session ourself
1104 __all__.append('insert_content_paths')
1106 ################################################################################
1108 class DSCFile(object):
1109 def __init__(self, *args, **kwargs):
1113 return '<DSCFile %s>' % self.dscfile_id
1115 __all__.append('DSCFile')
1118 def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
1120 Returns a list of DSCFiles which may be empty
1122 @type dscfile_id: int (optional)
1123 @param dscfile_id: the dscfile_id of the DSCFiles to find
1125 @type source_id: int (optional)
1126 @param source_id: the source id related to the DSCFiles to find
1128 @type poolfile_id: int (optional)
1129 @param poolfile_id: the poolfile id related to the DSCFiles to find
1132 @return: Possibly empty list of DSCFiles
1135 q = session.query(DSCFile)
1137 if dscfile_id is not None:
1138 q = q.filter_by(dscfile_id=dscfile_id)
1140 if source_id is not None:
1141 q = q.filter_by(source_id=source_id)
1143 if poolfile_id is not None:
1144 q = q.filter_by(poolfile_id=poolfile_id)
1148 __all__.append('get_dscfiles')
1150 ################################################################################
1152 class PoolFile(ORMObject):
1153 def __init__(self, filename = None, location = None, filesize = -1, \
1155 self.filename = filename
1156 self.location = location
1157 self.filesize = filesize
1158 self.md5sum = md5sum
1162 return os.path.join(self.location.path, self.filename)
1164 def is_valid(self, filesize = -1, md5sum = None):\
1165 return self.filesize == filesize and self.md5sum == md5sum
1167 def properties(self):
1168 return ['filename', 'file_id', 'filesize', 'md5sum', 'sha1sum', \
1169 'sha256sum', 'location', 'source', 'last_used']
1171 __all__.append('PoolFile')
1174 def check_poolfile(filename, filesize, md5sum, location_id, session=None):
1177 (ValidFileFound [boolean], PoolFile object or None)
1179 @type filename: string
1180 @param filename: the filename of the file to check against the DB
1183 @param filesize: the size of the file to check against the DB
1185 @type md5sum: string
1186 @param md5sum: the md5sum of the file to check against the DB
1188 @type location_id: int
1189 @param location_id: the id of the location to look in
1192 @return: Tuple of length 2.
1193 - If valid pool file found: (C{True}, C{PoolFile object})
1194 - If valid pool file not found:
1195 - (C{False}, C{None}) if no file found
1196 - (C{False}, C{PoolFile object}) if file found with size/md5sum mismatch
1199 poolfile = session.query(Location).get(location_id). \
1200 files.filter_by(filename=filename).first()
1202 if poolfile and poolfile.is_valid(filesize = filesize, md5sum = md5sum):
1205 return (valid, poolfile)
1207 __all__.append('check_poolfile')
1209 # TODO: the implementation can trivially be inlined at the place where the
1210 # function is called
def get_poolfile_by_id(file_id, session=None):
    """
    Returns a PoolFile objects or None for the given id

    @type file_id: int
    @param file_id: the id of the file to look for

    @rtype: PoolFile or None
    @return: either the PoolFile object or None
    """
    # Primary-key lookup; Query.get returns None when no row matches.
    q = session.query(PoolFile)
    return q.get(file_id)
1225 __all__.append('get_poolfile_by_id')
1228 def get_poolfile_like_name(filename, session=None):
1230 Returns an array of PoolFile objects which are like the given name
1232 @type filename: string
1233 @param filename: the filename of the file to check against the DB
1236 @return: array of PoolFile objects
1239 # TODO: There must be a way of properly using bind parameters with %FOO%
1240 q = session.query(PoolFile).filter(PoolFile.filename.like('%%/%s' % filename))
1244 __all__.append('get_poolfile_like_name')
1247 def add_poolfile(filename, datadict, location_id, session=None):
1249 Add a new file to the pool
1251 @type filename: string
1252 @param filename: filename
1254 @type datadict: dict
1255 @param datadict: dict with needed data
1257 @type location_id: int
1258 @param location_id: database id of the location
1261 @return: the PoolFile object created
1263 poolfile = PoolFile()
1264 poolfile.filename = filename
1265 poolfile.filesize = datadict["size"]
1266 poolfile.md5sum = datadict["md5sum"]
1267 poolfile.sha1sum = datadict["sha1sum"]
1268 poolfile.sha256sum = datadict["sha256sum"]
1269 poolfile.location_id = location_id
1271 session.add(poolfile)
1272 # Flush to get a file id (NB: This is not a commit)
1277 __all__.append('add_poolfile')
1279 ################################################################################
class Fingerprint(object):
    """Database object for one row of the C{fingerprint} table.

    Attributes beyond C{fingerprint} are populated by the SQLAlchemy
    mapper configured elsewhere in this module.
    """

    def __init__(self, fingerprint = None):
        self.fingerprint = fingerprint

    # The repr-style return was stranded inside __init__ (which must return
    # None); restore it as a proper __repr__.
    def __repr__(self):
        return '<Fingerprint %s>' % self.fingerprint
__all__.append('Fingerprint')

def get_fingerprint(fpr, session=None):
    """
    Returns Fingerprint object for given fpr.

    @type fpr: string
    @param fpr: The fpr to find / add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).

    @rtype: Fingerprint or None
    @return: the Fingerprint object for the given fpr or None
    """

    q = session.query(Fingerprint).filter_by(fingerprint=fpr)

    # NOTE(review): the matching try/.one() lines appear to be missing from
    # this listing -- confirm against VCS.
    except NoResultFound:

__all__.append('get_fingerprint')
def get_or_set_fingerprint(fpr, session=None):
    """
    Returns Fingerprint object for given fpr.

    If no matching fpr is found, a row is inserted.

    @type fpr: string
    @param fpr: The fpr to find / add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for committing.
    A flush will be performed either way.

    @rtype: Fingerprint
    @return: the Fingerprint object for the given fpr
    """

    q = session.query(Fingerprint).filter_by(fingerprint=fpr)

    except NoResultFound:
        # Not present yet: insert a new row and make it visible to the caller
        fingerprint = Fingerprint()
        fingerprint.fingerprint = fpr
        session.add(fingerprint)
        session.commit_or_flush()

__all__.append('get_or_set_fingerprint')
1352 ################################################################################
# Helper routine for Keyring class
def get_ldap_name(entry):
    """Join the cn/mn/sn attributes of an LDAP result entry into one name."""
    for k in ["cn", "mn", "sn"]:
        # skip empty values and the "-" placeholder
        if ret and ret[0] != "" and ret[0] != "-":
    return " ".join(name)
1363 ################################################################################
class Keyring(object):
    """Database object for a GPG keyring, plus helpers to load the keys it
    contains (via gpg) and map them to uids (via LDAP or the keyring
    itself)."""

    # gpg is asked for machine-readable (--with-colons) output; the double
    # --fingerprint also emits fingerprints for subkeys.
    gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
                     " --with-colons --fingerprint --fingerprint"

    def __init__(self, *args, **kwargs):

        return '<Keyring %s>' % self.keyring_name

    def de_escape_gpg_str(self, txt):
        # gpg colon output escapes raw bytes as \xNN; split keeps the escape
        # tokens at odd indices, which are then decoded back to characters.
        esclist = re.split(r'(\\x..)', txt)
        for x in range(1,len(esclist),2):
            esclist[x] = "%c" % (int(esclist[x][2:],16))
        return "".join(esclist)

    def parse_address(self, uid):
        """parses uid and returns a tuple of real name and email address"""
        (name, address) = email.Utils.parseaddr(uid)
        # strip any parenthesised comment from the real-name part
        name = re.sub(r"\s*[(].*[)]", "", name)
        name = self.de_escape_gpg_str(name)
        return (name, address)

    def load_keys(self, keyring):
        """Populate self.keys / self.fpr_lookup from the given keyring file
        by parsing gpg's colon-separated listing."""
        if not self.keyring_id:
            raise Exception('Must be initialized with database information')

        k = os.popen(self.gpg_invocation % keyring, "r")

        for line in k.xreadlines():
            field = line.split(":")
            if field[0] == "pub":
                # new primary key record; field[9] carries the uid string
                (name, addr) = self.parse_address(field[9])
                self.keys[key]["email"] = addr
                self.keys[key]["name"] = name
                self.keys[key]["fingerprints"] = []
            elif key and field[0] == "sub" and len(field) >= 12:
                # subkey capabilities: 's' means usable for signing
                signingkey = ("s" in field[11])
            elif key and field[0] == "uid":
                (name, addr) = self.parse_address(field[9])
                if "email" not in self.keys[key] and "@" in addr:
                    self.keys[key]["email"] = addr
                    self.keys[key]["name"] = name
            elif signingkey and field[0] == "fpr":
                # remember fingerprint -> key mapping for signing keys
                self.keys[key]["fingerprints"].append(field[9])
                self.fpr_lookup[field[9]] = key

    def import_users_from_ldap(self, session):
        """Attach uids to loaded keys using the LDAP directory configured in
        Import-LDAP-Fingerprints; returns (byname, byuid) dicts."""
        LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
        LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]

        l = ldap.open(LDAPServer)
        l.simple_bind_s("","")  # anonymous bind
        Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
               "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
               ["uid", "keyfingerprint", "cn", "mn", "sn"])

        ldap_fin_uid_id = {}

            # NOTE(review): loop header over the LDAP results appears to be
            # missing from this listing -- confirm against VCS.
            uid = entry["uid"][0]
            name = get_ldap_name(entry)
            fingerprints = entry["keyFingerPrint"]
            for f in fingerprints:
                key = self.fpr_lookup.get(f, None)
                if key not in self.keys:
                self.keys[key]["uid"] = uid
                keyid = get_or_set_uid(uid, session).uid_id
                byuid[keyid] = (uid, name)
                byname[uid] = (keyid, name)

        return (byname, byuid)

    def generate_users_from_keyring(self, format, session):
        """Generate uids from the keys' email addresses using C{format};
        returns (byname, byuid) dicts."""
        for x in self.keys.keys():
            if "email" not in self.keys[x]:
                # no usable address: mark the key with the invalid-uid marker
                self.keys[x]["uid"] = format % "invalid-uid"
                uid = format % self.keys[x]["email"]
                keyid = get_or_set_uid(uid, session).uid_id
                byuid[keyid] = (uid, self.keys[x]["name"])
                byname[uid] = (keyid, self.keys[x]["name"])
                self.keys[x]["uid"] = uid
            # shared placeholder entry for keys without a usable address
            uid = format % "invalid-uid"
            keyid = get_or_set_uid(uid, session).uid_id
            byuid[keyid] = (uid, "ungeneratable user id")
            byname[uid] = (keyid, "ungeneratable user id")

        return (byname, byuid)

__all__.append('Keyring')
def get_keyring(keyring, session=None):
    """
    If C{keyring} does not have an entry in the C{keyrings} table yet, return None
    If C{keyring} already has an entry, simply return the existing Keyring

    @type keyring: string
    @param keyring: the keyring name

    @rtype: Keyring
    @return: the Keyring object for this keyring
    """

    q = session.query(Keyring).filter_by(keyring_name=keyring)

    except NoResultFound:

__all__.append('get_keyring')
1509 ################################################################################
class KeyringACLMap(object):
    """Database object for one row of the keyring/ACL mapping table.

    Attributes are populated by the SQLAlchemy mapper configured elsewhere
    in this module.
    """

    def __init__(self, *args, **kwargs):
        pass

    # Restored as its own method: the repr return was stranded in __init__.
    def __repr__(self):
        return '<KeyringACLMap %s>' % self.keyring_acl_map_id
__all__.append('KeyringACLMap')

################################################################################

class DBChange(object):
    """Database object for one row of the C{changes} table (an uploaded
    .changes file)."""

    def __init__(self, *args, **kwargs):

        # NOTE(review): repr-style return inside __init__ -- a
        # `def __repr__(self):` header appears to be missing; confirm.
        return '<DBChange %s>' % self.changesname

    def clean_from_queue(self):
        """Detach this changes entry from its policy queue."""
        session = DBConn().session().object_session(self)

        # Remove changes_pool_files entries

        # Remove changes_pending_files references

        # Clear out of queue
        self.in_queue = None
        self.approved_for_id = None

__all__.append('DBChange')
def get_dbchange(filename, session=None):
    """
    returns DBChange object for given C{filename}.

    @type filename: string
    @param filename: the name of the file

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: DBChange
    @return: DBChange object for the given filename (C{None} if not present)
    """

    q = session.query(DBChange).filter_by(changesname=filename)

    except NoResultFound:

__all__.append('get_dbchange')
1569 ################################################################################
class Location(object):
    """Database object for one row of the C{location} table (an on-disk
    archive path)."""

    def __init__(self, path = None):
        self.path = path
        # the column 'type' should go away, see comment at mapper
        self.archive_type = 'pool'

    # Restored as its own method: the repr return was stranded in __init__.
    def __repr__(self):
        return '<Location %s (%s)>' % (self.path, self.location_id)
__all__.append('Location')

def get_location(location, component=None, archive=None, session=None):
    """
    Returns Location object for the given combination of location, component
    and archive

    @type location: string
    @param location: the path of the location, e.g. I{/srv/ftp-master.debian.org/ftp/pool/}

    @type component: string
    @param component: the component name (if None, no restriction applied)

    @type archive: string
    @param archive: the archive name (if None, no restriction applied)

    @rtype: Location / None
    @return: Either a Location object or None if one can't be found
    """

    q = session.query(Location).filter_by(path=location)

    if archive is not None:
        q = q.join(Archive).filter_by(archive_name=archive)

    if component is not None:
        q = q.join(Component).filter_by(component_name=component)

    except NoResultFound:

__all__.append('get_location')
1616 ################################################################################
class Maintainer(object):
    """Database object for one row of the C{maintainer} table."""

    def __init__(self, name = None):
        self.name = name

    # Restored as its own method: the repr return was stranded in __init__.
    def __repr__(self):
        return '''<Maintainer '%s' (%s)>''' % (self.name, self.maintainer_id)

    def get_split_maintainer(self):
        """Return the (rfc822, rfc2047, name, email) split of this
        maintainer's name via fix_maintainer, or empty strings if unset."""
        if not hasattr(self, 'name') or self.name is None:
            return ('', '', '', '')

        return fix_maintainer(self.name.strip())
__all__.append('Maintainer')

def get_or_set_maintainer(name, session=None):
    """
    Returns Maintainer object for given maintainer name.

    If no matching maintainer name is found, a row is inserted.

    @type name: string
    @param name: The maintainer name to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for committing.
    A flush will be performed either way.

    @rtype: Maintainer
    @return: the Maintainer object for the given maintainer
    """

    q = session.query(Maintainer).filter_by(name=name)

    except NoResultFound:
        # Not present yet: insert a new row and make it visible to the caller
        maintainer = Maintainer()
        maintainer.name = name
        session.add(maintainer)
        session.commit_or_flush()

__all__.append('get_or_set_maintainer')
def get_maintainer(maintainer_id, session=None):
    """
    Return the Maintainer behind C{maintainer_id} or None if that
    maintainer_id is invalid.

    @type maintainer_id: int
    @param maintainer_id: the id of the maintainer

    @rtype: Maintainer
    @return: the Maintainer with this C{maintainer_id}
    """

    return session.query(Maintainer).get(maintainer_id)

__all__.append('get_maintainer')
1684 ################################################################################
class NewComment(object):
    """Database object for one NEW-queue comment row.

    Attributes (C{package}, C{version}, C{comment_id}, ...) are populated by
    the SQLAlchemy mapper configured elsewhere in this module.
    """

    def __init__(self, *args, **kwargs):
        pass

    # Restored as its own method: the repr return was stranded in __init__.
    def __repr__(self):
        return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)
__all__.append('NewComment')

def has_new_comment(package, version, session=None):
    """
    Returns true if the given combination of C{package}, C{version} has a comment.

    @type package: string
    @param package: name of the package

    @type version: string
    @param version: package version

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: boolean
    @return: true/false
    """

    q = session.query(NewComment)
    q = q.filter_by(package=package)
    q = q.filter_by(version=version)

    return bool(q.count() > 0)

__all__.append('has_new_comment')
def get_new_comments(package=None, version=None, comment_id=None, session=None):
    """
    Returns (possibly empty) list of NewComment objects for the given
    parameters

    @type package: string (optional)
    @param package: name of the package

    @type version: string (optional)
    @param version: package version

    @type comment_id: int (optional)
    @param comment_id: An id of a comment

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of NewComment objects will be returned
    """

    q = session.query(NewComment)
    if package is not None: q = q.filter_by(package=package)
    if version is not None: q = q.filter_by(version=version)
    if comment_id is not None: q = q.filter_by(comment_id=comment_id)

__all__.append('get_new_comments')
1754 ################################################################################
class Override(object):
    """Database object for one row of the C{override} table.

    Attributes (C{package}, C{suite_id}, ...) are populated by the
    SQLAlchemy mapper configured elsewhere in this module.
    """

    def __init__(self, *args, **kwargs):
        pass

    # Restored as its own method: the repr return was stranded in __init__.
    def __repr__(self):
        return '<Override %s (%s)>' % (self.package, self.suite_id)
__all__.append('Override')

def get_override(package, suite=None, component=None, overridetype=None, session=None):
    """
    Returns Override object for the given parameters

    @type package: string
    @param package: The name of the package

    @type suite: string, list or None
    @param suite: The name of the suite (or suites if a list) to limit to.  If
                  None, don't limit.  Defaults to None.

    @type component: string, list or None
    @param component: The name of the component (or components if a list) to
                      limit to.  If None, don't limit.  Defaults to None.

    @type overridetype: string, list or None
    @param overridetype: The name of the overridetype (or overridetypes if a list) to
                         limit to.  If None, don't limit.  Defaults to None.

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of Override objects will be returned
    """

    q = session.query(Override)
    q = q.filter_by(package=package)

    # Each optional filter accepts a scalar or a list; normalise to a list
    if suite is not None:
        if not isinstance(suite, list): suite = [suite]
        q = q.join(Suite).filter(Suite.suite_name.in_(suite))

    if component is not None:
        if not isinstance(component, list): component = [component]
        q = q.join(Component).filter(Component.component_name.in_(component))

    if overridetype is not None:
        if not isinstance(overridetype, list): overridetype = [overridetype]
        q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype))

__all__.append('get_override')
1813 ################################################################################
class OverrideType(object):
    """Database object for one row of the C{override_type} table."""

    def __init__(self, *args, **kwargs):
        pass

    # Restored as its own method: the repr return was stranded in __init__.
    def __repr__(self):
        return '<OverrideType %s>' % self.overridetype
__all__.append('OverrideType')

def get_override_type(override_type, session=None):
    """
    Returns OverrideType object for given C{override type}.

    @type override_type: string
    @param override_type: The name of the override type

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: OverrideType
    @return: the OverrideType object for the given override type
    """

    q = session.query(OverrideType).filter_by(overridetype=override_type)

    except NoResultFound:

__all__.append('get_override_type')
1849 ################################################################################
class DebContents(object):
    """Database object for one deb contents row (a file shipped by a binary
    package)."""

    def __init__(self, *args, **kwargs):
        pass

    # Restored as its own method (the repr return was stranded in __init__)
    # and the 'DebConetnts' typo in the repr text fixed.
    def __repr__(self):
        return '<DebContents %s: %s>' % (self.package.package, self.file)
# Export in the module's public API list (defined earlier in the file).
__all__.append('DebContents')
class UdebContents(object):
    """Database object for one udeb contents row (a file shipped by a udeb
    package)."""

    def __init__(self, *args, **kwargs):
        pass

    # Restored as its own method (the repr return was stranded in __init__)
    # and the 'UdebConetnts' typo in the repr text fixed.
    def __repr__(self):
        return '<UdebContents %s: %s>' % (self.package.package, self.file)
# Export in the module's public API list (defined earlier in the file).
__all__.append('UdebContents')
class PendingBinContents(object):
    """Database object for a temporarily recorded binary contents row."""

    def __init__(self, *args, **kwargs):
        pass

    # Restored as its own method: the repr return was stranded in __init__.
    def __repr__(self):
        return '<PendingBinContents %s>' % self.contents_id
__all__.append('PendingBinContents')

def insert_pending_content_paths(package,
    """
    Make sure given paths are temporarily associated with given
    package

    @type package: dict
    @param package: the package to associate with should have been read in from the binary control file
    @type fullpaths: list
    @param fullpaths: the list of paths of the file being associated with the binary
    @type session: SQLAlchemy session
    @param session: Optional SQLAlchemy session.  If this is passed, the caller
    is responsible for ensuring a transaction has begun and committing the
    results or rolling back based on the result code.  If not passed, a commit
    will be performed at the end of the function

    @return: True upon success, False if there is a problem
    """

    # Open a private session/transaction only if the caller gave us none
    privatetrans = False
        session = DBConn().session()

    arch = get_architecture(package['Architecture'], session)
    arch_id = arch.arch_id

    # Remove any already existing recorded files for this package
    q = session.query(PendingBinContents)
    q = q.filter_by(package=package['Package'])
    q = q.filter_by(version=package['Version'])
    q = q.filter_by(architecture=arch_id)

    for fullpath in fullpaths:

        # normalise "./path" to "path"
        if fullpath.startswith( "./" ):
            fullpath = fullpath[2:]

        pca = PendingBinContents()
        pca.package = package['Package']
        pca.version = package['Version']
        pca.architecture = arch_id
        # NOTE(review): the udeb/deb branch around these two lines appears
        # to be missing from this listing -- confirm against VCS.
        pca.type = 8 # gross
        pca.type = 7 # also gross

        # Only commit if we set up the session ourself
    except Exception, e:
        traceback.print_exc()

        # Only rollback if we set up the session ourself

__all__.append('insert_pending_content_paths')
1954 ################################################################################
class PolicyQueue(object):
    """Database object for one row of the C{policy_queue} table."""

    def __init__(self, *args, **kwargs):
        pass

    # Restored as its own method: the repr return was stranded in __init__.
    def __repr__(self):
        return '<PolicyQueue %s>' % self.queue_name
__all__.append('PolicyQueue')

def get_policy_queue(queuename, session=None):
    """
    Returns PolicyQueue object for given C{queue name}

    @type queuename: string
    @param queuename: The name of the queue

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: PolicyQueue
    @return: PolicyQueue object for the given queue
    """

    q = session.query(PolicyQueue).filter_by(queue_name=queuename)

    except NoResultFound:

__all__.append('get_policy_queue')
def get_policy_queue_from_path(pathname, session=None):
    """
    Returns PolicyQueue object for given C{path name}

    @type pathname: string
    @param pathname: The path

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: PolicyQueue
    @return: PolicyQueue object for the given queue
    """

    q = session.query(PolicyQueue).filter_by(path=pathname)

    except NoResultFound:

__all__.append('get_policy_queue_from_path')
2015 ################################################################################
class Priority(object):
    """Database object for one row of the C{priority} table.

    Instances compare equal/unequal to the bare priority name string.
    """

    def __init__(self, *args, **kwargs):
        pass

    def __eq__(self, val):
        if isinstance(val, str):
            return (self.priority == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.priority != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    # Restored as its own method: the repr return was stranded after __ne__
    # without a method header.
    def __repr__(self):
        return '<Priority %s (%s)>' % (self.priority, self.priority_id)
__all__.append('Priority')

def get_priority(priority, session=None):
    """
    Returns Priority object for given C{priority name}.

    @type priority: string
    @param priority: The name of the priority

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Priority
    @return: Priority object for the given priority
    """

    q = session.query(Priority).filter_by(priority=priority)

    except NoResultFound:

__all__.append('get_priority')
def get_priorities(session=None):
    """
    Returns dictionary of priority names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of priority names -> id mappings
    """

    q = session.query(Priority)
        ret[x.priority] = x.priority_id

__all__.append('get_priorities')
2085 ################################################################################
class Section(object):
    """Database object for one row of the C{section} table.

    Instances compare equal/unequal to the bare section name string.
    """

    def __init__(self, *args, **kwargs):
        pass

    def __eq__(self, val):
        if isinstance(val, str):
            return (self.section == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.section != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    # Restored as its own method: the repr return was stranded after __ne__
    # without a method header.
    def __repr__(self):
        return '<Section %s>' % self.section
__all__.append('Section')

def get_section(section, session=None):
    """
    Returns Section object for given C{section name}.

    @type section: string
    @param section: The name of the section

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Section
    @return: Section object for the given section name
    """

    q = session.query(Section).filter_by(section=section)

    except NoResultFound:

__all__.append('get_section')
def get_sections(session=None):
    """
    Returns dictionary of section names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of section names -> id mappings
    """

    q = session.query(Section)
        ret[x.section] = x.section_id

__all__.append('get_sections')
2155 ################################################################################
class DBSource(object):
    """Database object for one row of the C{source} table (a source
    package)."""

    def __init__(self, source = None, version = None, maintainer = None, \
        changedby = None, poolfile = None, install_date = None):
        self.source = source
        self.version = version
        self.maintainer = maintainer
        self.changedby = changedby
        self.poolfile = poolfile
        self.install_date = install_date

    # Restored as its own method: the repr return was stranded in __init__.
    def __repr__(self):
        return '<DBSource %s (%s)>' % (self.source, self.version)
__all__.append('DBSource')

def source_exists(source, source_version, suites = ["any"], session=None):
    """
    Ensure that source exists somewhere in the archive for the binary
    upload being processed.
      1. exact match     => 1.0-3
      2. bin-only NMU    => 1.0-3+b1 , 1.0-3.1+b1

    @type source: string
    @param source: source name

    @type source_version: string
    @param source_version: expected source version

    @type suites: list
    @param suites: list of suites to check in, default I{any}
        (NOTE(review): mutable default argument -- harmless as long as it is
        never mutated, but worth confirming)

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: int
    @return: returns 1 if a source with expected version is found, otherwise 0
    """

    # strip the +bN binNMU suffix to also accept the un-NMUed version
    from daklib.regexes import re_bin_only_nmu
    orig_source_version = re_bin_only_nmu.sub('', source_version)

    for suite in suites:
        q = session.query(DBSource).filter_by(source=source). \
            filter(DBSource.version.in_([source_version, orig_source_version]))
            # source must exist in suite X, or in some other suite that's
            # mapped to X, recursively... silent-maps are counted too,
            # unreleased-maps aren't.
            maps = cnf.ValueList("SuiteMappings")[:]
            maps = [ m.split() for m in maps ]
            maps = [ (x[1], x[2]) for x in maps
                     if x[0] == "map" or x[0] == "silent-map" ]
                if x[1] in s and x[0] not in s:
            q = q.filter(DBSource.suites.any(Suite.suite_name.in_(s)))

    # No source found so return not ok

__all__.append('source_exists')
def get_suites_source_in(source, session=None):
    """
    Returns list of Suite objects which given C{source} name is in

    @type source: str
    @param source: DBSource package name to search for

    @rtype: list
    @return: list of Suite objects for the given source
    """

    return session.query(Suite).filter(Suite.sources.any(source=source)).all()

__all__.append('get_suites_source_in')
def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=None):
    """
    Returns list of DBSource objects for given C{source} name and other parameters

    @type source: str
    @param source: DBSource package name to search for

    @type version: str or None
    @param version: DBSource version name to search for or None if not applicable

    @type dm_upload_allowed: bool
    @param dm_upload_allowed: If None, no effect.  If True or False, only
    return packages with that dm_upload_allowed setting

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of DBSource objects for the given name (may be empty)
    """

    q = session.query(DBSource).filter_by(source=source)

    if version is not None:
        q = q.filter_by(version=version)

    if dm_upload_allowed is not None:
        q = q.filter_by(dm_upload_allowed=dm_upload_allowed)

__all__.append('get_sources_from_name')
# FIXME: This function fails badly if it finds more than 1 source package and
# its implementation is trivial enough to be inlined.
def get_source_in_suite(source, suite, session=None):
    """
    Returns a DBSource object for a combination of C{source} and C{suite}.

      - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
      - B{suite} - a suite name, eg. I{unstable}

    @type source: string
    @param source: source package name

    @type suite: string
    @param suite: the suite name

    @rtype: DBSource
    @return: the DBSource object for I{source} in I{suite}
    """

    q = get_suite(suite, session).get_sources(source)
    except NoResultFound:

__all__.append('get_source_in_suite')
2313 ################################################################################
def add_dsc_to_db(u, filename, session=None):
    """Record a .dsc upload in the database: creates the DBSource row, its
    pool files, dsc_files entries and src_uploaders.

    Returns (source, dsc_component, dsc_location_id, pfs) where pfs is the
    list of PoolFile objects touched/created.
    """
    entry = u.pkg.files[filename]

    source.source = u.pkg.dsc["source"]
    source.version = u.pkg.dsc["version"]      # NB: not files[file]["version"], that has no epoch
    source.maintainer_id = get_or_set_maintainer(u.pkg.dsc["maintainer"], session).maintainer_id
    source.changedby_id = get_or_set_maintainer(u.pkg.changes["changed-by"], session).maintainer_id
    source.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    source.install_date = datetime.now().date()

    dsc_component = entry["component"]
    dsc_location_id = entry["location id"]

    source.dm_upload_allowed = (u.pkg.dsc.get("dm-upload-allowed", '') == "yes")

    # Set up a new poolfile if necessary
    if not entry.has_key("files id") or not entry["files id"]:
        filename = entry["pool name"] + filename
        poolfile = add_poolfile(filename, entry, dsc_location_id, session)
        pfs.append(poolfile)
        entry["files id"] = poolfile.file_id

    source.poolfile_id = entry["files id"]

    suite_names = u.pkg.changes["distribution"].keys()
    source.suites = session.query(Suite). \
        filter(Suite.suite_name.in_(suite_names)).all()

    # Add the source files to the DB (files and dsc_files)
    dscfile.source_id = source.source_id
    dscfile.poolfile_id = entry["files id"]
    session.add(dscfile)

    for dsc_file, dentry in u.pkg.dsc_files.items():
        df.source_id = source.source_id

        # If the .orig tarball is already in the pool, its
        # files id is stored in dsc_files by check_dsc().
        files_id = dentry.get("files id", None)

        # Find the entry in the files hash
        # TODO: Bail out here properly
        for f, e in u.pkg.files.items():

        if files_id is None:
            filename = dfentry["pool name"] + dsc_file

            # Check whether the pool already holds an identical file
            (found, obj) = check_poolfile(filename, dentry["size"], dentry["md5sum"], dsc_location_id)
            # FIXME: needs to check for -1/-2 and or handle exception
            if found and obj is not None:
                files_id = obj.file_id

        # If still not found, add it
        if files_id is None:
            # HACK: Force sha1sum etc into dentry
            dentry["sha1sum"] = dfentry["sha1sum"]
            dentry["sha256sum"] = dfentry["sha256sum"]
            poolfile = add_poolfile(filename, dentry, dsc_location_id, session)
            pfs.append(poolfile)
            files_id = poolfile.file_id
        # NOTE(review): this lookup appears to belong to an else branch of
        # the check above in the original -- confirm against VCS.
        poolfile = get_poolfile_by_id(files_id, session)
        if poolfile is None:
            utils.fubar("INTERNAL ERROR. Found no poolfile with id %d" % files_id)
        pfs.append(poolfile)

        df.poolfile_id = files_id

    # Add the src_uploaders to the DB
    uploader_ids = [source.maintainer_id]
    if u.pkg.dsc.has_key("uploaders"):
        # split on ">\t" so commas inside maintainer names survive
        for up in u.pkg.dsc["uploaders"].replace(">, ", ">\t").split("\t"):
            uploader_ids.append(get_or_set_maintainer(up, session).maintainer_id)

    for up_id in uploader_ids:
        if added_ids.has_key(up_id):
            utils.warn("Already saw uploader %s for source %s" % (up_id, source.source))

        su.maintainer_id = up_id
        su.source_id = source.source_id

    return source, dsc_component, dsc_location_id, pfs

__all__.append('add_dsc_to_db')
def add_deb_to_db(u, filename, session=None):
    """
    Contrary to what you might expect, this routine deals with both
    debs and udebs.  That info is in 'dbtype', whilst 'type' is
    'deb' for both of them
    """
    entry = u.pkg.files[filename]

    bin.package = entry["package"]
    bin.version = entry["version"]
    bin.maintainer_id = get_or_set_maintainer(entry["maintainer"], session).maintainer_id
    bin.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    bin.arch_id = get_architecture(entry["architecture"], session).arch_id
    bin.binarytype = entry["dbtype"]

    # Resolve the pool file / location entries for this binary
    filename = entry["pool name"] + filename
    fullpath = os.path.join(cnf["Dir::Pool"], filename)
    if not entry.get("location id", None):
        entry["location id"] = get_location(cnf["Dir::Pool"], entry["component"], session=session).location_id

    if entry.get("files id", None):
        poolfile = get_poolfile_by_id(bin.poolfile_id)
        bin.poolfile_id = entry["files id"]
        # NOTE(review): the two lines below appear to belong to an else
        # branch in the original -- confirm against VCS.
        poolfile = add_poolfile(filename, entry, entry["location id"], session)
        bin.poolfile_id = entry["files id"] = poolfile.file_id

    # A binary must map to exactly one source package
    bin_sources = get_sources_from_name(entry["source package"], entry["source version"], session=session)
    if len(bin_sources) != 1:
        raise NoSourceFieldError, "Unable to find a unique source id for %s (%s), %s, file %s, type %s, signed by %s" % \
                                  (bin.package, bin.version, entry["architecture"],
                                   filename, bin.binarytype, u.pkg.changes["fingerprint"])

    bin.source_id = bin_sources[0].source_id

    # Add and flush object so it has an ID

    # Add BinAssociations
    for suite_name in u.pkg.changes["distribution"].keys():
        ba = BinAssociation()
        ba.binary_id = bin.binary_id
        ba.suite_id = get_suite(suite_name).suite_id

    # Deal with contents - disabled for now
    #contents = copy_temporary_contents(bin.package, bin.version, bin.architecture.arch_string, os.path.basename(filename), None, session)
    #    print "REJECT\nCould not determine contents of package %s" % bin.package
    #    session.rollback()
    #    raise MissingContents, "No contents stored for package %s, and couldn't determine contents of %s" % (bin.package, filename)

__all__.append('add_deb_to_db')
2487 ################################################################################
class SourceACL(object):
    """Database object for one row of the C{source_acl} table."""

    def __init__(self, *args, **kwargs):
        pass

    # Restored as its own method: the repr return was stranded in __init__.
    def __repr__(self):
        return '<SourceACL %s>' % self.source_acl_id
# Export in the module's public API list (defined earlier in the file).
__all__.append('SourceACL')
2498 ################################################################################
class SrcFormat(object):
    """Database object for one row of the C{src_format} table."""

    def __init__(self, *args, **kwargs):
        pass

    # Restored as its own method: the repr return was stranded in __init__.
    def __repr__(self):
        return '<SrcFormat %s>' % (self.format_name)
# Export in the module's public API list (defined earlier in the file).
__all__.append('SrcFormat')
2509 ################################################################################
class SrcUploader(object):
    """Database object for one row of the C{src_uploaders} table."""

    def __init__(self, *args, **kwargs):
        pass

    # Restored as its own method: the repr return was stranded in __init__.
    def __repr__(self):
        return '<SrcUploader %s>' % self.uploader_id
__all__.append('SrcUploader')

################################################################################

# (display name, Suite attribute) pairs consumed by the Suite class's
# `for disp, field in SUITE_FIELDS` loop below.
SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
                 ('SuiteID', 'suite_id'),
                 ('Version', 'version'),
                 ('Origin', 'origin'),
                 ('Description', 'description'),
                 ('Untouchable', 'untouchable'),
                 ('Announce', 'announce'),
                 ('Codename', 'codename'),
                 ('OverrideCodename', 'overridecodename'),
                 ('ValidTime', 'validtime'),
                 ('Priority', 'priority'),
                 ('NotAutomatic', 'notautomatic'),
                 ('CopyChanges', 'copychanges'),
                 ('OverrideSuite', 'overridesuite')]
# Why the heck don't we have any UNIQUE constraints in table suite?
# TODO: Add UNIQUE constraints for appropriate columns.
class Suite(object):
    """
    ORM object for one row of the suite table.

    A Suite compares equal to its bare suite name string so existing code
    may treat a Suite object and a suite name interchangeably.
    """

    def __init__(self, suite_name = None, version = None):
        self.suite_name = suite_name
        self.version = version

    def __repr__(self):
        return '<Suite %s>' % self.suite_name

    def __eq__(self, val):
        if isinstance(val, str):
            return (self.suite_name == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.suite_name != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def details(self):
        # NOTE(review): this copy of the file is missing the method header
        # here; the name 'details' was reconstructed -- confirm against VCS.
        # Dump all SUITE_FIELDS that are set on this suite, one per line.
        ret = []
        for disp, field in SUITE_FIELDS:
            val = getattr(self, field, None)
            if val is not None:
                ret.append("%s: %s" % (disp, val))

        return "\n".join(ret)

    def get_architectures(self, skipsrc=False, skipall=False):
        """
        Returns list of Architecture objects

        @type skipsrc: boolean
        @param skipsrc: Whether to skip returning the 'source' architecture entry

        @type skipall: boolean
        @param skipall: Whether to skip returning the 'all' architecture entry

        @rtype: list
        @return: list of Architecture objects for the given name (may be empty)
        """

        q = object_session(self).query(Architecture).with_parent(self)
        if skipsrc:
            q = q.filter(Architecture.arch_string != 'source')
        if skipall:
            q = q.filter(Architecture.arch_string != 'all')
        return q.order_by(Architecture.arch_string).all()

    def get_sources(self, source):
        """
        Returns a query object representing DBSource that is part of C{suite}.

          - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}

        @type source: string
        @param source: source package name

        @rtype: sqlalchemy.orm.query.Query
        @return: a query of DBSource
        """

        session = object_session(self)
        return session.query(DBSource).filter_by(source = source). \
            with_parent(self)
__all__.append('Suite')

@session_wrapper
def get_suite(suite, session=None):
    """
    Returns Suite object for given C{suite name}.

    @type suite: string
    @param suite: The name of the suite

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Suite
    @return: Suite object for the requested suite name (None if not present)
    """

    q = session.query(Suite).filter_by(suite_name=suite)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_suite')
################################################################################

# TODO: should be removed because the implementation is too trivial
@session_wrapper
def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
    """
    Returns list of Architecture objects for given C{suite} name

    @type suite: str
    @param suite: Suite name to search for

    @type skipsrc: boolean
    @param skipsrc: Whether to skip returning the 'source' architecture entry

    @type skipall: boolean
    @param skipall: Whether to skip returning the 'all' architecture entry

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of Architecture objects for the given name (may be empty)
    """

    # NOTE(review): get_suite() returns None for an unknown suite name,
    # which would raise AttributeError here -- confirm callers only ever
    # pass known suite names.
    return get_suite(suite, session).get_architectures(skipsrc, skipall)

__all__.append('get_suite_architectures')

################################################################################
class SuiteSrcFormat(object):
    """
    ORM object for one row of the suite_src_formats table: which source
    formats are permitted in which suite.
    """
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<SuiteSrcFormat (%s, %s)>' % (self.suite_id, self.src_format_id)
__all__.append('SuiteSrcFormat')

@session_wrapper
def get_suite_src_formats(suite, session=None):
    """
    Returns list of allowed SrcFormat for C{suite}.

    @type suite: str
    @param suite: Suite name to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: the list of allowed source formats for I{suite}
    """

    q = session.query(SrcFormat)
    q = q.join(SuiteSrcFormat)
    q = q.join(Suite).filter_by(suite_name=suite)
    q = q.order_by('format_name')

    return q.all()

__all__.append('get_suite_src_formats')

################################################################################
2707 def __init__(self, uid = None, name = None):
2711 def __eq__(self, val):
2712 if isinstance(val, str):
2713 return (self.uid == val)
2714 # This signals to use the normal comparison operator
2715 return NotImplemented
2717 def __ne__(self, val):
2718 if isinstance(val, str):
2719 return (self.uid != val)
2720 # This signals to use the normal comparison operator
2721 return NotImplemented
2724 return '<Uid %s (%s)>' % (self.uid, self.name)
__all__.append('Uid')

@session_wrapper
def get_or_set_uid(uidname, session=None):
    """
    Returns uid object for given uidname.

    If no matching uidname is found, a row is inserted.

    @type uidname: string
    @param uidname: The uid to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: Uid
    @return: the uid object for the given uidname
    """

    q = session.query(Uid).filter_by(uid=uidname)

    try:
        ret = q.one()
    except NoResultFound:
        # Not there yet: insert a new row and make it visible to the
        # caller's transaction (commit if we own the session).
        uid = Uid()
        uid.uid = uidname
        session.add(uid)
        session.commit_or_flush()
        ret = uid

    return ret

__all__.append('get_or_set_uid')
def get_uid_from_fingerprint(fpr, session=None):
    """
    Look up the Uid attached to the given key fingerprint.

    @rtype: Uid
    @return: the Uid for I{fpr}, or None if the fingerprint is unknown
    """
    # NOTE(review): consistent with its siblings this function is most
    # likely decorated with @session_wrapper on the (hidden) preceding
    # line of this extract -- the decorator line itself lies outside this
    # edit's span and is left untouched.
    q = session.query(Uid)
    q = q.join(Fingerprint).filter_by(fingerprint=fpr)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_uid_from_fingerprint')

################################################################################
class UploadBlock(object):
    """
    ORM object for one row of the upload_blocks table: a block placed on
    uploads of a given source package.
    """
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<UploadBlock %s (%s)>' % (self.source, self.upload_block_id)
# Export UploadBlock as part of the module's public API.
__all__.append('UploadBlock')

################################################################################
# NOTE(review): the embedded original line numbers below jump repeatedly
# (e.g. 2802 -> 2813, 2815 -> 2825), i.e. this copy of the file is missing
# many lines.  The table tuples and mapper definitions below must NOT be
# assumed complete; verify against VCS.  Code is left byte-identical;
# only comments are added.
2787 class DBConn(object):
2789 database module init.
# Borg pattern: instances share __shared_state, so the engine, metadata
# and mappers are set up only once per process.
2793 def __init__(self, *args, **kwargs):
2794 self.__dict__ = self.__shared_state
2796 if not getattr(self, 'initialised', False):
2797 self.initialised = True
# kwargs.has_key is the Python 2 spelling; 'debug' turns on engine echo.
2798 self.debug = kwargs.has_key('debug')
# Reflect database tables into SQLAlchemy Table objects as self.tbl_*
# (and views as self.view_*).
2801 def __setuptables(self):
2802 tables_with_primary = (
# NOTE(review): entries missing from this extract between here and the
# visible names below.
2813 'changes_pending_binaries',
2814 'changes_pending_files',
2815 'changes_pending_source',
2825 'pending_bin_contents',
2837 # The following tables have primary keys but sqlalchemy
2838 # version 0.5 fails to reflect them correctly with database
2839 # versions before upgrade #41.
2841 #'build_queue_files',
2844 tables_no_primary = (
2846 'changes_pending_files_map',
2847 'changes_pending_source_files',
2848 'changes_pool_files',
2851 'suite_architectures',
2852 'suite_src_formats',
2853 'suite_build_queue_copy',
2855 # see the comment above
2857 'build_queue_files',
# NOTE(review): the 'views = (' opener for the following names appears to
# be among the missing lines.
2861 'almost_obsolete_all_associations',
2862 'almost_obsolete_src_associations',
2863 'any_associations_source',
2864 'bin_assoc_by_arch',
2865 'bin_associations_binaries',
2866 'binaries_suite_arch',
2867 'binfiles_suite_component_arch',
2870 'newest_all_associations',
2871 'newest_any_associations',
2873 'newest_src_association',
2874 'obsolete_all_associations',
2875 'obsolete_any_associations',
2876 'obsolete_any_by_all_associations',
2877 'obsolete_src_associations',
2879 'src_associations_bin',
2880 'src_associations_src',
2881 'suite_arch_by_name',
2884 # Sqlalchemy version 0.5 fails to reflect the SERIAL type
2885 # correctly and that is why we have to use a workaround. It can
2886 # be removed as soon as we switch to version 0.6.
# Reflect each table, forcing an explicit integer 'id' primary key
# (the SERIAL workaround described above).
2887 for table_name in tables_with_primary:
2888 table = Table(table_name, self.db_meta, \
2889 Column('id', Integer, primary_key = True), \
2890 autoload=True, useexisting=True)
2891 setattr(self, 'tbl_%s' % table_name, table)
2893 for table_name in tables_no_primary:
2894 table = Table(table_name, self.db_meta, autoload=True)
2895 setattr(self, 'tbl_%s' % table_name, table)
2897 for view_name in views:
2898 view = Table(view_name, self.db_meta, autoload=True)
2899 setattr(self, 'view_%s' % view_name, view)
# Wire the ORM classes defined above in this module onto the reflected
# tables (classic SQLAlchemy 0.5 mapper() style).
2901 def __setupmappers(self):
2902 mapper(Architecture, self.tbl_architecture,
2903 properties = dict(arch_id = self.tbl_architecture.c.id,
2904 suites = relation(Suite, secondary=self.tbl_suite_architectures,
2905 order_by='suite_name',
2906 backref=backref('architectures', order_by='arch_string'))))
2908 mapper(Archive, self.tbl_archive,
2909 properties = dict(archive_id = self.tbl_archive.c.id,
2910 archive_name = self.tbl_archive.c.name))
2912 mapper(BinAssociation, self.tbl_bin_associations,
2913 properties = dict(ba_id = self.tbl_bin_associations.c.id,
2914 suite_id = self.tbl_bin_associations.c.suite,
2915 suite = relation(Suite),
2916 binary_id = self.tbl_bin_associations.c.bin,
2917 binary = relation(DBBinary)))
2919 mapper(PendingBinContents, self.tbl_pending_bin_contents,
2920 properties = dict(contents_id =self.tbl_pending_bin_contents.c.id,
2921 filename = self.tbl_pending_bin_contents.c.filename,
2922 package = self.tbl_pending_bin_contents.c.package,
2923 version = self.tbl_pending_bin_contents.c.version,
2924 arch = self.tbl_pending_bin_contents.c.arch,
2925 otype = self.tbl_pending_bin_contents.c.type))
2927 mapper(DebContents, self.tbl_deb_contents,
2928 properties = dict(binary_id=self.tbl_deb_contents.c.binary_id,
2929 package=self.tbl_deb_contents.c.package,
2930 suite=self.tbl_deb_contents.c.suite,
2931 arch=self.tbl_deb_contents.c.arch,
2932 section=self.tbl_deb_contents.c.section,
2933 filename=self.tbl_deb_contents.c.filename))
2935 mapper(UdebContents, self.tbl_udeb_contents,
2936 properties = dict(binary_id=self.tbl_udeb_contents.c.binary_id,
2937 package=self.tbl_udeb_contents.c.package,
2938 suite=self.tbl_udeb_contents.c.suite,
2939 arch=self.tbl_udeb_contents.c.arch,
2940 section=self.tbl_udeb_contents.c.section,
2941 filename=self.tbl_udeb_contents.c.filename))
2943 mapper(BuildQueue, self.tbl_build_queue,
2944 properties = dict(queue_id = self.tbl_build_queue.c.id))
2946 mapper(BuildQueueFile, self.tbl_build_queue_files,
2947 properties = dict(buildqueue = relation(BuildQueue, backref='queuefiles'),
2948 poolfile = relation(PoolFile, backref='buildqueueinstances')))
2950 mapper(DBBinary, self.tbl_binaries,
2951 properties = dict(binary_id = self.tbl_binaries.c.id,
2952 package = self.tbl_binaries.c.package,
2953 version = self.tbl_binaries.c.version,
2954 maintainer_id = self.tbl_binaries.c.maintainer,
2955 maintainer = relation(Maintainer),
2956 source_id = self.tbl_binaries.c.source,
2957 source = relation(DBSource),
2958 arch_id = self.tbl_binaries.c.architecture,
2959 architecture = relation(Architecture),
2960 poolfile_id = self.tbl_binaries.c.file,
2961 poolfile = relation(PoolFile),
2962 binarytype = self.tbl_binaries.c.type,
2963 fingerprint_id = self.tbl_binaries.c.sig_fpr,
2964 fingerprint = relation(Fingerprint),
2965 install_date = self.tbl_binaries.c.install_date,
2966 binassociations = relation(BinAssociation,
2967 primaryjoin=(self.tbl_binaries.c.id==self.tbl_bin_associations.c.bin))))
2969 mapper(BinaryACL, self.tbl_binary_acl,
2970 properties = dict(binary_acl_id = self.tbl_binary_acl.c.id))
2972 mapper(BinaryACLMap, self.tbl_binary_acl_map,
2973 properties = dict(binary_acl_map_id = self.tbl_binary_acl_map.c.id,
2974 fingerprint = relation(Fingerprint, backref="binary_acl_map"),
2975 architecture = relation(Architecture)))
2977 mapper(Component, self.tbl_component,
2978 properties = dict(component_id = self.tbl_component.c.id,
2979 component_name = self.tbl_component.c.name))
2981 mapper(DBConfig, self.tbl_config,
2982 properties = dict(config_id = self.tbl_config.c.id))
2984 mapper(DSCFile, self.tbl_dsc_files,
2985 properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
2986 source_id = self.tbl_dsc_files.c.source,
2987 source = relation(DBSource),
2988 poolfile_id = self.tbl_dsc_files.c.file,
2989 poolfile = relation(PoolFile)))
2991 mapper(PoolFile, self.tbl_files,
2992 properties = dict(file_id = self.tbl_files.c.id,
2993 filesize = self.tbl_files.c.size,
2994 location_id = self.tbl_files.c.location,
2995 location = relation(Location,
2996 # using lazy='dynamic' in the back
2997 # reference because we have A LOT of
2998 # files in one location
2999 backref=backref('files', lazy='dynamic'))))
3001 mapper(Fingerprint, self.tbl_fingerprint,
3002 properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
3003 uid_id = self.tbl_fingerprint.c.uid,
3004 uid = relation(Uid),
3005 keyring_id = self.tbl_fingerprint.c.keyring,
3006 keyring = relation(Keyring),
3007 source_acl = relation(SourceACL),
3008 binary_acl = relation(BinaryACL)))
3010 mapper(Keyring, self.tbl_keyrings,
3011 properties = dict(keyring_name = self.tbl_keyrings.c.name,
3012 keyring_id = self.tbl_keyrings.c.id))
3014 mapper(DBChange, self.tbl_changes,
3015 properties = dict(change_id = self.tbl_changes.c.id,
3016 poolfiles = relation(PoolFile,
3017 secondary=self.tbl_changes_pool_files,
3018 backref="changeslinks"),
3019 seen = self.tbl_changes.c.seen,
3020 source = self.tbl_changes.c.source,
3021 binaries = self.tbl_changes.c.binaries,
3022 architecture = self.tbl_changes.c.architecture,
3023 distribution = self.tbl_changes.c.distribution,
3024 urgency = self.tbl_changes.c.urgency,
3025 maintainer = self.tbl_changes.c.maintainer,
3026 changedby = self.tbl_changes.c.changedby,
3027 date = self.tbl_changes.c.date,
3028 version = self.tbl_changes.c.version,
3029 files = relation(ChangePendingFile,
3030 secondary=self.tbl_changes_pending_files_map,
3031 backref="changesfile"),
3032 in_queue_id = self.tbl_changes.c.in_queue,
3033 in_queue = relation(PolicyQueue,
3034 primaryjoin=(self.tbl_changes.c.in_queue==self.tbl_policy_queue.c.id)),
3035 approved_for_id = self.tbl_changes.c.approved_for))
3037 mapper(ChangePendingBinary, self.tbl_changes_pending_binaries,
3038 properties = dict(change_pending_binary_id = self.tbl_changes_pending_binaries.c.id))
3040 mapper(ChangePendingFile, self.tbl_changes_pending_files,
3041 properties = dict(change_pending_file_id = self.tbl_changes_pending_files.c.id,
3042 filename = self.tbl_changes_pending_files.c.filename,
3043 size = self.tbl_changes_pending_files.c.size,
3044 md5sum = self.tbl_changes_pending_files.c.md5sum,
3045 sha1sum = self.tbl_changes_pending_files.c.sha1sum,
3046 sha256sum = self.tbl_changes_pending_files.c.sha256sum))
3048 mapper(ChangePendingSource, self.tbl_changes_pending_source,
3049 properties = dict(change_pending_source_id = self.tbl_changes_pending_source.c.id,
3050 change = relation(DBChange),
3051 maintainer = relation(Maintainer,
3052 primaryjoin=(self.tbl_changes_pending_source.c.maintainer_id==self.tbl_maintainer.c.id)),
3053 changedby = relation(Maintainer,
3054 primaryjoin=(self.tbl_changes_pending_source.c.changedby_id==self.tbl_maintainer.c.id)),
3055 fingerprint = relation(Fingerprint),
3056 source_files = relation(ChangePendingFile,
3057 secondary=self.tbl_changes_pending_source_files,
3058 backref="pending_sources")))
3061 mapper(KeyringACLMap, self.tbl_keyring_acl_map,
3062 properties = dict(keyring_acl_map_id = self.tbl_keyring_acl_map.c.id,
3063 keyring = relation(Keyring, backref="keyring_acl_map"),
3064 architecture = relation(Architecture)))
3066 mapper(Location, self.tbl_location,
3067 properties = dict(location_id = self.tbl_location.c.id,
3068 component_id = self.tbl_location.c.component,
3069 component = relation(Component),
3070 archive_id = self.tbl_location.c.archive,
3071 archive = relation(Archive),
3072 # FIXME: the 'type' column is old cruft and
3073 # should be removed in the future.
3074 archive_type = self.tbl_location.c.type))
3076 mapper(Maintainer, self.tbl_maintainer,
3077 properties = dict(maintainer_id = self.tbl_maintainer.c.id,
3078 maintains_sources = relation(DBSource, backref='maintainer',
3079 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.maintainer)),
3080 changed_sources = relation(DBSource, backref='changedby',
3081 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.changedby))))
3083 mapper(NewComment, self.tbl_new_comments,
3084 properties = dict(comment_id = self.tbl_new_comments.c.id))
3086 mapper(Override, self.tbl_override,
3087 properties = dict(suite_id = self.tbl_override.c.suite,
3088 suite = relation(Suite),
3089 package = self.tbl_override.c.package,
3090 component_id = self.tbl_override.c.component,
3091 component = relation(Component),
3092 priority_id = self.tbl_override.c.priority,
3093 priority = relation(Priority),
3094 section_id = self.tbl_override.c.section,
3095 section = relation(Section),
3096 overridetype_id = self.tbl_override.c.type,
3097 overridetype = relation(OverrideType)))
3099 mapper(OverrideType, self.tbl_override_type,
3100 properties = dict(overridetype = self.tbl_override_type.c.type,
3101 overridetype_id = self.tbl_override_type.c.id))
3103 mapper(PolicyQueue, self.tbl_policy_queue,
3104 properties = dict(policy_queue_id = self.tbl_policy_queue.c.id))
3106 mapper(Priority, self.tbl_priority,
3107 properties = dict(priority_id = self.tbl_priority.c.id))
3109 mapper(Section, self.tbl_section,
3110 properties = dict(section_id = self.tbl_section.c.id,
3111 section=self.tbl_section.c.section))
3113 mapper(DBSource, self.tbl_source,
3114 properties = dict(source_id = self.tbl_source.c.id,
3115 version = self.tbl_source.c.version,
3116 maintainer_id = self.tbl_source.c.maintainer,
3117 poolfile_id = self.tbl_source.c.file,
3118 poolfile = relation(PoolFile, backref=backref('source', uselist = False)),
3119 fingerprint_id = self.tbl_source.c.sig_fpr,
3120 fingerprint = relation(Fingerprint),
3121 changedby_id = self.tbl_source.c.changedby,
3122 srcfiles = relation(DSCFile,
3123 primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
3124 suites = relation(Suite, secondary=self.tbl_src_associations,
3126 srcuploaders = relation(SrcUploader)))
3128 mapper(SourceACL, self.tbl_source_acl,
3129 properties = dict(source_acl_id = self.tbl_source_acl.c.id))
3131 mapper(SrcFormat, self.tbl_src_format,
3132 properties = dict(src_format_id = self.tbl_src_format.c.id,
3133 format_name = self.tbl_src_format.c.format_name))
3135 mapper(SrcUploader, self.tbl_src_uploaders,
3136 properties = dict(uploader_id = self.tbl_src_uploaders.c.id,
3137 source_id = self.tbl_src_uploaders.c.source,
3138 source = relation(DBSource,
3139 primaryjoin=(self.tbl_src_uploaders.c.source==self.tbl_source.c.id)),
3140 maintainer_id = self.tbl_src_uploaders.c.maintainer,
3141 maintainer = relation(Maintainer,
3142 primaryjoin=(self.tbl_src_uploaders.c.maintainer==self.tbl_maintainer.c.id))))
3144 mapper(Suite, self.tbl_suite,
3145 properties = dict(suite_id = self.tbl_suite.c.id,
3146 policy_queue = relation(PolicyQueue),
3147 copy_queues = relation(BuildQueue, secondary=self.tbl_suite_build_queue_copy)))
3149 mapper(SuiteSrcFormat, self.tbl_suite_src_formats,
3150 properties = dict(suite_id = self.tbl_suite_src_formats.c.suite,
3151 suite = relation(Suite, backref='suitesrcformats'),
3152 src_format_id = self.tbl_suite_src_formats.c.src_format,
3153 src_format = relation(SrcFormat)))
3155 mapper(Uid, self.tbl_uid,
3156 properties = dict(uid_id = self.tbl_uid.c.id,
3157 fingerprint = relation(Fingerprint)))
3159 mapper(UploadBlock, self.tbl_upload_blocks,
3160 properties = dict(upload_block_id = self.tbl_upload_blocks.c.id,
3161 fingerprint = relation(Fingerprint, backref="uploadblocks"),
3162 uid = relation(Uid, backref="uploadblocks")))
3164 ## Connection functions
# Build the postgres connection string from dak's configuration, create
# the engine/metadata/sessionmaker and set up tables and mappers.
3165 def __createconn(self):
3166 from config import Config
# TCP connection when DB::Host is set ...
3170 connstr = "postgres://%s" % cnf["DB::Host"]
3171 if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
3172 connstr += ":%s" % cnf["DB::Port"]
3173 connstr += "/%s" % cnf["DB::Name"]
# ... otherwise a local unix-socket connection.
3176 connstr = "postgres:///%s" % cnf["DB::Name"]
3177 if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
3178 connstr += "?port=%s" % cnf["DB::Port"]
3180 self.db_pg = create_engine(connstr, echo=self.debug)
3181 self.db_meta = MetaData()
3182 self.db_meta.bind = self.db_pg
3183 self.db_smaker = sessionmaker(bind=self.db_pg,
3187 self.__setuptables()
3188 self.__setupmappers()
# Hand out a fresh session from the shared sessionmaker.
3191 return self.db_smaker()
3193 __all__.append('DBConn')