change to not using autocommit by default
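
With autocommit turned off in the sessionmaker, nothing is written to the
database until the caller commits; a minimal sketch of the pattern callers now
need to follow:

    session = DBConn().session()
    # ... query and modify objects through the session ...
    session.commit()    # changes only reach the database here
    session.close()
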
[dak.git] / daklib / dbconn.py
index c290b60f84a8f0c9f789c72c3843e7af58db516f..781e648719a8957d87893fcea07d9334c2f1d406 100755 (executable)
@@ -37,222 +37,782 @@ import os
 import psycopg2
 import traceback
 
+from sqlalchemy import create_engine, Table, MetaData, select
+from sqlalchemy.orm import sessionmaker, mapper, relation
+
+# Don't remove this, we re-export the exceptions to scripts which import us
+from sqlalchemy.exc import *
+
 from singleton import Singleton
-from config import Config
 
 ################################################################################
 
-class Cache(object):
-    def __init__(self, hashfunc=None):
-        if hashfunc:
-            self.hashfunc = hashfunc
-        else:
-            self.hashfunc = lambda x: x['value']
+class Architecture(object):
+    def __init__(self, *args, **kwargs):
+        pass
 
-        self.data = {}
+    def __repr__(self):
+        return '<Architecture %s>' % self.arch_string
 
-    def SetValue(self, keys, value):
-        self.data[self.hashfunc(keys)] = value
+def get_architecture(architecture, session=None):
+    """
+    Returns Architecture object for the given C{architecture} name.
 
-    def GetValue(self, keys):
-        return self.data.get(self.hashfunc(keys))
+    @type architecture: string
+    @param architecture: The name of the architecture
 
-################################################################################
+    @type session: Session
+    @param session: Optional SQLA session object (a temporary one will be
+    generated if not supplied)
+
+    @rtype: Architecture
+    @return: Architecture object for the given arch (None if not present)
 
-class DBConn(Singleton):
     """
-    database module init.
+    if session is None:
+        session = DBConn().session()
+    q = session.query(Architecture).filter_by(arch_string=architecture)
+    if q.count() == 0:
+        return None
+    return q.one()
+
+def get_architecture_suites(architecture, session=None):
+    """
+    Returns list of Suite objects for given C{architecture} name
+
+    @type architecture: str
+    @param architecture: Architecture name to search for
+
+    @type session: Session
+    @param session: Optional SQL session object (a temporary one will be
+    generated if not supplied)
+
+    @rtype: list
+    @return: list of Suite objects for the given name (may be empty)
     """
+
+    if session is None:
+        session = DBConn().session()
+
+    q = session.query(Suite)
+    q = q.join(SuiteArchitecture)
+    q = q.join(Architecture).filter_by(arch_string=architecture).order_by('suite_name')
+    return q.all()
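+
+# Usage sketch for the helper above (the architecture name is illustrative):
+#
+#   for s in get_architecture_suites('amd64'):
+#       print s.suite_name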
+
+class Archive(object):
     def __init__(self, *args, **kwargs):
-        super(DBConn, self).__init__(*args, **kwargs)
+        pass
 
-    def _startup(self, *args, **kwargs):
-        self.__createconn()
-        self.__init_caches()
+    def __repr__(self):
+        return '<Archive %s>' % self.name
 
-    ## Connection functions
-    def __createconn(self):
-        cnf = Config()
-        connstr = "dbname=%s" % cnf["DB::Name"]
-        if cnf["DB::Host"]:
-           connstr += " host=%s" % cnf["DB::Host"]
-        if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
-           connstr += " port=%s" % cnf["DB::Port"]
+def get_archive(archive, session=None):
+    """
+    Returns Archive object for the given C{archive} name.
 
-        self.db_con = psycopg2.connect(connstr)
+    @type archive: string
+    @param archive: the name of the archive
 
-    def reconnect(self):
-        try:
-            self.db_con.close()
-        except psycopg2.InterfaceError:
-            pass
+    @type session: Session
+    @param session: Optional SQLA session object (a temporary one will be
+    generated if not supplied)
 
-        self.db_con = None
-        self.__createconn()
+    @rtype: Archive
+    @return: Archive object for the given name (None if not present)
 
-    ## Cache functions
-    def __init_caches(self):
-        self.caches = {'suite':         Cache(),
-                       'section':       Cache(),
-                       'priority':      Cache(),
-                       'override_type': Cache(),
-                       'architecture':  Cache(),
-                       'archive':       Cache(),
-                       'component':     Cache(),
-                       'content_path_names':     Cache(),
-                       'content_file_names':     Cache(),
-                       'location':      Cache(lambda x: '%s_%s_%s' % (x['location'], x['component'], x['location'])),
-                       'maintainer':    {}, # TODO
-                       'keyring':       {}, # TODO
-                       'source':        Cache(lambda x: '%s_%s_' % (x['source'], x['version'])),
-                       'files':         Cache(lambda x: '%s_%s_' % (x['filename'], x['location'])),
-                       'maintainer':    {}, # TODO
-                       'fingerprint':   {}, # TODO
-                       'queue':         {}, # TODO
-                       'uid':           {}, # TODO
-                       'suite_version': Cache(lambda x: '%s_%s' % (x['source'], x['suite'])),
-                      }
-
-        self.prepared_statements = {}
+    """
+    archive = archive.lower()
+    if session is None:
+        session = DBConn().session()
+    q = session.query(Archive).filter_by(archive_name=archive)
+    if q.count() == 0:
+        return None
+    return q.one()
 
-    def prepare(self,name,statement):
-        if not self.prepared_statements.has_key(name):
-            c = self.cursor()
-            c.execute(statement)
-            self.prepared_statements[name] = statement
 
-    def clear_caches(self):
-        self.__init_caches()
+class BinAssociation(object):
+    def __init__(self, *args, **kwargs):
+        pass
 
-    ## Functions to pass through to the database connector
-    def cursor(self):
-        return self.db_con.cursor()
+    def __repr__(self):
+        return '<BinAssociation %s (%s, %s)>' % (self.ba_id, self.binary, self.suite)
 
-    def commit(self):
-        return self.db_con.commit()
+class Binary(object):
+    def __init__(self, *args, **kwargs):
+        pass
 
-    ## Get functions
-    def __get_single_id(self, query, values, cachename=None):
-        # This is a bit of a hack but it's an internal function only
-        if cachename is not None:
-            res = self.caches[cachename].GetValue(values)
-            if res:
-                return res
+    def __repr__(self):
+        return '<Binary %s (%s, %s)>' % (self.package, self.version, self.architecture)
 
-        c = self.db_con.cursor()
-        c.execute(query, values)
+def get_binary_from_id(id, session=None):
+    """
+    Returns Binary object for given C{id}
 
-        if c.rowcount != 1:
-            return None
+    @type id: int
+    @param id: Id of the required binary
 
-        res = c.fetchone()[0]
+    @type session: Session
+    @param session: Optional SQLA session object (a temporary one will be
+    generated if not supplied)
 
-        if cachename is not None:
-            self.caches[cachename].SetValue(values, res)
+    @rtype: Binary
+    @return: Binary object for the given binary (None if not present)
+    """
+    if session is None:
+        session = DBConn().session()
+    q = session.query(Binary).filter_by(binary_id=id)
+    if q.count() == 0:
+        return None
+    return q.one()
+
+def get_binaries_from_name(package, session=None):
+    """
+    Returns list of Binary objects for given C{package} name
 
-        return res
+    @type package: str
+    @param package: Binary package name to search for
 
-    def __get_id(self, retfield, table, qfield, value):
-        query = "SELECT %s FROM %s WHERE %s = %%(value)s" % (retfield, table, qfield)
-        return self.__get_single_id(query, {'value': value}, cachename=table)
+    @type session: Session
+    @param session: Optional SQL session object (a temporary one will be
+    generated if not supplied)
 
-    def get_suite_id(self, suite):
-        """
-        Returns database id for given C{suite}.
-        Results are kept in a cache during runtime to minimize database queries.
+    @rtype: list
+    @return: list of Binary objects for the given name (may be empty)
+    """
+    if session is None:
+        session = DBConn().session()
+    return session.query(Binary).filter_by(package=package).all()
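+
+# Usage sketch (the package name is illustrative):
+#
+#   for b in get_binaries_from_name('dpkg'):
+#       print b.package, b.version, b.architecture.arch_string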
 
-        @type suite: string
-        @param suite: The name of the suite
+class Component(object):
+    def __init__(self, *args, **kwargs):
+        pass
 
-        @rtype: int
-        @return: the database id for the given suite
+    def __repr__(self):
+        return '<Component %s>' % self.component_name
 
-        """
-        return int(self.__get_id('id', 'suite', 'suite_name', suite))
+def get_component(component, session=None):
+    """
+    Returns Component object for the given C{component} name.
 
-    def get_section_id(self, section):
-        """
-        Returns database id for given C{section}.
-        Results are kept in a cache during runtime to minimize database queries.
+    @type component: string
+    @param component: The name of the component
 
-        @type section: string
-        @param section: The name of the section
+    @rtype: Component
+    @return: Component object for the given name (None if not present)
 
-        @rtype: int
-        @return: the database id for the given section
+    """
+    component = component.lower()
+    if session is None:
+        session = DBConn().session()
+    q = session.query(Component).filter_by(component_name=component)
+    if q.count() == 0:
+        return None
+    return q.one()
+
+class DBConfig(object):
+    def __init__(self, *args, **kwargs):
+        pass
 
-        """
-        return self.__get_id('id', 'section', 'section', section)
+    def __repr__(self):
+        return '<DBConfig %s>' % self.name
 
-    def get_priority_id(self, priority):
-        """
-        Returns database id for given C{priority}.
-        Results are kept in a cache during runtime to minimize database queries.
+class ContentFilename(object):
+    def __init__(self, *args, **kwargs):
+        pass
 
-        @type priority: string
-        @param priority: The name of the priority
+    def __repr__(self):
+        return '<ContentFilename %s>' % self.filename
 
-        @rtype: int
-        @return: the database id for the given priority
+class ContentFilepath(object):
+    def __init__(self, *args, **kwargs):
+        pass
 
-        """
-        return self.__get_id('id', 'priority', 'priority', priority)
+    def __repr__(self):
+        return '<ContentFilepath %s>' % self.filepath
 
-    def get_override_type_id(self, override_type):
-        """
-        Returns database id for given override C{type}.
-        Results are kept in a cache during runtime to minimize database queries.
+class ContentAssociation(object):
+    def __init__(self, *args, **kwargs):
+        pass
 
-        @type type: string
-        @param type: The name of the override type
+    def __repr__(self):
+        return '<ContentAssociation %s>' % self.ca_id
 
-        @rtype: int
-        @return: the database id for the given override type
+class DSCFile(object):
+    def __init__(self, *args, **kwargs):
+        pass
 
-        """
-        return self.__get_id('id', 'override_type', 'type', override_type)
+    def __repr__(self):
+        return '<DSCFile %s>' % self.dscfile_id
 
-    def get_architecture_id(self, architecture):
-        """
-        Returns database id for given C{architecture}.
-        Results are kept in a cache during runtime to minimize database queries.
+class PoolFile(object):
+    def __init__(self, *args, **kwargs):
+        pass
 
-        @type architecture: string
-        @param architecture: The name of the override type
+    def __repr__(self):
+        return '<PoolFile %s>' % self.filename
 
-        @rtype: int
-        @return: the database id for the given architecture
+class Fingerprint(object):
+    def __init__(self, *args, **kwargs):
+        pass
 
-        """
-        return self.__get_id('id', 'architecture', 'arch_string', architecture)
+    def __repr__(self):
+        return '<Fingerprint %s>' % self.fingerprint
 
-    def get_archive_id(self, archive):
-        """
-        returns database id for given c{archive}.
-        results are kept in a cache during runtime to minimize database queries.
+class Keyring(object):
+    def __init__(self, *args, **kwargs):
+        pass
 
-        @type archive: string
-        @param archive: the name of the override type
+    def __repr__(self):
+        return '<Keyring %s>' % self.keyring_name
 
-        @rtype: int
-        @return: the database id for the given archive
+class Location(object):
+    def __init__(self, *args, **kwargs):
+        pass
 
-        """
-        return self.__get_id('id', 'archive', 'lower(name)', archive)
+    def __repr__(self):
+        return '<Location %s (%s)>' % (self.path, self.location_id)
 
-    def get_component_id(self, component):
-        """
-        Returns database id for given C{component}.
-        Results are kept in a cache during runtime to minimize database queries.
+class Maintainer(object):
+    def __init__(self, *args, **kwargs):
+        pass
 
-        @type component: string
-        @param component: The name of the override type
+    def __repr__(self):
+        return '''<Maintainer '%s' (%s)>''' % (self.name, self.maintainer_id)
 
-        @rtype: int
-        @return: the database id for the given component
+class Override(object):
+    def __init__(self, *args, **kwargs):
+        pass
+
+    def __repr__(self):
+        return '<Override %s (%s)>' % (self.package, self.suite_id)
+
+class OverrideType(object):
+    def __init__(self, *args, **kwargs):
+        pass
+
+    def __repr__(self):
+        return '<OverrideType %s>' % self.overridetype
+
+def get_override_type(override_type, session=None):
+    """
+    Returns OverrideType object for given C{override type}.
+
+    @type override_type: string
+    @param override_type: The name of the override type
+
+    @type session: Session
+    @param session: Optional SQLA session object (a temporary one will be
+    generated if not supplied)
+
+    @rtype: OverrideType
+    @return: the OverrideType object for the given override type (None if not present)
+
+    """
+    if session is None:
+        session = DBConn().session()
+    q = session.query(OverrideType).filter_by(overridetype=override_type)
+    if q.count() == 0:
+        return None
+    return q.one()
+
+class PendingContentAssociation(object):
+    def __init__(self, *args, **kwargs):
+        pass
+
+    def __repr__(self):
+        return '<PendingContentAssociation %s>' % self.pca_id
+
+class Priority(object):
+    def __init__(self, *args, **kwargs):
+        pass
+
+    def __repr__(self):
+        return '<Priority %s (%s)>' % (self.priority, self.priority_id)
+
+def get_priority(priority, session=None):
+    """
+    Returns Priority object for given C{priority name}.
+
+    @type priority: string
+    @param priority: The name of the priority
+
+    @type session: Session
+    @param session: Optional SQLA session object (a temporary one will be
+    generated if not supplied)
+
+    @rtype: Priority
+    @return: Priority object for the given priority (None if not present)
+
+    """
+    if session is None:
+        session = DBConn().session()
+    q = session.query(Priority).filter_by(priority=priority)
+    if q.count() == 0:
+        return None
+    return q.one()
+
+class Queue(object):
+    def __init__(self, *args, **kwargs):
+        pass
+
+    def __repr__(self):
+        return '<Queue %s>' % self.queue_name
+
+class QueueBuild(object):
+    def __init__(self, *args, **kwargs):
+        pass
+
+    def __repr__(self):
+        return '<QueueBuild %s (%s)>' % (self.filename, self.queue_id)
+
+class Section(object):
+    def __init__(self, *args, **kwargs):
+        pass
+
+    def __repr__(self):
+        return '<Section %s>' % self.section
+
+def get_section(section, session=None):
+    """
+    Returns Section object for given C{section name}.
+
+    @type section: string
+    @param section: The name of the section
+
+    @type session: Session
+    @param session: Optional SQLA session object (a temporary one will be
+    generated if not supplied)
+
+    @rtype: Section
+    @return: Section object for the given section name (None if not present)
+
+    """
+    if session is None:
+        session = DBConn().session()
+    q = session.query(Section).filter_by(section=section)
+    if q.count() == 0:
+        return None
+    return q.one()
+
+class Source(object):
+    def __init__(self, *args, **kwargs):
+        pass
+
+    def __repr__(self):
+        return '<Source %s (%s)>' % (self.source, self.version)
+
+def get_sources_from_name(source, session=None):
+    """
+    Returns list of Source objects for given C{source} name
+
+    @type source: str
+    @param source: Source package name to search for
+
+    @type session: Session
+    @param session: Optional SQL session object (a temporary one will be
+    generated if not supplied)
+
+    @rtype: list
+    @return: list of Source objects for the given name (may be empty)
+    """
+    if session is None:
+        session = DBConn().session()
+    return session.query(Source).filter_by(source=source).all()
+
+def get_source_in_suite(source, suite, session=None):
+    """
+    Returns a Source object for the combination of C{source} and C{suite}.
+
+      - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
+      - B{suite} - a suite name, eg. I{unstable}
+
+    @type source: string
+    @param source: source package name
+
+    @type suite: string
+    @param suite: the suite name
+
+    @rtype: Source
+    @return: the Source object for I{source} in I{suite} (None if not found)
+
+    """
+    if session is None:
+        session = DBConn().session()
+    q = session.query(SrcAssociation)
+    q = q.join('source').filter_by(source=source)
+    q = q.join('suite').filter_by(suite_name=suite)
+    if q.count() == 0:
+        return None
+    # ???: Maybe we should just return the SrcAssociation object instead
+    return q.one().source
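+
+# Usage sketch: what the removed DBConn.get_suite_version() answered can now be
+# read off the Source object (names are illustrative):
+#
+#   src = get_source_in_suite('glibc', 'unstable')
+#   if src is not None:
+#       print src.version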
+
+class SrcAssociation(object):
+    def __init__(self, *args, **kwargs):
+        pass
+
+    def __repr__(self):
+        return '<SrcAssociation %s (%s, %s)>' % (self.sa_id, self.source, self.suite)
+
+class SrcUploader(object):
+    def __init__(self, *args, **kwargs):
+        pass
+
+    def __repr__(self):
+        return '<SrcUploader %s>' % self.uploader_id
+
+class Suite(object):
+    def __init__(self, *args, **kwargs):
+        pass
+
+    def __repr__(self):
+        return '<Suite %s>' % self.suite_name
+
+def get_suite_architecture(suite, architecture, session=None):
+    """
+    Returns a SuiteArchitecture object given C{suite} and C{architecture}, or None if it
+    doesn't exist
+
+    @type suite: str
+    @param suite: Suite name to search for
+
+    @type architecture: str
+    @param architecture: Architecture name to search for
+
+    @type session: Session
+    @param session: Optional SQL session object (a temporary one will be
+    generated if not supplied)
+
+    @rtype: SuiteArchitecture
+    @return: the SuiteArchitecture object or None
+    """
+
+    if session is None:
+        session = DBConn().session()
+
+    q = session.query(SuiteArchitecture)
+    q = q.join(Architecture).filter_by(arch_string=architecture)
+    q = q.join(Suite).filter_by(suite_name=suite)
+    if q.count() == 0:
+        return None
+    return q.one()
+
+
+def get_suite(suite, session=None):
+    """
+    Returns Suite object for given C{suite name}.
+
+    @type suite: string
+    @param suite: The name of the suite
+
+    @type session: Session
+    @param session: Optional SQLA session object (a temporary one will be
+    generated if not supplied)
+
+    @rtype: Suite
+    @return: Suite object for the requested suite name (None if not present)
+
+    """
+    if session is None:
+        session = DBConn().session()
+    q = session.query(Suite).filter_by(suite_name=suite)
+    if q.count() == 0:
+        return None
+    return q.one()
+
+class SuiteArchitecture(object):
+    def __init__(self, *args, **kwargs):
+        pass
+
+    def __repr__(self):
+        return '<SuiteArchitecture (%s, %s)>' % (self.suite_id, self.arch_id)
+
+def get_suite_architectures(suite, session=None):
+    """
+    Returns list of Architecture objects for given C{suite} name
+
+    @type suite: str
+    @param suite: Suite name to search for
+
+    @type session: Session
+    @param session: Optional SQL session object (a temporary one will be
+    generated if not supplied)
+
+    @rtype: list
+    @return: list of Architecture objects for the given name (may be empty)
+    """
+
+    if session is None:
+        session = DBConn().session()
+
+    q = session.query(Architecture)
+    q = q.join(SuiteArchitecture)
+    q = q.join(Suite).filter_by(suite_name=suite).order_by('arch_string')
+    return q.all()
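+
+# Usage sketch (the suite name is illustrative), replacing the old
+# DBConn.get_suite_architectures() helper removed further down:
+#
+#   for a in get_suite_architectures('unstable'):
+#       print a.arch_string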
+
+
+class Uid(object):
+    def __init__(self, *args, **kwargs):
+        pass
+
+    def __repr__(self):
+        return '<Uid %s (%s)>' % (self.uid, self.name)
+
+################################################################################
+
+class DBConn(Singleton):
+    """
+    Singleton providing the database connection: sets up the SQLAlchemy
+    engine, reflects the tables and configures the ORM mappers.
+    """
+    def __init__(self, *args, **kwargs):
+        super(DBConn, self).__init__(*args, **kwargs)
+
+    def _startup(self, *args, **kwargs):
+        self.debug = kwargs.get('debug', False)
+        # Track which statements prepare() has already sent to the database
+        self.prepared_statements = {}
+        self.__createconn()
+
+    def __setuptables(self):
+        self.tbl_architecture = Table('architecture', self.db_meta, autoload=True)
+        self.tbl_archive = Table('archive', self.db_meta, autoload=True)
+        self.tbl_bin_associations = Table('bin_associations', self.db_meta, autoload=True)
+        self.tbl_binaries = Table('binaries', self.db_meta, autoload=True)
+        self.tbl_component = Table('component', self.db_meta, autoload=True)
+        self.tbl_config = Table('config', self.db_meta, autoload=True)
+        self.tbl_content_associations = Table('content_associations', self.db_meta, autoload=True)
+        self.tbl_content_file_names = Table('content_file_names', self.db_meta, autoload=True)
+        self.tbl_content_file_paths = Table('content_file_paths', self.db_meta, autoload=True)
+        self.tbl_dsc_files = Table('dsc_files', self.db_meta, autoload=True)
+        self.tbl_files = Table('files', self.db_meta, autoload=True)
+        self.tbl_fingerprint = Table('fingerprint', self.db_meta, autoload=True)
+        self.tbl_keyrings = Table('keyrings', self.db_meta, autoload=True)
+        self.tbl_location = Table('location', self.db_meta, autoload=True)
+        self.tbl_maintainer = Table('maintainer', self.db_meta, autoload=True)
+        self.tbl_override = Table('override', self.db_meta, autoload=True)
+        self.tbl_override_type = Table('override_type', self.db_meta, autoload=True)
+        self.tbl_pending_content_associations = Table('pending_content_associations', self.db_meta, autoload=True)
+        self.tbl_priority = Table('priority', self.db_meta, autoload=True)
+        self.tbl_queue = Table('queue', self.db_meta, autoload=True)
+        self.tbl_queue_build = Table('queue_build', self.db_meta, autoload=True)
+        self.tbl_section = Table('section', self.db_meta, autoload=True)
+        self.tbl_source = Table('source', self.db_meta, autoload=True)
+        self.tbl_src_associations = Table('src_associations', self.db_meta, autoload=True)
+        self.tbl_src_uploaders = Table('src_uploaders', self.db_meta, autoload=True)
+        self.tbl_suite = Table('suite', self.db_meta, autoload=True)
+        self.tbl_suite_architectures = Table('suite_architectures', self.db_meta, autoload=True)
+        self.tbl_uid = Table('uid', self.db_meta, autoload=True)
+
+    def __setupmappers(self):
+        mapper(Architecture, self.tbl_architecture,
+               properties = dict(arch_id = self.tbl_architecture.c.id))
+
+        mapper(Archive, self.tbl_archive,
+               properties = dict(archive_id = self.tbl_archive.c.id,
+                                 archive_name = self.tbl_archive.c.name))
+
+        mapper(BinAssociation, self.tbl_bin_associations,
+               properties = dict(ba_id = self.tbl_bin_associations.c.id,
+                                 suite_id = self.tbl_bin_associations.c.suite,
+                                 suite = relation(Suite),
+                                 binary_id = self.tbl_bin_associations.c.bin,
+                                 binary = relation(Binary)))
+
+        mapper(Binary, self.tbl_binaries,
+               properties = dict(binary_id = self.tbl_binaries.c.id,
+                                 package = self.tbl_binaries.c.package,
+                                 version = self.tbl_binaries.c.version,
+                                 maintainer_id = self.tbl_binaries.c.maintainer,
+                                 maintainer = relation(Maintainer),
+                                 source_id = self.tbl_binaries.c.source,
+                                 source = relation(Source),
+                                 arch_id = self.tbl_binaries.c.architecture,
+                                 architecture = relation(Architecture),
+                                 poolfile_id = self.tbl_binaries.c.file,
+                                 poolfile = relation(PoolFile),
+                                 binarytype = self.tbl_binaries.c.type,
+                                 fingerprint_id = self.tbl_binaries.c.sig_fpr,
+                                 fingerprint = relation(Fingerprint),
+                                 install_date = self.tbl_binaries.c.install_date,
+                                 binassociations = relation(BinAssociation,
+                                                            primaryjoin=(self.tbl_binaries.c.id==self.tbl_bin_associations.c.bin))))
+
+        mapper(Component, self.tbl_component,
+               properties = dict(component_id = self.tbl_component.c.id,
+                                 component_name = self.tbl_component.c.name))
+
+        mapper(DBConfig, self.tbl_config,
+               properties = dict(config_id = self.tbl_config.c.id))
+
+        mapper(ContentAssociation, self.tbl_content_associations,
+               properties = dict(ca_id = self.tbl_content_associations.c.id,
+                                 filename_id = self.tbl_content_associations.c.filename,
+                                 filename    = relation(ContentFilename),
+                                 filepath_id = self.tbl_content_associations.c.filepath,
+                                 filepath    = relation(ContentFilepath),
+                                 binary_id   = self.tbl_content_associations.c.binary_pkg,
+                                 binary      = relation(Binary)))
+
+
+        mapper(ContentFilename, self.tbl_content_file_names,
+               properties = dict(cafilename_id = self.tbl_content_file_names.c.id,
+                                 filename = self.tbl_content_file_names.c.file))
+
+        mapper(ContentFilepath, self.tbl_content_file_paths,
+               properties = dict(cafilepath_id = self.tbl_content_file_paths.c.id,
+                                 filepath = self.tbl_content_file_paths.c.path))
+
+        mapper(DSCFile, self.tbl_dsc_files,
+               properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
+                                 source_id = self.tbl_dsc_files.c.source,
+                                 source = relation(Source),
+                                 poolfile_id = self.tbl_dsc_files.c.file,
+                                 poolfile = relation(PoolFile)))
+
+        mapper(PoolFile, self.tbl_files,
+               properties = dict(file_id = self.tbl_files.c.id,
+                                 filesize = self.tbl_files.c.size,
+                                 location_id = self.tbl_files.c.location,
+                                 location = relation(Location)))
+
+        mapper(Fingerprint, self.tbl_fingerprint,
+               properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
+                                 uid_id = self.tbl_fingerprint.c.uid,
+                                 uid = relation(Uid),
+                                 keyring_id = self.tbl_fingerprint.c.keyring,
+                                 keyring = relation(Keyring)))
+
+        mapper(Keyring, self.tbl_keyrings,
+               properties = dict(keyring_name = self.tbl_keyrings.c.name,
+                                 keyring_id = self.tbl_keyrings.c.id))
+
+        mapper(Location, self.tbl_location,
+               properties = dict(location_id = self.tbl_location.c.id,
+                                 component_id = self.tbl_location.c.component,
+                                 component = relation(Component),
+                                 archive_id = self.tbl_location.c.archive,
+                                 archive = relation(Archive),
+                                 archive_type = self.tbl_location.c.type))
+
+        mapper(Maintainer, self.tbl_maintainer,
+               properties = dict(maintainer_id = self.tbl_maintainer.c.id))
+
+        mapper(Override, self.tbl_override,
+               properties = dict(suite_id = self.tbl_override.c.suite,
+                                 suite = relation(Suite),
+                                 component_id = self.tbl_override.c.component,
+                                 component = relation(Component),
+                                 priority_id = self.tbl_override.c.priority,
+                                 priority = relation(Priority),
+                                 section_id = self.tbl_override.c.section,
+                                 section = relation(Section),
+                                 overridetype_id = self.tbl_override.c.type,
+                                 overridetype = relation(OverrideType)))
+
+        mapper(OverrideType, self.tbl_override_type,
+               properties = dict(overridetype = self.tbl_override_type.c.type,
+                                 overridetype_id = self.tbl_override_type.c.id))
+
+        mapper(PendingContentAssociation, self.tbl_pending_content_associations,
+               properties = dict(pca_id = self.tbl_pending_content_associations.c.id,
+                                 filepath_id = self.tbl_pending_content_associations.c.filepath,
+                                 filepath = relation(ContentFilepath),
+                                 filename_id = self.tbl_pending_content_associations.c.filename,
+                                 filename = relation(ContentFilename)))
+
+        mapper(Priority, self.tbl_priority,
+               properties = dict(priority_id = self.tbl_priority.c.id))
+
+        mapper(Queue, self.tbl_queue,
+               properties = dict(queue_id = self.tbl_queue.c.id))
+
+        mapper(QueueBuild, self.tbl_queue_build,
+               properties = dict(suite_id = self.tbl_queue_build.c.suite,
+                                 queue_id = self.tbl_queue_build.c.queue,
+                                 queue = relation(Queue)))
+
+        mapper(Section, self.tbl_section,
+               properties = dict(section_id = self.tbl_section.c.id))
+
+        mapper(Source, self.tbl_source,
+               properties = dict(source_id = self.tbl_source.c.id,
+                                 version = self.tbl_source.c.version,
+                                 maintainer_id = self.tbl_source.c.maintainer,
+                                 maintainer = relation(Maintainer,
+                                                       primaryjoin=(self.tbl_source.c.maintainer==self.tbl_maintainer.c.id)),
+                                 poolfile_id = self.tbl_source.c.file,
+                                 poolfile = relation(PoolFile),
+                                 fingerprint_id = self.tbl_source.c.sig_fpr,
+                                 fingerprint = relation(Fingerprint),
+                                 changedby_id = self.tbl_source.c.changedby,
+                                 changedby = relation(Maintainer,
+                                                      primaryjoin=(self.tbl_source.c.changedby==self.tbl_maintainer.c.id)),
+                                 srcfiles = relation(DSCFile,
+                                                     primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
+                                 srcassociations = relation(SrcAssociation,
+                                                            primaryjoin=(self.tbl_source.c.id==self.tbl_src_associations.c.source))))
+
+        mapper(SrcAssociation, self.tbl_src_associations,
+               properties = dict(sa_id = self.tbl_src_associations.c.id,
+                                 suite_id = self.tbl_src_associations.c.suite,
+                                 suite = relation(Suite),
+                                 source_id = self.tbl_src_associations.c.source,
+                                 source = relation(Source)))
+
+        mapper(SrcUploader, self.tbl_src_uploaders,
+               properties = dict(uploader_id = self.tbl_src_uploaders.c.id,
+                                 source_id = self.tbl_src_uploaders.c.source,
+                                 source = relation(Source,
+                                                   primaryjoin=(self.tbl_src_uploaders.c.source==self.tbl_source.c.id)),
+                                 maintainer_id = self.tbl_src_uploaders.c.maintainer,
+                                 maintainer = relation(Maintainer,
+                                                       primaryjoin=(self.tbl_src_uploaders.c.maintainer==self.tbl_maintainer.c.id))))
+
+        mapper(Suite, self.tbl_suite,
+               properties = dict(suite_id = self.tbl_suite.c.id))
+
+        mapper(SuiteArchitecture, self.tbl_suite_architectures,
+               properties = dict(suite_id = self.tbl_suite_architectures.c.suite,
+                                 suite = relation(Suite),
+                                 arch_id = self.tbl_suite_architectures.c.architecture,
+                                 architecture = relation(Architecture)))
+
+        mapper(Uid, self.tbl_uid,
+               properties = dict(uid_id = self.tbl_uid.c.id))
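+
+        # A sketch of what these mappings give callers (attribute names as
+        # defined above): for a Binary object "b" pulled out of a session,
+        #   b.architecture.arch_string  ->  e.g. 'i386'
+        #   b.source.version            ->  version of the corresponding source
+        #   b.poolfile.filename         ->  path of the file in the pool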
+
+    ## Connection functions
+    def __createconn(self):
+        from config import Config
+        cnf = Config()
+        if cnf["DB::Host"]:
+            # TCP/IP
+            connstr = "postgres://%s" % cnf["DB::Host"]
+            if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
+                connstr += ":%s" % cnf["DB::Port"]
+            connstr += "/%s" % cnf["DB::Name"]
+        else:
+            # Unix Socket
+            connstr = "postgres:///%s" % cnf["DB::Name"]
+            if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
+                connstr += "?port=%s" % cnf["DB::Port"]
+
+        self.db_pg   = create_engine(connstr, echo=self.debug)
+        self.db_meta = MetaData()
+        self.db_meta.bind = self.db_pg
+        self.db_smaker = sessionmaker(bind=self.db_pg,
+                                      autoflush=True,
+                                      autocommit=False)
+
+        self.__setuptables()
+        self.__setupmappers()
+
+    def session(self):
+        return self.db_smaker()
+
+    def prepare(self, name, statement):
+        if not self.prepared_statements.has_key(name):
+            self.db_pg.execute(statement)
+            self.prepared_statements[name] = statement
 
-        """
-        return self.__get_id('id', 'component', 'lower(name)', component)
 
     def get_location_id(self, location, component, archive):
         """
@@ -296,281 +856,202 @@ class DBConn(Singleton):
 
         return res
 
-    def get_source_id(self, source, version):
-        """
-        Returns database id for the combination of C{source} and C{version}
-          - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
-          - B{version}
-        Results are kept in a cache during runtime to minimize database queries.
-
-        @type source: string
-        @param source: source package name
 
-        @type version: string
-        @param version: the source version
-
-        @rtype: int
-        @return: the database id for the source
-
-        """
-        return self.__get_single_id("SELECT id FROM source s WHERE s.source=%(source)s AND s.version=%(version)s",
-                                 {'source': source, 'version': version}, cachename='source')
 
-    def get_suite_version(self, source, suite):
-        """
-        Returns database id for a combination of C{source} and C{suite}.
-
-          - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
-          - B{suite} - a suite name, eg. I{unstable}
-
-        Results are kept in a cache during runtime to minimize database queries.
-
-        @type source: string
-        @param source: source package name
-
-        @type suite: string
-        @param suite: the suite name
-
-        @rtype: string
-        @return: the version for I{source} in I{suite}
-
-        """
-        return self.__get_single_id("""
-        SELECT s.version FROM source s, suite su, src_associations sa
-        WHERE sa.source=s.id
-          AND sa.suite=su.id
-          AND su.suite_name=%(suite)s
-          AND s.source=%(source)""", {'suite': suite, 'source': source}, cachename='suite_version')
-
-
-    def get_files_id (self, filename, size, md5sum, location_id):
-        """
-        Returns -1, -2 or the file_id for filename, if its C{size} and C{md5sum} match an
-        existing copy.
+def get_files_id (self, filename, size, md5sum, location_id):
+    """
+    Returns -1, -2 or the file_id for filename, if its C{size} and C{md5sum} match an
+    existing copy.
 
-        The database is queried using the C{filename} and C{location_id}. If a file does exist
-        at that location, the existing size and md5sum are checked against the provided
-        parameters. A size or checksum mismatch returns -2. If more than one entry is
-        found within the database, a -1 is returned, no result returns None, otherwise
-        the file id.
+    The database is queried using the C{filename} and C{location_id}. If a file does exist
+    at that location, the existing size and md5sum are checked against the provided
+    parameters. A size or checksum mismatch returns -2. If more than one entry is
+    found within the database, a -1 is returned, no result returns None, otherwise
+    the file id.
 
-        Results are kept in a cache during runtime to minimize database queries.
+    @type filename: string
+    @param filename: the filename of the file to check against the DB
 
-        @type filename: string
-        @param filename: the filename of the file to check against the DB
+    @type size: int
+    @param size: the size of the file to check against the DB
 
-        @type size: int
-        @param size: the size of the file to check against the DB
+    @type md5sum: string
+    @param md5sum: the md5sum of the file to check against the DB
 
-        @type md5sum: string
-        @param md5sum: the md5sum of the file to check against the DB
+    @type location_id: int
+    @param location_id: the id of the location as returned by L{get_location_id}
 
-        @type location_id: int
-        @param location_id: the id of the location as returned by L{get_location_id}
+    @rtype: int / None
+    @return: Various return values are possible:
+               - -2: size/checksum error
+               - -1: more than one file found in database
+               - None: no file found in database
+               - int: file id
 
-        @rtype: int / None
-        @return: Various return values are possible:
-                   - -2: size/checksum error
-                   - -1: more than one file found in database
-                   - None: no file found in database
-                   - int: file id
+    """
+    values = {'filename' : filename,
+              'location' : location_id}
 
-        """
-        values = {'filename' : filename,
-                  'location' : location_id}
+    res = None
+
+    if not res:
+        query = """SELECT id, size, md5sum
+                   FROM files
+                   WHERE filename = %(filename)s AND location = %(location)s"""
 
-        res = self.caches['files'].GetValue( values )
+        cursor = self.db_con.cursor()
+        cursor.execute( query, values )
 
-        if not res:
-            query = """SELECT id, size, md5sum
-                       FROM files
-                       WHERE filename = %(filename)s AND location = %(location)s"""
+        if cursor.rowcount == 0:
+            res = None
 
-            cursor = self.db_con.cursor()
-            cursor.execute( query, values )
+        elif cursor.rowcount != 1:
+            res = -1
 
-            if cursor.rowcount == 0:
-                res = None
+        else:
+            row = cursor.fetchone()
 
-            elif cursor.rowcount != 1:
-                res = -1
+            if row[1] != int(size) or row[2] != md5sum:
+                res =  -2
 
             else:
-                row = cursor.fetchone()
-
-                if row[1] != size or row[2] != md5sum:
-                    res =  -2
-
-                else:
-                    self.caches[cachename].SetValue(values, row[0])
-                    res = row[0]
+                res = row[0]
 
-        return res
+    return res
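+
+# Caller-side sketch of how the return value is typically handled ('reject' and
+# 'add_file' are illustrative names, not helpers defined in this file):
+#
+#   files_id = get_files_id(filename, size, md5sum, location_id)
+#   if files_id == -2:
+#       reject("size or md5sum mismatch for %s" % filename)
+#   elif files_id == -1:
+#       reject("%s is in the database more than once" % filename)
+#   elif files_id is None:
+#       files_id = add_file(filename, size, md5sum, location_id)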
 
 
-    def get_or_set_contents_file_id(self, filename):
-        """
-        Returns database id for given filename.
+def get_or_set_contents_file_id(self, filename):
+    """
+    Returns database id for given filename.
 
-        Results are kept in a cache during runtime to minimize database queries.
-        If no matching file is found, a row is inserted.
+    If no matching file is found, a row is inserted.
 
-        @type filename: string
-        @param filename: The filename
+    @type filename: string
+    @param filename: The filename
 
-        @rtype: int
-        @return: the database id for the given component
-        """
-        try:
-            values={'value': filename}
-            query = "SELECT id FROM content_file_names WHERE file = %(value)s"
-            id = self.__get_single_id(query, values, cachename='content_file_names')
-            if not id:
-                c = self.db_con.cursor()
-                c.execute( "INSERT INTO content_file_names VALUES (DEFAULT, %(value)s) RETURNING id",
-                           values )
-
-                id = c.fetchone()[0]
-                self.caches['content_file_names'].SetValue(values, id)
-
-            return id
-        except:
-            traceback.print_exc()
-            raise
-
-    def get_or_set_contents_path_id(self, path):
-        """
-        Returns database id for given path.
+    @rtype: int
+    @return: the database id for the given filename
+    """
+    try:
+        values={'value': filename}
+        query = "SELECT id FROM content_file_names WHERE file = %(value)s"
+        c = self.db_con.cursor()
+        c.execute(query, values)
+        if c.rowcount != 1:
+            # Not seen before: insert it; RETURNING hands back the new id
+            c.execute( "INSERT INTO content_file_names VALUES (DEFAULT, %(value)s) RETURNING id",
+                       values )
+
+        id = c.fetchone()[0]
+
+        return id
+    except:
+        traceback.print_exc()
+        raise
+
+def get_or_set_contents_path_id(self, path):
+    """
+    Returns database id for given path.
 
-        Results are kept in a cache during runtime to minimize database queries.
-        If no matching file is found, a row is inserted.
+    If no matching file is found, a row is inserted.
 
-        @type path: string
-        @param path: The filename
+    @type path: string
+    @param path: The path
 
-        @rtype: int
-        @return: the database id for the given component
-        """
-        try:
-            values={'value': path}
-            query = "SELECT id FROM content_file_paths WHERE path = %(value)s"
-            id = self.__get_single_id(query, values, cachename='content_path_names')
-            if not id:
-                c = self.db_con.cursor()
-                c.execute( "INSERT INTO content_file_paths VALUES (DEFAULT, %(value)s) RETURNING id",
-                           values )
-
-                id = c.fetchone()[0]
-                self.caches['content_path_names'].SetValue(values, id)
-
-            return id
-        except:
-            traceback.print_exc()
-            raise
-
-    def get_suite_architectures(self, suite):
-        """
-        Returns list of architectures for C{suite}.
-
-        @type suite: string, int
-        @param suite: the suite name or the suite_id
-
-        @rtype: list
-        @return: the list of architectures for I{suite}
-        """
+    @rtype: int
+    @return: the database id for the given path
+    """
+    try:
+        values={'value': path}
+        query = "SELECT id FROM content_file_paths WHERE path = %(value)s"
+        c = self.db_con.cursor()
+        c.execute(query, values)
+        if c.rowcount != 1:
+            # Not seen before: insert it; RETURNING hands back the new id
+            c.execute( "INSERT INTO content_file_paths VALUES (DEFAULT, %(value)s) RETURNING id",
+                       values )
 
-        suite_id = None
-        if type(suite) == str:
-            suite_id = self.get_suite_id(suite)
-        elif type(suite) == int:
-            suite_id = suite
-        else:
-            return None
+        id = c.fetchone()[0]
 
-        c = self.db_con.cursor()
-        c.execute( """SELECT a.arch_string FROM suite_architectures sa
-                      JOIN architecture a ON (a.id = sa.architecture)
-                      WHERE suite='%s'""" % suite_id )
+        return id
+    except:
+        traceback.print_exc()
+        raise
 
-        return map(lambda x: x[0], c.fetchall())
 
-    def insert_content_paths(self, bin_id, fullpaths):
-        """
-        Make sure given path is associated with given binary id
+def insert_content_paths(self, bin_id, fullpaths):
+    """
+    Make sure the given paths are associated with the given binary id
 
-        @type bin_id: int
-        @param bin_id: the id of the binary
-        @type fullpath: string
-        @param fullpath: the path of the file being associated with the binary
+    @type bin_id: int
+    @param bin_id: the id of the binary
+    @type fullpaths: list
+    @param fullpaths: the list of paths of the file being associated with the binary
 
-        @return True upon success
-        """
+    @return: True upon success
+    """
 
-        c = self.db_con.cursor()
+    c = self.db_con.cursor()
 
-        c.execute("BEGIN WORK")
-        try:
+    c.execute("BEGIN WORK")
+    try:
 
-            for fullpath in fullpaths:
-                (path, file) = os.path.split(fullpath)
+        for fullpath in fullpaths:
+            (path, file) = os.path.split(fullpath)
 
-                # Get the necessary IDs ...
-                file_id = self.get_or_set_contents_file_id(file)
-                path_id = self.get_or_set_contents_path_id(path)
+            # Get the necessary IDs ...
+            file_id = self.get_or_set_contents_file_id(file)
+            path_id = self.get_or_set_contents_path_id(path)
 
-                c.execute("""INSERT INTO content_associations
-                               (binary_pkg, filepath, filename)
-                           VALUES ( '%d', '%d', '%d')""" % (bin_id, path_id, file_id) )
+            c.execute("""INSERT INTO content_associations
+                           (binary_pkg, filepath, filename)
+                       VALUES ( '%d', '%d', '%d')""" % (bin_id, path_id, file_id) )
 
-            c.execute("COMMIT")
-            return True
-        except:
-            traceback.print_exc()
-            c.execute("ROLLBACK")
-            return False
+        c.execute("COMMIT")
+        return True
+    except:
+        traceback.print_exc()
+        c.execute("ROLLBACK")
+        return False
 
-    def insert_pending_content_paths(self, package, fullpaths):
-        """
-        Make sure given paths are temporarily associated with given
-        package
+def insert_pending_content_paths(self, package, fullpaths):
+    """
+    Make sure given paths are temporarily associated with given
+    package
 
-        @type package: dict
-        @param package: the package to associate with should have been read in from the binary control file
-        @type fullpaths: list
-        @param fullpaths: the list of paths of the file being associated with the binary
+    @type package: dict
+    @param package: the package to associate with; should have been read in from the binary control file
+    @type fullpaths: list
+    @param fullpaths: the list of paths of the file being associated with the binary
 
-        @return True upon success
-        """
+    @return: True upon success
+    """
 
-        c = self.db_con.cursor()
-
-        c.execute("BEGIN WORK")
-        try:
-            arch_id = self.get_architecture_id(package['Architecture'])
-
-                # Remove any already existing recorded files for this package
-            c.execute("""DELETE FROM pending_content_associations
-                         WHERE package=%(Package)s
-                         AND version=%(Version)s
-                         AND arch_id=%d""" % arch_id, package )
-
-            for fullpath in fullpaths:
-                (path, file) = os.path.split(fullpath)
-
-                if path.startswith( "./" ):
-                    path = path[2:]
-                # Get the necessary IDs ...
-                file_id = self.get_or_set_contents_file_id(file)
-                path_id = self.get_or_set_contents_path_id(path)
-
-                c.execute("""INSERT INTO pending_content_associations
-                               (package, version, architecture, filepath, filename)
-                            VALUES (%%(Package)s, %%(Version)s, '%d', '%d', '%d')"""
-                    % (arch_id, path_id, file_id), package )
-
-            c.execute("COMMIT")
-            return True
-        except:
-            traceback.print_exc()
-            c.execute("ROLLBACK")
-            return False
+    c = self.db_con.cursor()
+
+    c.execute("BEGIN WORK")
+    try:
+        arch_id = self.get_architecture_id(package['Architecture'])
+
+        # Remove any already existing recorded files for this package
+        c.execute("""DELETE FROM pending_content_associations
+                     WHERE package=%(Package)s
+                     AND version=%(Version)s
+                     AND architecture=%(ArchID)s""", {'Package': package['Package'],
+                                                      'Version': package['Version'],
+                                                      'ArchID':  arch_id})
+
+        for fullpath in fullpaths:
+            (path, file) = os.path.split(fullpath)
+
+            if path.startswith( "./" ):
+                path = path[2:]
+            # Get the necessary IDs ...
+            file_id = self.get_or_set_contents_file_id(file)
+            path_id = self.get_or_set_contents_path_id(path)
+
+            c.execute("""INSERT INTO pending_content_associations
+                           (package, version, architecture, filepath, filename)
+                        VALUES (%%(Package)s, %%(Version)s, '%d', '%d', '%d')"""
+                % (arch_id, path_id, file_id), package )
+
+        c.execute("COMMIT")
+        return True
+    except:
+        traceback.print_exc()
+        c.execute("ROLLBACK")
+        return False