moved inserts of known_changes to Changes() class. add insert known_changes in p...
diff --git a/daklib/dbconn.py b/daklib/dbconn.py
index eee5cfc2f82e9718b62387eace4eaf4dd39d0629..2b0a849254d928bc0d134d294d77a13f98ebf71c 100755
--- a/daklib/dbconn.py
+++ b/daklib/dbconn.py
@@ -51,7 +51,6 @@ from sqlalchemy.orm.exc import NoResultFound
 from config import Config
 from singleton import Singleton
 from textutils import fix_maintainer
-from utils import ensure_orig_files
 
 ################################################################################
 
@@ -902,6 +901,41 @@ __all__.append('get_or_set_keyring')
 
 ################################################################################
 
+class KnownChange(object):
+    def __init__(self, *args, **kwargs):
+        pass
+
+    def __repr__(self):
+        return '<KnownChange %s>' % self.changesname
+
+__all__.append('KnownChange')
+
+@session_wrapper
+def get_knownchange(filename, session=None):
+    """
+    Returns the KnownChange object for the given C{filename}.
+
+    @type filename: string
+    @param filename: the name of the .changes file to look up
+
+    @type session: Session
+    @param session: Optional SQLA session object (a temporary one will be
+    generated if not supplied)
+
+    @rtype: KnownChange
+    @return: KnownChange object for the given filename (None if not present)
+
+    """
+    q = session.query(KnownChange).filter_by(changesname=filename)
+
+    try:
+        return q.one()
+    except NoResultFound:
+        return None
+
+__all__.append('get_knownchange')
+
+################################################################################
 class Location(object):
     def __init__(self, *args, **kwargs):
         pass
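
The new KnownChange mapping and get_knownchange() helper look up a .changes file by name. A rough sketch of how a caller might use them, creating the row when nothing is found (the check_known_changes name and the explicit session handling are illustrative, not part of this change):

    from daklib.dbconn import DBConn, KnownChange, get_knownchange

    def check_known_changes(changesname):
        # Use an explicit session since we may need to insert a new row.
        session = DBConn().session()

        kc = get_knownchange(changesname, session)
        if kc is None:
            # Not seen before: record the bare filename (the real
            # known_changes table has further columns filled in elsewhere).
            kc = KnownChange()
            kc.changesname = changesname
            session.add(kc)
            session.commit()

        session.close()
        return kc
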
@@ -1415,26 +1449,47 @@ class Queue(object):
 
                 session.add(qb)
 
-            exists, symlinked = ensure_orig_files(changes, dest, session)
-
-            # Add symlinked files to the list of packages for later processing
-            # by apt-ftparchive
-            for filename in symlinked:
-                qb = QueueBuild()
-                qb.suite_id = s.suite_id
-                qb.queue_id = self.queue_id
-                qb.filename = filename
-                qb.in_queue = True
-                session.add(qb)
+            # If the .orig tarballs are in the pool, create a symlink to
+            # them (if one doesn't already exist)
+            from daklib.regexes import re_is_orig_source
+            for dsc_file in changes.dsc_files.keys():
+                # Skip all files except orig tarballs
+                if not re_is_orig_source.match(dsc_file):
+                    continue
+                # Skip orig files not identified in the pool
+                if not (changes.orig_files.has_key(dsc_file) and
+                        changes.orig_files[dsc_file].has_key("id")):
+                    continue
+                orig_file_id = changes.orig_files[dsc_file]["id"]
+                dest = os.path.join(dest_dir, dsc_file)
+
+                # If it doesn't exist, create a symlink
+                if not os.path.exists(dest):
+                    q = session.execute("SELECT l.path, f.filename FROM location l, files f WHERE f.id = :id and f.location = l.id",
+                                        {'id': orig_file_id})
+                    res = q.fetchone()
+                    if not res:
+                        return "[INTERNAL ERROR] Couldn't find id %s in files table." % (orig_file_id)
+
+                    src = os.path.join(res[0], res[1])
+                    os.symlink(src, dest)
 
-            # Update files to ensure they are not removed prematurely
-            for filename in exists:
-                qb = get_queue_build(filename, s.suite_id, session)
-                if qb is None:
+                    # Add it to the list of packages for later processing by apt-ftparchive
+                    qb = QueueBuild()
+                    qb.suite_id = s.suite_id
+                    qb.queue_id = self.queue_id
+                    qb.filename = dest
                     qb.in_queue = True
-                    qb.last_used = None
                     session.add(qb)
 
+                # If it does, update things to ensure it's not removed prematurely
+                else:
+                    qb = get_queue_build(dest, s.suite_id, session)
+                    if qb is not None:
+                        qb.in_queue = True
+                        qb.last_used = None
+                        session.add(qb)
+
         if privatetrans:
             session.commit()
             session.close()
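
The restored block walks changes.dsc_files, keeps only orig tarballs already known in the pool, and either symlinks them into the build queue directory or refreshes their queue_build row so they are not expired prematurely. The raw SQL rebuilds the pool path by joining files to location; the same lookup can be sketched with the PoolFile/Location mappings defined elsewhere in this module (illustrative only, and assumes PoolFile's location relation):

    import os

    def pool_path_for_file(orig_file_id, session):
        # Join files to location and concatenate location.path with
        # files.filename, as the raw SQL in the hunk above does.
        pf = session.query(PoolFile).filter_by(file_id=orig_file_id).one()
        return os.path.join(pf.location.path, pf.filename)
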
@@ -2096,6 +2151,7 @@ class DBConn(Singleton):
         self.tbl_files = Table('files', self.db_meta, autoload=True)
         self.tbl_fingerprint = Table('fingerprint', self.db_meta, autoload=True)
         self.tbl_keyrings = Table('keyrings', self.db_meta, autoload=True)
+        self.tbl_known_changes = Table('known_changes', self.db_meta, autoload=True)
         self.tbl_location = Table('location', self.db_meta, autoload=True)
         self.tbl_maintainer = Table('maintainer', self.db_meta, autoload=True)
         self.tbl_new_comments = Table('new_comments', self.db_meta, autoload=True)
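
Like the other tables here, known_changes is reflected with autoload=True, so its column definitions come from the live database schema rather than being declared in Python. Schematically, outside of DBConn (the connection string is illustrative; dak's database is conventionally named projectb):

    from sqlalchemy import create_engine, MetaData, Table

    engine = create_engine("postgres:///projectb")
    meta = MetaData(bind=engine)
    # Column definitions are read from the database at reflection time.
    known_changes = Table('known_changes', meta, autoload=True)
    print known_changes.columns.keys()
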
@@ -2198,6 +2254,9 @@ class DBConn(Singleton):
                properties = dict(keyring_name = self.tbl_keyrings.c.name,
                                  keyring_id = self.tbl_keyrings.c.id))
 
+        mapper(KnownChange, self.tbl_known_changes,
+               properties = dict(known_change_id = self.tbl_known_changes.c.id))
+
         mapper(Location, self.tbl_location,
                properties = dict(location_id = self.tbl_location.c.id,
                                  component_id = self.tbl_location.c.component,