add_dsc_to_db: fix typo u <-> up
diff --git a/daklib/dbconn.py b/daklib/dbconn.py
index ecb610311bb23ad36cfecb81fa37c65f1a3032c6..85ba3ab3fb90fe25d681b63269cf24831f7ad7b1 100644
@@ -377,16 +377,16 @@ def get_binary_from_name_suite(package, suitename, session=None):
 
     sql = """SELECT DISTINCT(b.package), b.version, c.name, su.suite_name
              FROM binaries b, files fi, location l, component c, bin_associations ba, suite su
-             WHERE b.package=:package
+             WHERE b.package='%(package)s'
                AND b.file = fi.id
                AND fi.location = l.id
                AND l.component = c.id
                AND ba.bin=b.id
                AND ba.suite = su.id
-               AND su.suite_name=:suitename
+               AND su.suite_name %(suitename)s
           ORDER BY b.version DESC"""
 
-    return session.execute(sql, {'package': package, 'suitename': suitename})
+    return session.execute(sql % {'package': package, 'suitename': suitename})
 
 __all__.append('get_binary_from_name_suite')
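Note on the hunk above: the '+' lines substitute the values straight into the SQL string with Python %-formatting instead of binding them as driver parameters, and the '=' in front of %(suitename)s was dropped, so the caller apparently supplies the comparison operator (and trusted, pre-escaped values) itself. A minimal, hypothetical call site under that assumption, not taken from dak:

    # Hypothetical usage, assuming suitename carries its own SQL operator
    # because the query now reads "AND su.suite_name %(suitename)s".
    rows = get_binary_from_name_suite('dpkg', "= 'unstable'")
    for package, version, component_name, suite_name in rows:
        print package, version, component_name, suite_name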
 
@@ -1437,70 +1437,14 @@ class DBChange(object):
     def __repr__(self):
         return '<DBChange %s>' % self.changesname
 
-    def upload_into_db(self, u, path):
-        cnf = Config()
-        session = DBConn().session().object_session(self)
-
-        files = []
-        for chg_fn, entry in u.pkg.files.items():
-            try:
-                f = open(os.path.join(path, chg_fn))
-                cpf = ChangePendingFile()
-                cpf.filename = chg_fn
-                cpf.size = entry['size']
-                cpf.md5sum = entry['md5sum']
-
-                if entry.has_key('sha1sum'):
-                    cpf.sha1sum = entry['sha1sum']
-                else:
-                    f.seek(0)
-                    cpf.sha1sum = apt_pkg.sha1sum(f)
-
-                if entry.has_key('sha256sum'):
-                    cpf.sha256sum = entry['sha256sum']
-                else:
-                    f.seek(0)
-                    cpf.sha256sum = apt_pkg.sha256sum(f)
-
-                session.add(cpf)
-                files.append(cpf)
-                f.close()
-
-            except IOError:
-                # Can't find the file, try to look it up in the pool
-                from utils import poolify
-                poolname = poolify(entry["source"], entry["component"])
-                l = get_location(cnf["Dir::Pool"], entry["component"], session=session)
-
-                found, poolfile = check_poolfile(os.path.join(poolname, chg_fn),
-                                                 entry['size'],
-                                                 entry["md5sum"],
-                                                 l.location_id,
-                                                 session=session)
-
-                if found is None:
-                    Logger.log(["E: Found multiple files for pool (%s) for %s" % (chg_fn, entry["component"])])
-                elif found is False and poolfile is not None:
-                    Logger.log(["E: md5sum/size mismatch for %s in pool" % (chg_fn)])
-                else:
-                    if poolfile is None:
-                        Logger.log(["E: Could not find %s in pool" % (chg_fn)])
-                    else:
-                        chg.poolfiles.append(poolfile)
-
-        chg.files = files
-
-
     def clean_from_queue(self):
         session = DBConn().session().object_session(self)
 
         # Remove changes_pool_files entries
-        for pf in self.poolfiles:
-            self.poolfiles.remove(pf)
+        self.poolfiles = []
 
-        # Remove change
-        for cf in self.files:
-            self.files.remove(cf)
+        # Remove changes_pending_files references
+        self.files = []
 
         # Clear out of queue
         self.in_queue = None
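The two removed loops above deleted entries from self.poolfiles and self.files while iterating over those same collections, which skips every other element, so the change was never fully detached; assigning an empty list clears the whole relationship in one step. A small standalone sketch of the pitfall using plain Python lists (not the SQLAlchemy relationships used here):

    # Not dak code; plain lists only, to show why remove-while-iterating fails.
    files = ['a.deb', 'b.deb', 'c.deb', 'd.deb']
    for f in files:
        files.remove(f)
    print files    # ['b.deb', 'd.deb']: half the entries survive
    files = []     # the replacement approach: drop everything in one assignment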
@@ -2333,21 +2277,22 @@ def add_dsc_to_db(u, filename, session=None):
             uploader_ids.append(get_or_set_maintainer(up, session).maintainer_id)
 
     added_ids = {}
-    for up in uploader_ids:
-        if added_ids.has_key(up):
-            utils.warn("Already saw uploader %s for source %s" % (up, source.source))
+    for up_id in uploader_ids:
+        if added_ids.has_key(up_id):
+            import utils
+            utils.warn("Already saw uploader %s for source %s" % (up_id, source.source))
             continue
 
-        added_ids[u]=1
+        added_ids[up_id]=1
 
         su = SrcUploader()
-        su.maintainer_id = up
+        su.maintainer_id = up_id
         su.source_id = source.source_id
         session.add(su)
 
     session.flush()
 
-    return dsc_component, dsc_location_id, pfs
+    return source, dsc_component, dsc_location_id, pfs
 
 __all__.append('add_dsc_to_db')
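Two things change in the last hunk: added_ids is now keyed on the maintainer id (up_id) rather than on the upload object u, so the "Already saw uploader" warning can actually fire when a source lists the same uploader twice, and the function now returns the source object as the first element of its result tuple. A hypothetical caller (no call site appears in this diff) would therefore unpack four values:

    # Hypothetical call site, assuming u, filename and session as in the diff.
    source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(u, filename, session)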