git.decadent.org.uk Git - dak.git/commitdiff
revert change to get_files_id
author     Mark Hymers <mhy@debian.org>
           Sat, 16 Aug 2008 03:14:11 +0000 (03:14 +0000)
committer  Mark Hymers <mhy@debian.org>
           Sat, 16 Aug 2008 03:14:11 +0000 (03:14 +0000)
Signed-off-by: Mark Hymers <mhy@debian.org>
ChangeLog
dak/process_accepted.py
dak/process_unchecked.py
daklib/database.py
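
At a glance, the interface change being reverted (a minimal sketch; the signatures are the ones in the daklib/database.py hunks below, while the import style and the argument values in the example call are assumptions for illustration):

# Signature before this commit (the checksummed variant being backed out):
#     get_files_id(filename, size, md5sum, sha1sum, sha256sum, location_id)
# Signature after this commit (restored):
#     get_files_id(filename, size, md5sum, location_id)

from daklib import database

# Illustrative call only: placeholder pool path, size, md5sum and location id.
files_id = database.get_files_id("pool/main/h/hello/hello_2.2-1.dsc",
                                 1418, "d41d8cd98f00b204e9800998ecf8427e", 1)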

index 770fe5ff7a1c827f62086aad6cb4c06bd67c5ab2..3e3f33e17d99a365095a4aae29e6fafaf5e27c77 100644 (file)
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,5 +1,9 @@
 2008-08-15  Mark Hymers  <mhy@debian.org>
 
+       *  dak/process_accepted.py, dak/process_unchecked.py,
+       daklib/database.py: Don't change get_files_id to use sha1sum and
+       sha256sum.
+
        * setup/init_pool.sql, dak/check_archive.py, dak/decode_dot_dak.py,
        dak/process_accepted.py, dak/process_unchecked.py, daklib/database.py,
        daklib/queue.py, daklib/utils.py: Attempt to add sha1sum and
index b28d9f9bc00eab3226808616f4613bf4b2eaa2f2..a26ce57a4226d1246871c13180eff770658dcaf6 100755 (executable)
--- a/dak/process_accepted.py
+++ b/dak/process_accepted.py
@@ -311,7 +311,7 @@ def install ():
                 # files id is stored in dsc_files by check_dsc().
                 files_id = dsc_files[dsc_file].get("files id", None)
                 if files_id == None:
-                    files_id = database.get_files_id(filename, dsc_files[dsc_file]["size"], dsc_files[dsc_file]["md5sum"], files[file]["sha1sum"], files[file]["sha256sum"], dsc_location_id)
+                    files_id = database.get_files_id(filename, dsc_files[dsc_file]["size"], dsc_files[dsc_file]["md5sum"], dsc_location_id)
                 # FIXME: needs to check for -1/-2 and or handle exception
                 if files_id == None:
                     files_id = database.set_files_id (filename, dsc_files[dsc_file]["size"], dsc_files[dsc_file]["md5sum"], files[file]["sha1sum"], files[file]["sha256sum"], dsc_location_id)
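
The install() hunk above keeps the usual look-up-then-insert pattern for pool files; a condensed sketch of that flow, with the dsc_files bookkeeping omitted (filename, size, md5sum, sha1sum, sha256sum and dsc_location_id stand in for the values the real code pulls out of its dictionaries):

files_id = database.get_files_id(filename, size, md5sum, dsc_location_id)
if files_id == None:
    # No existing row for this pool file: insert one.  set_files_id() still
    # takes the checksums, since only get_files_id() was reverted here.
    files_id = database.set_files_id(filename, size, md5sum,
                                     sha1sum, sha256sum, dsc_location_id)
# As the FIXME above notes, the -1/-2 sentinels from get_files_id() are not
# handled at this call site.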
index 3354a5774c91f4a23fe28ad42a2d2edc195c25ba..93187709cf78bc4dedf12c998b8b3539b13f2441 100755 (executable)
--- a/dak/process_unchecked.py
+++ b/dak/process_unchecked.py
@@ -630,11 +630,11 @@ def check_files():
 
             # Check the md5sum & size against existing files (if any)
             files[f]["pool name"] = utils.poolify (changes["source"], files[f]["component"])
-            files_id = database.get_files_id(files[f]["pool name"] + f, files[f]["size"], files[f]["md5sum"], files[f]["sha1sum"], files[f]["sha256sum"], files[f]["location id"])
+            files_id = database.get_files_id(files[f]["pool name"] + f, files[f]["size"], files[f]["md5sum"], files[f]["location id"])
             if files_id == -1:
                 reject("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
             elif files_id == -2:
-                reject("md5sum, sha1sum, sha256sum and/or size mismatch on existing copy of %s." % (f))
+                reject("md5sum and/or size mismatch on existing copy of %s." % (f))
             files[f]["files id"] = files_id
 
             # Check for packages that have moved from one component to another
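
The -1/-2 branches above encode the return-value contract of get_files_id(); sketched out, with reject() and the files[f] entries taken from the hunk, it is:

# Return values of database.get_files_id() after this revert:
#   a files id -> a row exists for (filename, location) and size + md5sum match
#   -1         -> more than one row matched (database inconsistency)
#   -2         -> a row matched but its size and/or md5sum differ
#   None       -> no row yet; callers such as install() fall back to set_files_id()
files_id = database.get_files_id(files[f]["pool name"] + f, files[f]["size"],
                                 files[f]["md5sum"], files[f]["location id"])
if files_id == -1:
    reject("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
elif files_id == -2:
    reject("md5sum and/or size mismatch on existing copy of %s." % (f))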
index b2b55a781eeea7f4139e0bb4306971d9f3ab5cef..e11d3cd6c83777b4fe8d9b0dbb783a9dca91ad68 100755 (executable)
--- a/daklib/database.py
+++ b/daklib/database.py
@@ -317,7 +317,7 @@ def get_or_set_fingerprint_id (fingerprint):
 
 ################################################################################
 
-def get_files_id (filename, size, md5sum, sha1sum, sha256sum, location_id):
+def get_files_id (filename, size, md5sum, location_id):
     global files_id_cache
 
     cache_key = "%s_%d" % (filename, location_id)
@@ -326,7 +326,7 @@ def get_files_id (filename, size, md5sum, sha1sum, sha256sum, location_id):
         return files_id_cache[cache_key]
 
     size = int(size)
-    q = projectB.query("SELECT id, size, md5sum, sha1sum, sha256sum FROM files WHERE filename = '%s' AND location = %d" % (filename, location_id))
+    q = projectB.query("SELECT id, size, md5sum FROM files WHERE filename = '%s' AND location = %d" % (filename, location_id))
     ql = q.getresult()
     if ql:
         if len(ql) != 1:
@@ -334,9 +334,7 @@ def get_files_id (filename, size, md5sum, sha1sum, sha256sum, location_id):
         ql = ql[0]
         orig_size = int(ql[1])
         orig_md5sum = ql[2]
-        orig_sha1sum = ql[3]
-        orig_sha256sum = ql[4]
-        if orig_size != size or orig_md5sum != md5sum or orig_sha1sum != sha1sum or orig_sha256sum != sha256sum:
+        if orig_size != size or orig_md5sum != md5sum:
             return -2
         files_id_cache[cache_key] = ql[0]
         return files_id_cache[cache_key]
@@ -367,7 +365,7 @@ def set_files_id (filename, size, md5sum, sha1sum, sha256sum, location_id):
 
     projectB.query("INSERT INTO files (filename, size, md5sum, sha1sum, sha256sum, location) VALUES ('%s', %d, '%s', '%s', '%s', %d)" % (filename, long(size), md5sum, sha1sum, sha256sum, location_id))
 
-    return get_files_id (filename, size, md5sum, sha1sum, sha256sum, location_id)
+    return get_files_id (filename, size, md5sum, location_id)
 
     ### currval has issues with postgresql 7.1.3 when the table is big
     ### it was taking ~3 seconds to return on auric which is very Not
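
Taken together, the daklib/database.py hunks above leave get_files_id() looking roughly as below. This is a sketch reassembled from the post-revert ('+') side of the diff; the cache-hit guard, the return -1 line and the final fall-through to None sit between or outside the hunks and are filled in here as assumptions.

def get_files_id(filename, size, md5sum, location_id):
    global files_id_cache

    cache_key = "%s_%d" % (filename, location_id)
    if cache_key in files_id_cache:   # assumed guard; only its body appears in the diff
        return files_id_cache[cache_key]

    size = int(size)
    q = projectB.query("SELECT id, size, md5sum FROM files WHERE filename = '%s' AND location = %d" % (filename, location_id))
    ql = q.getresult()
    if ql:
        if len(ql) != 1:
            return -1                 # assumed; matches the -1 handling in check_files()
        ql = ql[0]
        orig_size = int(ql[1])
        orig_md5sum = ql[2]
        if orig_size != size or orig_md5sum != md5sum:
            return -2
        files_id_cache[cache_key] = ql[0]
        return files_id_cache[cache_key]

    return None                       # assumed: no match at all falls through to None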