X-Git-Url: https://git.decadent.org.uk/gitweb/?a=blobdiff_plain;ds=sidebyside;f=daklib%2Fdatabase.py;h=c39c83b1dc7ca46c91036612f3fc0d9ce3e05238;hb=5e83da98cf5bd0100a850e32ede0e0b702044d7c;hp=e11d3cd6c83777b4fe8d9b0dbb783a9dca91ad68;hpb=fc2f0edf2d2bfafd627da4336689d41f064f78f5;p=dak.git

diff --git a/daklib/database.py b/daklib/database.py
index e11d3cd6..c39c83b1 100755
--- a/daklib/database.py
+++ b/daklib/database.py
@@ -19,7 +19,7 @@
 
 ################################################################################
 
-import sys, time, types
+import os, sys, time, types, apt_pkg
 
 ################################################################################
 
@@ -42,6 +42,10 @@ fingerprint_id_cache = {}
 queue_id_cache = {}
 uid_id_cache = {}
 suite_version_cache = {}
+suite_bin_version_cache = {}
+content_path_id_cache = {}
+content_file_id_cache = {}
+insert_contents_file_cache = {}
 
 ################################################################################
 
@@ -224,7 +228,7 @@ def get_source_id (source, version):
 
     return source_id
 
-def get_suite_version(source, suite):
+def get_suite_version(source, suite, arch):
     global suite_version_cache
     cache_key = "%s_%s" % (source, suite)
 
@@ -247,6 +251,25 @@ def get_suite_version(source, suite):
 
     return version
 
+def get_latest_binary_version_id(binary, section, suite, arch):
+    global suite_bin_version_cache
+    cache_key = "%s_%s_%s_%s" % (binary, section, suite, arch)
+
+    if suite_bin_version_cache.has_key(cache_key):
+        return suite_bin_version_cache[cache_key]
+
+    q = projectB.query("SELECT b.id, b.version FROM binaries b JOIN bin_associations ba ON (b.id = ba.bin) JOIN override o ON (o.package=b.package) WHERE b.package = '%s' AND b.architecture = '%d' AND ba.suite = '%d' AND o.section = '%d'" % (binary, int(arch), int(suite), int(section)))
+
+    highest_bid, highest_version = None, None
+
+    for bi in q.getresult():
+        if highest_version == None or apt_pkg.VersionCompare(bi[1], highest_version) == 1:
+            highest_bid = bi[0]
+            highest_version = bi[1]
+
+    suite_bin_version_cache[cache_key] = highest_bid
+    return highest_bid
+
 ################################################################################
 
 def get_or_set_maintainer_id (maintainer):
@@ -363,7 +386,7 @@ def get_or_set_queue_id (queue):
 def set_files_id (filename, size, md5sum, sha1sum, sha256sum, location_id):
     global files_id_cache
 
-    projectB.query("INSERT INTO files (filename, size, md5sum, sha1sum, sha256sum, location) VALUES ('%s', %d, '%s', %d)" % (filename, long(size), md5sum, sha1sum, sha256sum, location_id))
+    projectB.query("INSERT INTO files (filename, size, md5sum, sha1sum, sha256sum, location) VALUES ('%s', %d, '%s', '%s', '%s', %d)" % (filename, long(size), md5sum, sha1sum, sha256sum, location_id))
 
     return get_files_id (filename, size, md5sum, location_id)
 
@@ -389,3 +412,77 @@ def get_maintainer (maintainer_id):
     return maintainer_cache[maintainer_id]
 
 ################################################################################
+
+def get_suites(pkgname, src=False):
+    if src:
+        sql = "select suite_name from source, src_associations,suite where source.id=src_associations.source and source.source='%s' and src_associations.suite = suite.id"%pkgname
+    else:
+        sql = "select suite_name from binaries, bin_associations,suite where binaries.id=bin_associations.bin and package='%s' and bin_associations.suite = suite.id"%pkgname
+    q = projectB.query(sql)
+    return map(lambda x: x[0], q.getresult())
+
+################################################################################
+
+def get_or_set_contents_file_id(file):
+    global content_file_id_cache
+
+    if not content_file_id_cache.has_key(file):
+        sql_select = "SELECT id FROM content_file_names WHERE file = '%s'" % file
+        q = projectB.query(sql_select)
+        if not q.getresult():
+            # since this can be called within a transaction, we can't use currval
+            q = projectB.query("SELECT nextval('content_file_names_id_seq')")
+            file_id = int(q.getresult()[0][0])
+            projectB.query("INSERT INTO content_file_names VALUES ('%d', '%s')" % (file_id, file))
+            content_file_id_cache[file] = file_id
+        else:
+            content_file_id_cache[file] = int(q.getresult()[0][0])
+    return content_file_id_cache[file]
+
+################################################################################
+
+def get_or_set_contents_path_id(path):
+    global content_path_id_cache
+
+    if not content_path_id_cache.has_key(path):
+        sql_select = "SELECT id FROM content_file_paths WHERE path = '%s'" % path
+        q = projectB.query(sql_select)
+        if not q.getresult():
+            # since this can be called within a transaction, we can't use currval
+            q = projectB.query("SELECT nextval('content_file_paths_id_seq')")
+            path_id = int(q.getresult()[0][0])
+            projectB.query("INSERT INTO content_file_paths VALUES ('%d', '%s')" % (path_id, path))
+            content_path_id_cache[path] = path_id
+        else:
+            content_path_id_cache[path] = int(q.getresult()[0][0])
+
+    return content_path_id_cache[path]
+
+################################################################################
+
+def insert_content_path(bin_id, fullpath):
+    global insert_contents_file_cache
+    cache_key = "%s_%s" % (bin_id, fullpath)
+
+    # have we seen these contents before?
+    # probably only relevant during package import
+    if insert_contents_file_cache.has_key(cache_key):
+        return
+
+    # split the path into pathname and basename
+    (path, file) = os.path.split(fullpath)
+
+    # Get the necessary IDs ...
+    file_id = get_or_set_contents_file_id(file)
+    path_id = get_or_set_contents_path_id(path)
+
+    # Determine if we're inserting a duplicate row
+    q = projectB.query("SELECT 1 FROM content_associations WHERE binary_pkg = '%d' AND filepath = '%d' AND filename = '%d'" % (int(bin_id), path_id, file_id))
+    if q.getresult():
+        # Yes we are, return without doing the insert
+        print "Skipping duplicate row"
+        return
+
+    # Put them into content_associations
+    projectB.query("INSERT INTO content_associations VALUES (DEFAULT, '%d', '%d', '%d')" % (bin_id, path_id, file_id))
+    return
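
As a rough illustration of how the new contents helpers are meant to be driven (this sketch is not part of the patch itself): a caller that already has a binary package's database id and the list of paths shipped in the .deb would loop over the paths and let insert_content_path() handle the interning and de-duplication. The function name, bin_id and file_list below are hypothetical placeholders, and the module's projectB connection is assumed to have been initialised by the calling script as usual.

    from daklib import database

    def record_binary_contents(bin_id, file_list):
        # file_list: paths as they appear inside the package,
        # e.g. "usr/bin/dak", "usr/share/doc/dak/copyright"
        for fullpath in file_list:
            # insert_content_path() splits each path, interns the directory and
            # file name via get_or_set_contents_*_id(), and records one row in
            # content_associations per (binary, path, file) triple.
            database.insert_content_path(bin_id, fullpath)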