################################################################################
-import sys, time, types
+import os, sys, time, types, apt_pkg
################################################################################
queue_id_cache = {}
uid_id_cache = {}
suite_version_cache = {}
+suite_bin_version_cache = {}
+content_path_id_cache = {}
+content_file_id_cache = {}
+insert_contents_file_cache = {}
################################################################################
return source_id
-def get_suite_version(source, suite):
+def get_suite_version(source, suite, arch):
global suite_version_cache
cache_key = "%s_%s" % (source, suite)
return version
def get_latest_binary_version_id(binary, section, suite, arch):
    """Return the binaries.id of the highest-versioned binary package
    matching the given package name, override section, suite and
    architecture, or None if no candidate rows exist.

    Results (including None) are memoised in suite_bin_version_cache.
    Versions are compared with Debian semantics via apt_pkg.
    """
    global suite_bin_version_cache
    cache_key = "%s_%s_%s_%s" % (binary, section, suite, arch)

    if suite_bin_version_cache.has_key(cache_key):
        return suite_bin_version_cache[cache_key]

    q = projectB.query("SELECT b.id, b.version FROM binaries b JOIN bin_associations ba ON (b.id = ba.bin) JOIN override o ON (o.package=b.package) WHERE b.package = '%s' AND b.architecture = '%d' AND ba.suite = '%d' AND o.section = '%d'" % (binary, int(arch), int(suite), int(section)))

    best_id = None
    best_version = None

    # Keep whichever row has the highest version by Debian
    # version-comparison rules.
    for (row_id, row_version) in q.getresult():
        if best_version is None or apt_pkg.VersionCompare(row_version, best_version) == 1:
            best_id = row_id
            best_version = row_version

    suite_bin_version_cache[cache_key] = best_id
    return best_id
################################################################################
def get_or_set_maintainer_id (maintainer):
################################################################################
-def get_files_id (filename, size, md5sum, sha1sum, sha256sum, location_id):
+def get_files_id (filename, size, md5sum, location_id):
global files_id_cache
cache_key = "%s_%d" % (filename, location_id)
return files_id_cache[cache_key]
size = int(size)
- q = projectB.query("SELECT id, size, md5sum, sha1sum, sha256sum FROM files WHERE filename = '%s' AND location = %d" % (filename, location_id))
+ q = projectB.query("SELECT id, size, md5sum FROM files WHERE filename = '%s' AND location = %d" % (filename, location_id))
ql = q.getresult()
if ql:
if len(ql) != 1:
ql = ql[0]
orig_size = int(ql[1])
orig_md5sum = ql[2]
- orig_sha1sum = ql[3]
- orig_sha256sum = ql[4]
- if orig_size != size or orig_md5sum != md5sum or orig_sha1sum != sha1sum or orig_sha256sum != sha256sum:
+ if orig_size != size or orig_md5sum != md5sum:
return -2
files_id_cache[cache_key] = ql[0]
return files_id_cache[cache_key]
def set_files_id (filename, size, md5sum, sha1sum, sha256sum, location_id):
    """Insert a new row into the files table and return its id.

    The id is obtained by re-querying through get_files_id, which also
    primes files_id_cache for subsequent lookups.
    """
    global files_id_cache

    projectB.query("INSERT INTO files (filename, size, md5sum, sha1sum, sha256sum, location) VALUES ('%s', %d, '%s', '%s', '%s', %d)" % (filename, long(size), md5sum, sha1sum, sha256sum, location_id))

    return get_files_id (filename, size, md5sum, location_id)
### currval has issues with postgresql 7.1.3 when the table is big
### it was taking ~3 seconds to return on auric which is very Not
return maintainer_cache[maintainer_id]
################################################################################
+
def get_suites(pkgname, src=False):
    """Return the list of suite names containing the given package.

    When src is true, pkgname is looked up as a source package name;
    otherwise it is treated as a binary package name.
    """
    if src:
        sql = "select suite_name from source, src_associations,suite where source.id=src_associations.source and source.source='%s' and src_associations.suite = suite.id"%pkgname
    else:
        sql = "select suite_name from binaries, bin_associations,suite where binaries.id=bin_associations.bin and package='%s' and bin_associations.suite = suite.id"%pkgname
    q = projectB.query(sql)
    # Each result row is a 1-tuple (suite_name,); flatten to a plain list.
    return [row[0] for row in q.getresult()]
+
+################################################################################
+
def get_or_set_contents_file_id(file):
    """Return the content_file_names id for the given file basename,
    inserting a new row (with an id allocated from the sequence) when
    the name is not yet present.  Results are memoised in
    content_file_id_cache.
    """
    global content_file_id_cache

    # NOTE: parameter name shadows the py2 builtin `file`; kept for
    # interface compatibility with existing callers.
    if content_file_id_cache.has_key(file):
        return content_file_id_cache[file]

    q = projectB.query("SELECT id FROM content_file_names WHERE file = '%s'" % file)
    rows = q.getresult()
    if rows:
        file_id = int(rows[0][0])
    else:
        # since this can be called within a transaction, we can't use currval
        q = projectB.query("SELECT nextval('content_file_names_id_seq')")
        file_id = int(q.getresult()[0][0])
        projectB.query("INSERT INTO content_file_names VALUES ('%d', '%s')" % (file_id, file))

    content_file_id_cache[file] = file_id
    return content_file_id_cache[file]
+
+################################################################################
+
def get_or_set_contents_path_id(path):
    """Return the content_file_paths id for the given directory path,
    inserting a new row (with an id allocated from the sequence) when
    the path is not yet present.  Results are memoised in
    content_path_id_cache.
    """
    global content_path_id_cache

    if not content_path_id_cache.has_key(path):
        sql_select = "SELECT id FROM content_file_paths WHERE path = '%s'" % path
        q = projectB.query(sql_select)
        if not q.getresult():
            # since this can be called within a transaction, we can't use currval
            # FIX: allocate from content_file_paths' own sequence; the
            # original drew from content_file_names_id_seq (copy-paste
            # error), handing out ids belonging to the file-name table
            # and leaving the paths sequence out of sync with its rows.
            q = projectB.query("SELECT nextval('content_file_paths_id_seq')")
            path_id = int(q.getresult()[0][0])
            projectB.query("INSERT INTO content_file_paths VALUES ('%d', '%s')" % ( path_id, path))
            content_path_id_cache[path] = path_id
        else:
            content_path_id_cache[path] = int(q.getresult()[0][0])

    return content_path_id_cache[path]
+
+################################################################################
+
def insert_content_path(bin_id, fullpath):
    """Record in content_associations that binary package bin_id
    contains the file fullpath, creating file-name and path rows as
    needed.  Safe to call repeatedly for the same (bin_id, fullpath):
    duplicates are skipped.
    """
    global insert_contents_file_cache
    cache_key = "%s_%s" % (bin_id, fullpath)

    # Have we seen this contents entry before?
    # Probably only relevant during package import.
    if insert_contents_file_cache.has_key(cache_key):
        return

    # Split the full path into directory part and basename.
    (path, file) = os.path.split(fullpath)

    # Get the necessary IDs ...
    file_id = get_or_set_contents_file_id(file)
    path_id = get_or_set_contents_path_id(path)

    # Determine if we're inserting a duplicate row
    q = projectB.query("SELECT 1 FROM content_associations WHERE binary_pkg = '%d' AND filepath = '%d' AND filename = '%d'" % (int(bin_id), path_id, file_id))
    if q.getresult():
        # Yes we are: remember it so the next call short-circuits, and
        # return without doing the insert.  (FIX: the cache was never
        # written, so every duplicate hit the database; the old message
        # also claimed it was "Inserting" the dup row when it skips it.)
        insert_contents_file_cache[cache_key] = 1
        print("Skipping dup row")
        return

    # Put them into content_associations and remember we've done so.
    projectB.query("INSERT INTO content_associations VALUES (DEFAULT, '%d', '%d', '%d')" % (bin_id, path_id, file_id))
    insert_contents_file_cache[cache_key] = 1
    return