"Generate lists of packages per suite for apt-ftparchive"),
("generate-releases",
"Generate Release files"),
+         ("generate-contents",
+          "Generate Contents files"),
("generate-index-diffs",
"Generate .diff/Index files"),
("clean-suites",
"Check for users with no packages in the archive"),
("import-archive",
"Populate SQL database based from an archive tree"),
+ ("import-contents",
+ "Populate SQL database with Contents files"),
("import-keyring",
"Populate fingerprint/uid table based on a new/updated keyring"),
("import-ldap-fingerprints",
################################################################################
def do_update(self):
+vvvvvvvvvvvvvvvvvvvv
print "Note: to be able to enable the the PL/Perl (plperl) procedural language, we do"
print "need postgresql-plperl-$postgres-version installed. Make sure that this is the"
print "case before you continue. Interrupt if it isn't, sleeping 5 seconds now."
print "(We need to be database superuser for this to work!)"
time.sleep (5)
+^^^^^^^^^^^^^^^^^^^^
try:
c = self.db.cursor()
--- /dev/null
+#!/usr/bin/env python
+# coding=utf8
+
+"""
+Debian Archive Kit Database Update Script
+Copyright © 2008 Michael Casadevall <mcasadevall@debian.org>
+Copyright © 2008 Roger Leigh <rleigh@debian.org>
+
+Debian Archive Kit Database Update Script 2
+"""
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+################################################################################
+
+# <tomv_w> really, if we want to screw ourselves, let's find a better way.
+# <Ganneff> rm -rf /srv/ftp.debian.org
+
+################################################################################
+
+import psycopg2, time
+
+################################################################################
+
+def do_update(self):
+    """Apply database schema revision 2: create the tables that hold
+    per-binary Contents (file list) information, plus a
+    comma_separated_list aggregate used when regenerating Contents files.
+    Rolls back and reports on any ProgrammingError."""
+    print "Adding content fields to database"
+
+    try:
+        c = self.db.cursor()
+        # Directory components, stored once and shared between entries.
+        c.execute("""CREATE TABLE content_file_paths (
+                     id serial primary key not null,
+                     path text unique not null
+                   )""")
+
+        # File basenames, likewise deduplicated.
+        c.execute("""CREATE TABLE content_file_names (
+                     id serial primary key not null,
+                     file text unique not null
+                   )""")
+
+        # One row per file shipped by a binary package.
+        # NOTE(review): id is serial but not declared primary key -- confirm
+        # whether that is intentional.
+        c.execute("""CREATE TABLE content_associations (
+                     id serial not null,
+                     binary_pkg int4 not null references binaries(id) on delete cascade,
+                     filepath int4 not null references content_file_paths(id) on delete cascade,
+                     filename int4 not null references content_file_names(id) on delete cascade
+                   );""")
+
+        # Custom aggregate to build "a,b,c" lists in SQL (presumably
+        # predates string_agg being available).
+        c.execute("""CREATE FUNCTION comma_concat(text, text) RETURNS text
+                   AS $_$select case
+                   WHEN $2 is null or $2 = '' THEN $1
+                   WHEN $1 is null or $1 = '' THEN $2
+                   ELSE $1 || ',' || $2
+                   END$_$
+                   LANGUAGE sql""")
+
+        c.execute("""CREATE AGGREGATE comma_separated_list (
+                   BASETYPE = text,
+                   SFUNC = comma_concat,
+                   STYPE = text,
+                   INITCOND = ''
+                   );""")
+
+        c.execute("UPDATE config SET value = '2' WHERE name = 'db_revision'")
+        self.db.commit()
+
+        print "REMINDER: Remember to fully regenerate the Contents files before running import-contents"
+        print ""
+        print "Pausing for five seconds ..."
+        time.sleep (5)
+
+    except psycopg2.ProgrammingError, msg:
+        self.db.rollback()
+        # BUG FIX: this message previously referred to the unrelated
+        # "debversion" update; this script applies the contents update.
+        print "FATAL: Unable to apply contents table update 2!"
+        print "Error Message: " + str(msg)
+        print "Database changes have been rolled back."
--- /dev/null
+#!/usr/bin/env python
+# Create all the contents files
+
+# Copyright (C) 2008, 2009 Michael Casadevall <mcasadevall@debian.org>
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+################################################################################
+# <Ganneff> there is the idea to slowly replace contents files
+# <Ganneff> with a new generation of such files.
+# <Ganneff> having more info.
+# <Ganneff> of course that wont help for now where we need to generate them :)
+################################################################################
+
+################################################################################
+
+import sys, os, popen2, tempfile, stat, time, pg
+import gzip, apt_pkg
+from daklib import database, utils
+from daklib.dak_exceptions import *
+
+################################################################################
+
+Cnf = None
+projectB = None
+out = None
+AptCnf = None
+
+################################################################################
+
+def usage (exit_code=0):
+    # Print the command synopsis and exit with the given code
+    # (non-zero when called because of a usage error).
+    print """Usage: dak generate-contents
+Generate Contents files
+
+ -h, --help show this help and exit
+ -s, --suite=SUITE only write file lists for this suite
+"""
+    sys.exit(exit_code)
+
+################################################################################
+
+def generate_contents(suites):
+    """Write gzipped Contents files under Dir::Root/dists/<suite>/ for
+    every suite name in *suites*, one per non-source, non-"all"
+    architecture, built from the content_* tables.  Also writes
+    Contents-udeb.gz / Contents-udeb-nf.gz when the debian-installer
+    sections exist."""
+    global projectB, Cnf
+    # Ok, the contents information is in the database
+
+    # We need to work and get the contents, and print it out on a per
+    # architecture basis
+
+    # Read in the contents file header; header stays False (falsy) when
+    # no Generate-Contents::Header file is configured.
+    header = False
+    if Cnf.has_key("Generate-Contents::Header"):
+        h = open(Cnf["Generate-Contents::Header"], "r")
+        header = h.read()
+        h.close()
+
+    # Get our suites, and the architectures
+    for s in [i.lower() for i in suites]:
+        suite_id = database.get_suite_id(s)
+
+        q = projectB.query("SELECT s.architecture, a.arch_string FROM suite_architectures s JOIN architecture a ON (s.architecture=a.id) WHERE suite = '%d'" % suite_id)
+
+        # (architecture id, architecture name) pairs, skipping source/all
+        arch_list = [ ]
+        for r in q.getresult():
+            if r[1] != "source" and r[1] != "all":
+                arch_list.append((r[0], r[1]))
+
+        arch_all_id = database.get_architecture_id("all")
+
+        # Time for the query from hell.  Essentially, we need to get the
+        # associations, the filenames, the paths, and all that fun stuff
+        # from the database.  Arch "all" packages are folded into every
+        # per-architecture Contents file.
+
+        for arch_id in arch_list:
+            q = projectB.query("""SELECT p.path||'/'||n.file, comma_separated_list(s.section||'/'||b.package) FROM content_associations c JOIN content_file_paths p ON (c.filepath=p.id) JOIN content_file_names n ON (c.filename=n.id) JOIN binaries b ON (b.id=c.binary_pkg) JOIN bin_associations ba ON (b.id=ba.bin) JOIN override o ON (o.package=b.package) JOIN section s ON (s.id=o.section) WHERE (b.architecture = '%d' OR b.architecture = '%d') AND ba.suite = '%d' AND b.type = 'deb' GROUP BY (p.path||'/'||n.file)""" % (arch_id[0], arch_all_id, suite_id))
+
+            f = gzip.open(Cnf["Dir::Root"] + "dists/%s/Contents-%s.gz" % (s, arch_id[1]), "w")
+
+            if header:
+                f.write(header)
+
+            # Contents format: "<path>\t\t\t<section/pkg,section/pkg,...>"
+            for contents in q.getresult():
+                f.write(contents[0] + "\t\t\t" + contents[-1] + "\n")
+
+            f.close()
+
+        # The MORE fun part. Ok, udebs need their own contents files, udeb, and udeb-nf (not-free)
+        # This is HORRIBLY debian specific :-/
+        # First off, udeb
+
+        section_id = database.get_section_id('debian-installer') # all udebs should be here
+
+        if section_id != -1:
+            q = projectB.query("""SELECT p.path||'/'||n.file, comma_separated_list(s.section||'/'||b.package) FROM content_associations c JOIN content_file_paths p ON (c.filepath=p.id) JOIN content_file_names n ON (c.filename=n.id) JOIN binaries b ON (b.id=c.binary_pkg) JOIN bin_associations ba ON (b.id=ba.bin) JOIN override o ON (o.package=b.package) JOIN section s ON (s.id=o.section) WHERE s.id = '%d' AND ba.suite = '%d' AND b.type = 'udeb' GROUP BY (p.path||'/'||n.file)""" % (section_id, suite_id))
+
+            f = gzip.open(Cnf["Dir::Root"] + "dists/%s/Contents-udeb.gz" % (s), "w")
+
+            if header:
+                f.write(header)
+
+            for contents in q.getresult():
+                f.write(contents[0] + "\t\t\t" + contents[-1] + "\n")
+
+            f.close()
+
+        # Once more, with non-free
+        section_id = database.get_section_id('non-free/debian-installer') # all non-free udebs should be here
+
+        if section_id != -1:
+            q = projectB.query("""SELECT p.path||'/'||n.file, comma_separated_list(s.section||'/'||b.package) FROM content_associations c JOIN content_file_paths p ON (c.filepath=p.id) JOIN content_file_names n ON (c.filename=n.id) JOIN binaries b ON (b.id=c.binary_pkg) JOIN bin_associations ba ON (b.id=ba.bin) JOIN override o ON (o.package=b.package) JOIN section s ON (s.id=o.section) WHERE s.id = '%d' AND ba.suite = '%d' AND b.type = 'udeb' GROUP BY (p.path||'/'||n.file)""" % (section_id, suite_id))
+
+            f = gzip.open(Cnf["Dir::Root"] + "dists/%s/Contents-udeb-nf.gz" % (s), "w")
+
+            if header:
+                f.write(header)
+
+            for contents in q.getresult():
+                f.write(contents[0] + "\t\t\t" + contents[-1] + "\n")
+
+            f.close()
+
+################################################################################
+
+def main ():
+    """Entry point: parse options, connect to projectB and generate
+    Contents files for the selected (or all configured) suites."""
+    global Cnf, projectB, out
+    out = sys.stdout
+
+    Cnf = utils.get_conf()
+
+    Arguments = [('h',"help","Generate-Contents::Options::Help"),
+                 ('s',"suite","Generate-Contents::Options::Suite","HasArg"),
+                ]
+
+    # Make sure the option keys exist even if not given on the command line
+    for i in [ "help", "suite" ]:
+        if not Cnf.has_key("Generate-Contents::Options::%s" % (i)):
+            Cnf["Generate-Contents::Options::%s" % (i)] = ""
+
+    # NOTE(review): this return value (the non-option arguments) is always
+    # overwritten below; suites effectively come only from -s or the
+    # configured Suite tree.
+    suites = apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv)
+    Options = Cnf.SubTree("Generate-Contents::Options")
+
+    if Options["Help"]:
+        usage()
+
+    if Options["Suite"]:
+        suites = utils.split_args(Options["Suite"])
+    else:
+        suites = Cnf.SubTree("Suite").List()
+
+    projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
+    database.init(Cnf, projectB)
+
+    generate_contents(suites)
+
+#######################################################################################
+
+if __name__ == '__main__':
+ main()
--- /dev/null
+#!/usr/bin/env python2.4
+# Import contents files
+
+# Copyright (C) 2008, 2009 Michael Casadevall <mcasadevall@debian.org>
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+################################################################################
+################################################################################
+
+################################################################################
+
+import sys, os, popen2, tempfile, stat, time, pg
+import re, gzip, apt_pkg
+from daklib import database, utils
+from daklib.dak_exceptions import *
+
+################################################################################
+
+# Module state, filled in by main() and the caching helpers below.
+Cnf = None         # dak configuration tree
+projectB = None    # PyGreSQL connection to the projectb database
+out = None         # output stream (stdout)
+AptCnf = None                          # NOTE(review): appears unused in this script
+has_opened_temp_file_lists = False     # NOTE(review): appears unused in this script
+content_path_file = ""                 # NOTE(review): appears unused in this script
+content_name_file = ""                 # NOTE(review): appears unused in this script
+content_file_cache = set([])   # full paths already seen
+content_name_cache = set([])   # distinct basenames (bulk-loaded via COPY later)
+content_path_cache = set([])   # distinct directory components
+
+################################################################################
+
+def usage (exit_code=0):
+    # Print the command synopsis and exit with the given code
+    # (non-zero when called because of a usage error).
+    print """Usage: dak import-contents
+Import Contents files
+
+ -h, --help show this help and exit
+ -s, --suite=SUITE only write file lists for this suite
+"""
+    sys.exit(exit_code)
+
+################################################################################
+
+def cache_content_path(fullpath):
+    """Record *fullpath* in the module-level caches: the full path in
+    content_file_cache, and its directory and basename components in
+    content_path_cache / content_name_cache respectively.  No-op if the
+    path was seen before."""
+    # BUG FIX: the global statement used to name a nonexistent
+    # "contents_name_cache"; it worked only because sets are mutated in
+    # place and never rebound.
+    global content_file_cache, content_name_cache, content_path_cache
+
+    # have we seen this contents path before?
+    if fullpath in content_file_cache:
+        return
+
+    # Add the new key to the cache
+    content_file_cache.add(fullpath)
+
+    # split the path into basename, and pathname.  Due to performance
+    # reasons, we need the complete file/path lists sorted out before we
+    # can build the association table.  set.add is idempotent, so no
+    # membership pre-check is needed.
+    (path, file) = os.path.split(fullpath)
+    content_path_cache.add(path)
+    content_name_cache.add(file)
+
+    return
+
+################################################################################
+
+def import_contents(suites):
+    """Parse dists/<suite>/Contents-<arch>.gz for every architecture of
+    every suite in *suites* and load the file names into the database.
+
+    NOTE(review): only content_file_names is populated (via COPY); the
+    path/association handling is still commented out below -- confirm
+    this is the intended interim state.
+    """
+    global projectB, Cnf
+
+    # Start transaction
+    projectB.query("BEGIN WORK")
+
+    # Needed to make sure postgreSQL doesn't freak out on some of the data
+    projectB.query("SET CLIENT_ENCODING TO 'LATIN1'")
+
+    # Precache everything
+    #print "Precaching binary information, this will take a few moments ..."
+    #database.preload_binary_id_cache()
+
+    # Prep regexs (hoisted out of the per-line loop)
+    line_regex = re.compile(r'^(.+?)\s+(\S+)$')
+    pkg_regex = re.compile(r'(\S+)/(\S+)$')
+    file_regex = re.compile('^FILE')
+
+    # Get our suites, and the architectures
+    for s in suites:
+        suite_id = database.get_suite_id(s)
+
+        arch_list = [ ]
+        for r in Cnf.ValueList("Suite::%s::Architectures" % (s)):
+            if r != "source" and r != "all":
+                arch_list.append(r)
+
+        arch_all_id = database.get_architecture_id("all")
+
+        for arch in arch_list:
+            print "Processing %s/%s" % (s, arch)
+            arch_id = database.get_architecture_id(arch)
+
+            try:
+                f = gzip.open(Cnf["Dir::Root"] + "dists/%s/Contents-%s.gz" % (s, arch), "r")
+
+            except:
+                print "Unable to open dists/%s/Contents-%s.gz" % (s, arch)
+                print "Skipping ..."
+                continue
+
+            # Get line count
+            lines = f.readlines()
+            num_of_lines = len(lines)
+
+            lines_processed = 0
+            found_header = False
+
+            for line in lines:
+                if not found_header:
+                    # Skip the free-form header; real data starts right
+                    # after the column-header line beginning with "FILE".
+                    # BUG FIX: previously only the "FILE" line itself was
+                    # skipped (continue was inside the match branch), so
+                    # every other header line fell through and was parsed
+                    # and cached as if it were a content entry.
+                    lines_processed += 1
+                    if file_regex.match(line):
+                        found_header = True
+                    continue
+
+                # The format is simple enough, *filename*, *section/package1,section/package2,etc*
+                # Each file appears once per Contents file, so first, use some regex match
+                # to split the two bits
+
+                # Print out progress bar
+                print "\rProcessed %d lines of %d (%%%.2f)" % (lines_processed, num_of_lines, ((float(lines_processed)/num_of_lines)*100)),
+
+                # regex lifted from packages.d.o code
+                matchs = line_regex.findall(line)
+                if not matchs:
+                    # Tolerate blank/malformed lines instead of crashing
+                    # on matchs[0] below.
+                    lines_processed += 1
+                    continue
+                filename = matchs[0][0]
+                packages = matchs[0][1].split(',')
+
+                cache_content_path(filename)
+
+                # Iterate through each file's packages
+                #for package in packages:
+                #    matchs = pkg_regex.findall(package)
+                #    section_name = matchs[0][0]
+                #    package_name = matchs[0][1]
+                #    section_id = database.get_section_id(section_name)
+                #    package_id = database.get_latest_binary_version_id(package_name, section_id, suite_id, arch_id)
+                #    if package_id == None:
+                #        # Stale Contents files (e.g. hurd-i386) can refer to
+                #        # packages which no longer exist; skip those entries.
+                #        continue
+
+                lines_processed += 1
+
+            if not found_header:
+                # Replaces the old dead in-loop EOF check: readlines()
+                # never yields an empty string, so "if not line" never
+                # fired.  Detect a missing header marker here instead.
+                print "Unable to find end of Contents-%s.gz header!" % (arch)
+                sys.exit(255)
+
+            print "" # newline since the Progress bar doesn't print one
+            f.close()
+
+    # Commit work: bulk-load the collected basenames.  COPY is far
+    # cheaper than row-by-row INSERTs.
+    print "Committing to database ..."
+    projectB.query("COPY content_file_names (file) FROM STDIN")
+
+    for line in content_name_cache:
+        projectB.putline("%s\n" % (line))
+
+    projectB.endcopy()
+
+    projectB.query("COMMIT")
+
+################################################################################
+
+def main ():
+    """Entry point: parse options, connect to projectB and import
+    Contents files for the selected (or all configured) suites."""
+    global Cnf, projectB, out
+    out = sys.stdout
+
+    Cnf = utils.get_conf()
+
+    Arguments = [('h',"help","Import-Contents::Options::Help"),
+                 ('s',"suite","Import-Contents::Options::Suite","HasArg"),
+                ]
+
+    # Make sure the option keys exist even if not given on the command line
+    for i in [ "help", "suite" ]:
+        if not Cnf.has_key("Import-Contents::Options::%s" % (i)):
+            Cnf["Import-Contents::Options::%s" % (i)] = ""
+
+    # NOTE(review): this return value (the non-option arguments) is always
+    # overwritten below; suites effectively come only from -s or the
+    # configured Suite tree.
+    suites = apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv)
+    Options = Cnf.SubTree("Import-Contents::Options")
+
+    if Options["Help"]:
+        usage()
+
+    if Options["Suite"]:
+        suites = utils.split_args(Options["Suite"])
+    else:
+        suites = Cnf.SubTree("Suite").List()
+
+    projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
+    database.init(Cnf, projectB)
+
+    import_contents(suites)
+
+#######################################################################################
+
+if __name__ == '__main__':
+ main()
###############################################################################
import errno, fcntl, os, sys, time, re
-import apt_pkg
+import apt_pkg, tarfile, commands
from daklib import database
from daklib import logging
from daklib import queue
else:
os.unlink(self.log_filename)
+
+###############################################################################
+
+def generate_contents_information(filename):
+    """Return the list of file paths shipped by the .deb *filename*,
+    by extracting and reading its data tarball.  Directories are
+    omitted.  Calls reject() and returns [] when the deb is malformed.
+
+    NOTE(review): *filename* is interpolated into a shell command; it
+    must not contain shell metacharacters (subprocess with an argument
+    list would be safer).
+    """
+    # List the ar members; a .deb contains debian-binary, control.tar.*
+    # and data.tar.* -- the data member is assumed to be the third entry.
+    cmd = "ar t %s" % (filename)
+    (result, output) = commands.getstatusoutput(cmd)
+    if result != 0:
+        reject("%s: 'ar t' invocation failed." % (filename))
+        reject(utils.prefix_multi_line_string(output, " [ar output:] "), "")
+
+    # Ugh ... this is ugly ... Code ripped from process_unchecked.py
+    chunks = output.split('\n')
+    cmd = "ar x %s %s" % (filename, chunks[2])
+    (result, output) = commands.getstatusoutput(cmd)
+    if result != 0:
+        # BUG FIX: message used to claim 'ar t' failed for this 'ar x' call
+        reject("%s: 'ar x' invocation failed." % (filename))
+        reject(utils.prefix_multi_line_string(output, " [ar output:] "), "")
+
+    # Got deb tarballs, now lets go through and determine what bits
+    # and pieces the deb had ...
+    if chunks[2] == "data.tar.gz":
+        data = tarfile.open("data.tar.gz", "r:gz")
+    # BUG FIX: this branch compared an undefined name "data_tar",
+    # raising NameError for every bzip2-compressed deb.
+    elif chunks[2] == "data.tar.bz2":
+        data = tarfile.open("data.tar.bz2", "r:bz2")
+    else:
+        os.remove(chunks[2])
+        reject("couldn't find data.tar.*")
+        # BUG FIX: return early -- falling through left "data" unbound
+        # and crashed with NameError in the loop below.
+        return []
+
+    contents = []
+    for tarinfo in data:
+        if not tarinfo.isdir():
+            # [2:] strips the leading "./" -- assumes tar member names
+            # are "./"-prefixed; TODO confirm for all build tools.
+            contents.append(tarinfo.name[2:])
+
+    os.remove(chunks[2])
+    return contents
+
###############################################################################
def reject (str, prefix="Rejected: "):
source = files[file]["source package"]
source_version = files[file]["source version"]
filename = files[file]["pool name"] + file
+ contents = generate_contents_information(file)
if not files[file].has_key("location id") or not files[file]["location id"]:
files[file]["location id"] = database.get_location_id(Cnf["Dir::Pool"],files[file]["component"],utils.where_am_i())
if not files[file].has_key("files id") or not files[file]["files id"]:
suite_id = database.get_suite_id(suite)
projectB.query("INSERT INTO bin_associations (suite, bin) VALUES (%d, currval('binaries_id_seq'))" % (suite_id))
+ # insert contents into the database
+ q = projectB.query("SELECT currval('binaries_id_seq')")
+ bin_id = int(q.getresult()[0][0])
+ for file in contents:
+ database.insert_content_path(bin_id, file)
+
# If the .orig.tar.gz is in a legacy directory we need to poolify
# it, so that apt-get source (and anything else that goes by the
# "Directory:" field in the Sources.gz file) works.
utils.copy(pkg.changes_file, Cnf["Dir::Root"] + dest)
for dest in copy_dot_dak.keys():
utils.copy(Upload.pkg.changes_file[:-8]+".dak", dest)
-
projectB.query("COMMIT WORK")
# Move the .changes into the 'done' directory
Cnf = None
projectB = None
-required_database_schema = 3
+required_database_schema = 4
################################################################################
################################################################################
-import sys, time, types
+import os, sys, time, types, apt_pkg
################################################################################
queue_id_cache = {}
uid_id_cache = {}
suite_version_cache = {}
+suite_bin_version_cache = {}     # "pkg_section_suite_arch" -> binaries.id
+content_path_id_cache = {}       # directory path -> content_file_paths.id
+content_file_id_cache = {}       # basename -> content_file_names.id
+insert_contents_file_cache = {}  # "binid_fullpath" -> seen marker
+cache_preloaded = False          # True once preload_binary_id_cache() has run
################################################################################
return source_id
-def get_suite_version(source, suite):
+def get_suite_version(source, suite, arch):
global suite_version_cache
cache_key = "%s_%s" % (source, suite)
return version
+def get_latest_binary_version_id(binary, section, suite, arch):
+    """Return a binaries.id for *binary* in the given section/suite/arch,
+    preferring the in-memory cache and falling back to a DB query.
+
+    NOTE(review): the return value is inconsistent -- a cache hit returns
+    an id, a miss after preloading returns None, and an empty query
+    returns False.  Callers appear to compare "== None" only; confirm.
+    NOTE(review): the fallback query matches only the exact arch, while
+    the cache also consults arch "all" -- results can differ.
+    """
+    global suite_bin_version_cache
+    cache_key = "%s_%s_%s_%s" % (binary, section, suite, arch)
+    cache_key_all = "%s_%s_%s_%s" % (binary, section, suite, get_architecture_id("all"))
+
+    # Check for the cache hit for its arch, then arch all
+    if suite_bin_version_cache.has_key(cache_key):
+        return suite_bin_version_cache[cache_key]
+    if suite_bin_version_cache.has_key(cache_key_all):
+        return suite_bin_version_cache[cache_key_all]
+    if cache_preloaded == True:
+        return # package does not exist
+
+    q = projectB.query("SELECT DISTINCT b.id FROM binaries b JOIN bin_associations ba ON (b.id = ba.bin) JOIN override o ON (o.package=b.package) WHERE b.package = '%s' AND b.architecture = '%d' AND ba.suite = '%d' AND o.section = '%d'" % (binary, int(arch), int(suite), int(section)))
+
+    if not q.getresult():
+        return False
+
+    # NOTE(review): no ORDER BY, so this is merely the first row returned,
+    # not necessarily the highest/latest id the name suggests.
+    highest_bid = q.getresult()[0][0]
+
+    suite_bin_version_cache[cache_key] = highest_bid
+    return highest_bid
+
+def preload_binary_id_cache():
+    """Bulk-populate suite_bin_version_cache with every
+    (package, section, suite, arch) -> binaries.id mapping, so later
+    lookups avoid one query per package.  Sets cache_preloaded so a
+    subsequent cache miss can be treated as "package does not exist"."""
+    global suite_bin_version_cache, cache_preloaded
+
+    # Get suite info
+    q = projectB.query("SELECT id FROM suite")
+    suites = q.getresult()
+
+    # Get arch mappings
+    q = projectB.query("SELECT id FROM architecture")
+    arches = q.getresult()
+
+    # One query per (suite, arch) pair; each row is (id, package, section)
+    for suite in suites:
+        for arch in arches:
+            q = projectB.query("SELECT DISTINCT b.id, b.package, o.section FROM binaries b JOIN bin_associations ba ON (b.id = ba.bin) JOIN override o ON (o.package=b.package) WHERE b.architecture = '%d' AND ba.suite = '%d'" % (int(arch[0]), int(suite[0])))
+
+            for bi in q.getresult():
+                cache_key = "%s_%s_%s_%s" % (bi[1], bi[2], suite[0], arch[0])
+                suite_bin_version_cache[cache_key] = int(bi[0])
+
+    cache_preloaded = True
+
################################################################################
def get_or_set_maintainer_id (maintainer):
sql = "select suite_name from binaries, bin_associations,suite where binaries.id=bin_associations.bin and package='%s' and bin_associations.suite = suite.id"%pkgname
q = projectB.query(sql)
return map(lambda x: x[0], q.getresult())
+
+################################################################################
+
+def get_or_set_contents_file_id(file):
+    """Return the content_file_names id for basename *file*, inserting a
+    new row when absent; results are memoised in content_file_id_cache.
+
+    NOTE(review): *file* is string-interpolated into the SQL -- a name
+    containing a single quote will break the statement.  Needs escaping
+    (pg's query() offers no parameter binding).
+    """
+    global content_file_id_cache
+
+    if not content_file_id_cache.has_key(file):
+        sql_select = "SELECT id FROM content_file_names WHERE file = '%s'" % file
+        q = projectB.query(sql_select)
+        if not q.getresult():
+            # since this can be called within a transaction, we can't use currval
+            q = projectB.query("INSERT INTO content_file_names VALUES (DEFAULT, '%s') RETURNING id" % (file))
+        content_file_id_cache[file] = int(q.getresult()[0][0])
+    return content_file_id_cache[file]
+
+################################################################################
+
+def get_or_set_contents_path_id(path):
+    """Return the content_file_paths id for directory *path*, inserting a
+    new row when absent; results are memoised in content_path_id_cache.
+
+    NOTE(review): *path* is string-interpolated into the SQL -- a path
+    containing a single quote will break the statement.  Needs escaping
+    (pg's query() offers no parameter binding).
+    """
+    global content_path_id_cache
+
+    if not content_path_id_cache.has_key(path):
+        sql_select = "SELECT id FROM content_file_paths WHERE path = '%s'" % path
+        q = projectB.query(sql_select)
+        if not q.getresult():
+            # since this can be called within a transaction, we can't use currval
+            q = projectB.query("INSERT INTO content_file_paths VALUES (DEFAULT, '%s') RETURNING id" % (path))
+        content_path_id_cache[path] = int(q.getresult()[0][0])
+    return content_path_id_cache[path]
+
+################################################################################
+
+def insert_content_path(bin_id, fullpath):
+    """Associate binary package *bin_id* with the file *fullpath* in
+    content_associations, creating path/name rows as needed.  Duplicate
+    (bin_id, fullpath) pairs are skipped."""
+    global insert_contents_file_cache
+    cache_key = "%s_%s" % (bin_id, fullpath)
+
+    # have we seen this contents before?
+    # probably only relevant during package import
+    if insert_contents_file_cache.has_key(cache_key):
+        return
+    # BUG FIX: the cache was consulted but never populated, so every call
+    # fell through to the duplicate-row SELECT below.
+    insert_contents_file_cache[cache_key] = True
+
+    # split the path into basename, and pathname
+    (path, file) = os.path.split(fullpath)
+
+    # Get the necessary IDs ...
+    file_id = get_or_set_contents_file_id(file)
+    path_id = get_or_set_contents_path_id(path)
+
+    # Determine if we're inserting a duplicate row (e.g. from a previous
+    # run -- the in-memory cache only covers this process)
+    q = projectB.query("SELECT 1 FROM content_associations WHERE binary_pkg = '%d' AND filepath = '%d' AND filename = '%d'" % (int(bin_id), path_id, file_id))
+    if q.getresult():
+        # Yes we are, return without doing the insert
+        return
+
+    # Put them into content_associations
+    projectB.query("INSERT INTO content_associations VALUES (DEFAULT, '%d', '%d', '%d')" % (bin_id, path_id, file_id))
+    return
<helix> elmo: I can't believe people pay you to fix computers
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+* Ganneff ponders how to best write the text to -devel. (need to tell em in
+ case they find more bugs). "We fixed the fucking idiotic broken implementation
+ to be less so" is probably not the nicest, even if perfect valid, way to say so
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%