# Copyright (C) 2000, 2001  James Troup <james@nocrew.org>
# $Id: neve,v 1.7 2001-11-18 19:57:58 rmurray Exp $

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

################################################################################

# 04:36|<aj> elmo: you're making me waste 5 seconds per architecture!!!!!! YOU BASTARD!!!!!

################################################################################

# This code is a horrible mess for two reasons:
#
#   (o) For Debian's usage, it's doing something like 160k INSERTs;
#       even on auric, that makes the program unusable unless we get
#       involved in all sorts of silly optimization games (local dicts
#       to avoid redundant SELECTs, using COPY FROM rather than
#       INSERTs, etc.)
#
#   (o) It's very site specific, because I don't expect to use this
#       script again in a hurry, and I don't want to spend any more
#       time on it than absolutely necessary.
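#
# For the record, this is the shape of the optimization game (a sketch,
# using the table and cache names from further down): instead of issuing
# ~160k statements of the form
#
#     projectB.query("INSERT INTO files (id, filename, size, md5sum, location) VALUES (...)");
#
# each row is buffered as one tab-separated line in a flat file and then
# loaded with a single statement per table at the end:
#
#     projectB.query("COPY files FROM '%s'" % (Cnf["Neve::ExportDir"]+"files"));
#
# while local dicts (files_id_cache, source_cache, ...) answer the "have
# we seen this row before?" question that would otherwise cost a
# round-trip SELECT per lookup.
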
################################################################################

import commands, os, pg, re, sys, string, tempfile
import apt_pkg # for ParseTagFile, used below
import db_access, utils

################################################################################

re_arch_from_filename = re.compile(r"binary-[^/]+")

################################################################################

Cnf = None;
projectB = None;

files_id_cache = {};
source_cache = {};
arch_all_cache = {};
binary_cache = {};

files_id_serial = 0;
source_id_serial = 0;
src_associations_id_serial = 0;
dsc_files_id_serial = 0;
files_query_cache = None;
source_query_cache = None;
src_associations_query_cache = None;
dsc_files_query_cache = None;
orig_tar_gz_cache = {};

binaries_id_serial = 0;
binaries_query_cache = None;
bin_associations_id_serial = 0;
bin_associations_query_cache = None;

source_cache_for_binaries = {};

################################################################################

# Prepares a filename or directory (s) to be file.filename by stripping
# any part of the location (sub) from it.
def poolify (s, sub):
    for i in xrange(len(sub)):
        if sub[i:] == s[0:len(sub)-i]:
            return s[len(sub)-i:];
    # No overlap at all; return the string unchanged.
    return s;
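
# For example (hypothetical paths): poolify("/org/ftp.debian.org/ftp/pool/main/",
# "/org/ftp.debian.org/ftp/") gives "pool/main/"; if the location and the
# filename don't overlap at all, the string comes back unchanged.
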
def update_archives ():
    projectB.query("DELETE FROM archive")
    for archive in Cnf.SubTree("Archive").List():
        SubSec = Cnf.SubTree("Archive::%s" % (archive));
        projectB.query("INSERT INTO archive (name, origin_server, description) VALUES ('%s', '%s', '%s')"
                       % (archive, SubSec["OriginServer"], SubSec["Description"]));

def update_components ():
    projectB.query("DELETE FROM component")
    for component in Cnf.SubTree("Component").List():
        SubSec = Cnf.SubTree("Component::%s" % (component));
        projectB.query("INSERT INTO component (name, description, meets_dfsg) VALUES ('%s', '%s', '%s')" %
                       (component, SubSec["Description"], SubSec["MeetsDFSG"]));

def update_locations ():
    projectB.query("DELETE FROM location")
    for location in Cnf.SubTree("Location").List():
        SubSec = Cnf.SubTree("Location::%s" % (location));
        archive_id = db_access.get_archive_id(SubSec["archive"]);
        type = SubSec.Find("type");
        if type == "legacy-mixed":
            projectB.query("INSERT INTO location (path, archive, type) VALUES ('%s', %d, '%s')" % (location, archive_id, SubSec["type"]));
        else:
            for component in Cnf.SubTree("Component").List():
                component_id = db_access.get_component_id(component);
                projectB.query("INSERT INTO location (path, component, archive, type) VALUES ('%s', %d, %d, '%s')" %
                               (location, component_id, archive_id, SubSec["type"]));

def update_architectures ():
    projectB.query("DELETE FROM architecture")
    for arch in Cnf.SubTree("Architectures").List():
        projectB.query("INSERT INTO architecture (arch_string, description) VALUES ('%s', '%s')" % (arch, Cnf["Architectures::%s" % (arch)]))

def update_suites ():
    projectB.query("DELETE FROM suite")
    for suite in Cnf.SubTree("Suite").List():
        SubSec = Cnf.SubTree("Suite::%s" % (suite))
        projectB.query("INSERT INTO suite (suite_name) VALUES ('%s')" % string.lower(suite));
        for i in ("Version", "Origin", "Description"):
            if SubSec.has_key(i):
                projectB.query("UPDATE suite SET %s = '%s' WHERE suite_name = '%s'" % (string.lower(i), SubSec[i], string.lower(suite)))
        for architecture in Cnf.SubTree("Suite::%s::Architectures" % (suite)).List():
            architecture_id = db_access.get_architecture_id (architecture);
            projectB.query("INSERT INTO suite_architectures (suite, architecture) VALUES (currval('suite_id_seq'), %d)" % (architecture_id));

################################################################################

def get_or_set_files_id (filename, size, md5sum, location_id):
    global files_id_cache, files_id_serial, files_query_cache;

    cache_key = string.join((filename, size, md5sum, repr(location_id)), '~')
    if not files_id_cache.has_key(cache_key):
        files_id_serial = files_id_serial + 1
        files_query_cache.write("%d\t%s\t%s\t%s\t%d\n" % (files_id_serial, filename, size, md5sum, location_id));
        files_id_cache[cache_key] = files_id_serial

    return files_id_cache[cache_key]
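
# A minimal illustration (hypothetical values): the first call for a given
# file allocates a fresh serial and buffers one COPY row; an identical
# repeat call is answered from the dict alone, with no further I/O:
#
#     get_or_set_files_id ("pool/main/x/x_1.0.dsc", "123", "0bee89b07a24...", 1)  # -> 1, row buffered
#     get_or_set_files_id ("pool/main/x/x_1.0.dsc", "123", "0bee89b07a24...", 1)  # -> 1, cache hit
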
################################################################################

def process_sources (location, filename, suite, component, archive):
    global source_cache, source_query_cache, src_associations_query_cache, dsc_files_query_cache, source_id_serial, src_associations_id_serial, dsc_files_id_serial, source_cache_for_binaries, orig_tar_gz_cache;

    suite = string.lower(suite)
    suite_id = db_access.get_suite_id(suite);
    if suite == 'stable':
        testing_id = db_access.get_suite_id("testing");
    try:
        file = utils.open_file (filename);
    except utils.cant_open_exc:
        print "WARNING: can't open '%s'" % (filename);
        return;
    Scanner = apt_pkg.ParseTagFile(file)
    while Scanner.Step() != 0:
        package = Scanner.Section["package"]
        version = Scanner.Section["version"]
        maintainer = Scanner.Section["maintainer"]
        maintainer = string.replace(maintainer, "'", "\\'")
        maintainer_id = db_access.get_or_set_maintainer_id(maintainer);
        directory = Scanner.Section["directory"]
        location_id = db_access.get_location_id (location, component, archive)
        if directory[-1:] != "/":
            directory = directory + '/';
        directory = poolify (directory, location);
        if directory != "" and directory[-1:] != "/":
            directory = directory + '/';
        no_epoch_version = utils.re_no_epoch.sub('', version)
        # Add all files referenced by the .dsc to the files table
        ids = [];
        for line in string.split(Scanner.Section["files"],'\n'):
            id = None;
            (md5sum, size, filename) = string.split(string.strip(line));
            # Don't duplicate .orig.tar.gz's
            if filename[-12:] == ".orig.tar.gz":
                cache_key = "%s~%s~%s" % (filename, size, md5sum);
                if orig_tar_gz_cache.has_key(cache_key):
                    id = orig_tar_gz_cache[cache_key];
                else:
                    id = get_or_set_files_id (directory + filename, size, md5sum, location_id);
                    orig_tar_gz_cache[cache_key] = id;
            else:
                id = get_or_set_files_id (directory + filename, size, md5sum, location_id);
            ids.append(id);
            # If this is the .dsc itself; save the ID for later.
            if filename[-4:] == ".dsc":
                files_id = id;
        filename = directory + package + '_' + no_epoch_version + '.dsc'
        cache_key = "%s~%s" % (package, version)
        if not source_cache.has_key(cache_key):
            nasty_key = "%s~%s" % (package, version)
            source_id_serial = source_id_serial + 1;
            if not source_cache_for_binaries.has_key(nasty_key):
                source_cache_for_binaries[nasty_key] = source_id_serial;
            tmp_source_id = source_id_serial;
            source_cache[cache_key] = source_id_serial;
            source_query_cache.write("%d\t%s\t%s\t%d\t%d\n" % (source_id_serial, package, version, maintainer_id, files_id))
            for id in ids:
                dsc_files_id_serial = dsc_files_id_serial + 1;
                dsc_files_query_cache.write("%d\t%d\t%d\n" % (dsc_files_id_serial, tmp_source_id, id));
        else:
            tmp_source_id = source_cache[cache_key];

        src_associations_id_serial = src_associations_id_serial + 1;
        src_associations_query_cache.write("%d\t%d\t%d\n" % (src_associations_id_serial, suite_id, tmp_source_id))
        # populate 'testing' with a mirror of 'stable'
        if suite == "stable":
            src_associations_id_serial = src_associations_id_serial + 1;
            src_associations_query_cache.write("%d\t%d\t%d\n" % (src_associations_id_serial, testing_id, tmp_source_id))

    file.close();

################################################################################

def process_packages (location, filename, suite, component, archive):
    global arch_all_cache, binary_cache, binaries_id_serial, binaries_query_cache, bin_associations_id_serial, bin_associations_query_cache;

    count_total = 0;
    count_bad = 0;
    suite = string.lower(suite);
    suite_id = db_access.get_suite_id(suite);
    if suite == "stable":
        testing_id = db_access.get_suite_id("testing");
    try:
        file = utils.open_file (filename);
    except utils.cant_open_exc:
        print "WARNING: can't open '%s'" % (filename);
        return;
    Scanner = apt_pkg.ParseTagFile(file);
    while Scanner.Step() != 0:
        package = Scanner.Section["package"]
        version = Scanner.Section["version"]
        maintainer = Scanner.Section["maintainer"]
        maintainer = string.replace(maintainer, "'", "\\'")
        maintainer_id = db_access.get_or_set_maintainer_id(maintainer);
        architecture = Scanner.Section["architecture"]
        architecture_id = db_access.get_architecture_id (architecture);
        # Work out the source package and version this binary belongs to;
        # both default to the binary's own name and version.
        if not Scanner.Section.has_key("source"):
            source = package;
        else:
            source = Scanner.Section["source"]
        source_version = "";
        if string.find(source, "(") != -1:
            m = utils.re_extract_src_version.match(source)
            source = m.group(1);
            source_version = m.group(2)
        if not source_version:
            source_version = version
        filename = Scanner.Section["filename"]
        location_id = db_access.get_location_id (location, component, archive)
        filename = poolify (filename, location)
        if architecture == "all":
            filename = re_arch_from_filename.sub("binary-all", filename);
        cache_key = "%s~%s" % (source, source_version);
        source_id = source_cache_for_binaries.get(cache_key, None);
        size = Scanner.Section["size"];
        md5sum = Scanner.Section["md5sum"];
        files_id = get_or_set_files_id (filename, size, md5sum, location_id);
        type = "deb"; # FIXME
        cache_key = "%s~%s~%s~%d~%d~%d" % (package, version, repr(source_id), architecture_id, location_id, files_id);
        if not arch_all_cache.has_key(cache_key):
            arch_all_cache[cache_key] = 1;
            cache_key = "%s~%s~%s~%d" % (package, version, repr(source_id), architecture_id);
            if not binary_cache.has_key(cache_key):
                if not source_id:
                    source_id = "\N"; # i.e. PostgreSQL's COPY NULL marker
                    count_bad = count_bad + 1;
                else:
                    source_id = repr(source_id);
                binaries_id_serial = binaries_id_serial + 1;
                binaries_query_cache.write("%d\t%s\t%s\t%d\t%s\t%d\t%d\t%s\n" % (binaries_id_serial, package, version, maintainer_id, source_id, architecture_id, files_id, type));
                binary_cache[cache_key] = binaries_id_serial;
                tmp_binaries_id = binaries_id_serial;
            else:
                tmp_binaries_id = binary_cache[cache_key];

            bin_associations_id_serial = bin_associations_id_serial + 1;
            bin_associations_query_cache.write("%d\t%d\t%d\n" % (bin_associations_id_serial, suite_id, tmp_binaries_id));
            if suite == "stable":
                bin_associations_id_serial = bin_associations_id_serial + 1;
                bin_associations_query_cache.write("%d\t%d\t%d\n" % (bin_associations_id_serial, testing_id, tmp_binaries_id));
            count_total = count_total + 1;

    file.close();
    if count_bad != 0:
        print "%d binary packages processed; %d with no source match which is %.2f%%" % (count_total, count_bad, (float(count_bad)/count_total)*100);
    else:
        print "%d binary packages processed; 0 with no source match which is 0%%" % (count_total);

################################################################################

def do_sources(location, prefix, suite, component, server):
    temp_filename = tempfile.mktemp();
    fd = os.open(temp_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0700);
    os.close(fd);
    sources = location + prefix + 'Sources.gz';
    (result, output) = commands.getstatusoutput("gunzip -c %s > %s" % (sources, temp_filename));
    if (result != 0):
        utils.fubar("Gunzip invocation failed!\n%s" % (output), result);
    print 'Processing '+sources+'...';
    process_sources (location, temp_filename, suite, component, server);
    os.unlink(temp_filename);

################################################################################

def main ():
    global Cnf, projectB, query_cache, files_query_cache, source_query_cache, src_associations_query_cache, dsc_files_query_cache, bin_associations_query_cache, binaries_query_cache;

    Cnf = utils.get_conf()

    print "Re-Creating DB..."
    (result, output) = commands.getstatusoutput("psql -f init_pool.sql")
    if (result != 0):
        utils.fubar("psql invocation failed!\n", result);

    projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]), None, None, 'postgres')

    db_access.init (Cnf, projectB);

    print "Adding static tables from conf file..."
    projectB.query("BEGIN WORK");
    update_architectures();
    update_components();
    update_archives();
    update_locations();
    update_suites();
    projectB.query("COMMIT WORK");

    files_query_cache = utils.open_file(Cnf["Neve::ExportDir"]+"files","w");
    source_query_cache = utils.open_file(Cnf["Neve::ExportDir"]+"source","w");
    src_associations_query_cache = utils.open_file(Cnf["Neve::ExportDir"]+"src_associations","w");
    dsc_files_query_cache = utils.open_file(Cnf["Neve::ExportDir"]+"dsc_files","w");
    binaries_query_cache = utils.open_file(Cnf["Neve::ExportDir"]+"binaries","w");
    bin_associations_query_cache = utils.open_file(Cnf["Neve::ExportDir"]+"bin_associations","w");

    projectB.query("BEGIN WORK");
    # Process Sources files to populate `source' and friends
    for location in Cnf.SubTree("Location").List():
        SubSec = Cnf.SubTree("Location::%s" % (location));
        server = SubSec["Archive"];
        type = Cnf.Find("Location::%s::Type" % (location));
        if type == "legacy-mixed":
            prefix = '';
            suite = Cnf.Find("Location::%s::Suite" % (location));
            do_sources(location, prefix, suite, "", server);
        elif type == "legacy":
            for suite in Cnf.SubTree("Location::%s::Suites" % (location)).List():
                for component in Cnf.SubTree("Component").List():
                    prefix = Cnf.Find("Suite::%s::CodeName" % (suite)) + '/' + component + '/source/'
                    do_sources(location, prefix, suite, component, server);
#        for component in Cnf.SubTree("Component").List():
#            prefix = component + '/'
#            do_sources(location, prefix);
        else:
            utils.fubar("Unknown location type ('%s')." % (type));

    # Process Packages files to populate `binaries' and friends
    for location in Cnf.SubTree("Location").List():
        SubSec = Cnf.SubTree("Location::%s" % (location));
        server = SubSec["Archive"];
        type = Cnf.Find("Location::%s::Type" % (location));
        if type == "legacy-mixed":
            packages = location + 'Packages';
            suite = Cnf.Find("Location::%s::Suite" % (location));
            print 'Processing '+location+'...';
            process_packages (location, packages, suite, "", server);
        elif type == "legacy":
            for suite in Cnf.SubTree("Location::%s::Suites" % (location)).List():
                for component in Cnf.SubTree("Component").List():
                    for architecture in Cnf.SubTree("Suite::%s::Architectures" % (suite)).List():
                        # "source" and "all" are not real binary-ARCH trees; skip them.
                        if architecture == "source" or architecture == "all":
                            continue;
                        packages = location + Cnf.Find("Suite::%s::CodeName" % (suite)) + '/' + component + '/binary-' + architecture + '/Packages'
                        print 'Processing '+packages+'...';
                        process_packages (location, packages, suite, component, server);

    files_query_cache.close();
    source_query_cache.close();
    src_associations_query_cache.close();
    dsc_files_query_cache.close();
    binaries_query_cache.close();
    bin_associations_query_cache.close();
    print "Writing data to `files' table...";
    projectB.query("COPY files FROM '%s'" % (Cnf["Neve::ExportDir"]+"files"));
    print "Writing data to `source' table...";
    projectB.query("COPY source FROM '%s'" % (Cnf["Neve::ExportDir"]+"source"));
    print "Writing data to `src_associations' table...";
    projectB.query("COPY src_associations FROM '%s'" % (Cnf["Neve::ExportDir"]+"src_associations"));
    print "Writing data to `dsc_files' table...";
    projectB.query("COPY dsc_files FROM '%s'" % (Cnf["Neve::ExportDir"]+"dsc_files"));
    print "Writing data to `binaries' table...";
    projectB.query("COPY binaries FROM '%s'" % (Cnf["Neve::ExportDir"]+"binaries"));
    print "Writing data to `bin_associations' table...";
    projectB.query("COPY bin_associations FROM '%s'" % (Cnf["Neve::ExportDir"]+"bin_associations"));
    print "Committing...";
    projectB.query("COMMIT WORK");

    # Add the constraints and otherwise generally clean up the database.
    # See add_constraints.sql for more details...
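    # (add_constraints.sql itself isn't reproduced here; presumably it adds
    # the indices and primary/foreign keys that init_pool.sql deliberately
    # left off so the bulk COPYs above wouldn't pay per-row checks.)
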
    print "Running add_constraints.sql...";
    (result, output) = commands.getstatusoutput("psql projectb < add_constraints.sql");
    if (result != 0):
        utils.fubar("psql invocation failed!\n%s" % (output), result);

################################################################################

if __name__ == '__main__':
    main()