4 # Copyright (C) 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 2 of the License, or
9 # (at your option) any later version.
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software
18 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
20 ###############################################################################
22 # 04:36|<aj> elmo: you're making me waste 5 seconds per architecture!!!!!! YOU BASTARD!!!!!
24 ###############################################################################
26 # This code is a horrible mess for two reasons:
28 # (o) For Debian's usage, it's doing something like 160k INSERTs,
29 # even on auric, that makes the program unusable unless we get
30 # involved in all sorts of silly optimization games (local dicts to avoid
31 # redundant SELECTS, using COPY FROM rather than INSERTS etc.)
33 # (o) It's very site specific, because I don't expect to use this
34 # script again in a hurry, and I don't want to spend any more time
35 # on it than absolutely necessary.
37 ###############################################################################
39 import commands, os, pg, re, sys, time
40 import apt_pkg
41 import daklib.database
42 import daklib.utils
44 ###############################################################################
# Matches the "binary-<arch>" component of an index path, so arch:all
# packages can be re-pointed at "binary-all" (see process_packages).
46 re_arch_from_filename = re.compile(r"binary-[^/]+")
48 ###############################################################################
# Module-level caches and serial counters.  The script assigns primary keys
# itself and writes COPY-format dump files instead of issuing one INSERT per
# row -- see the header comment about the ~160k INSERTs this avoids.
# NOTE(review): this listing is sampled; several sibling globals that the
# functions below reference (files_id_cache, files_id_serial, source_cache,
# source_id_serial, arch_all_cache, binary_cache, reject_message, Cnf,
# projectB) were dropped between the visible lines -- confirm against the
# full file.
56 location_path_cache = {}
60 src_associations_id_serial = 0
61 dsc_files_id_serial = 0
62 files_query_cache = None
63 source_query_cache = None
64 src_associations_query_cache = None
65 dsc_files_query_cache = None
# Dedup cache for .orig.tar.gz files shared between source versions.
66 orig_tar_gz_cache = {}
68 binaries_id_serial = 0
69 binaries_query_cache = None
70 bin_associations_id_serial = 0
71 bin_associations_query_cache = None
# Maps "package_version" -> source id, used to link binaries to sources.
73 source_cache_for_binaries = {}
76 ################################################################################
78 def usage(exit_code=0):
79 print """Usage: dak import-archive
80 Initializes a projectB database from an existing archive
82 -a, --action actually perform the initalization
83 -h, --help show this help and exit."""
86 ###############################################################################
def reject (str, prefix="Rejected: "):
    """Append str (with prefix) as one line to the module-level
    reject_message accumulator; empty strings are ignored.

    Note: the parameter name 'str' shadows the builtin, but is kept for
    interface compatibility with callers passing it by keyword.
    """
    # The sampled listing dropped the 'global' declaration; without it the
    # augmented assignment would raise UnboundLocalError on first use.
    global reject_message
    if str:
        reject_message += prefix + str + "\n"
93 ###############################################################################
95 def check_signature (filename):
# Run gpgv over 'filename' and parse its --status-fd output; on success the
# signing key's fingerprint is returned (from the VALIDSIG/NO_PUBKEY args),
# otherwise reject() accumulates the reasons for the caller to inspect.
# NOTE(review): this listing is sampled -- the embedded line numbers jump
# (e.g. 97->100, 108->114), so initialisations such as "keywords = {}", the
# per-line split, the early "return None" paths and some guards are missing
# here; read the control flow against the full file.
96 if not daklib.utils.re_taint_free.match(os.path.basename(filename)):
97 reject("!!WARNING!! tainted filename: '%s'." % (filename))
# Invoke gpgv with a pipe for the machine-readable status output.
100 status_read, status_write = os.pipe()
101 cmd = "gpgv --status-fd %s %s %s" \
102 % (status_write, daklib.utils.gpg_keyring_args(), filename)
103 (output, status, exit_status) = daklib.utils.gpgv_get_status_output(cmd, status_read, status_write)
105 # Process the status-fd output
107 bad = internal_error = ""
108 for line in status.split('\n'):
# Each well-formed status line is "[GNUPG:] KEYWORD args...".
114 internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
116 (gnupg, keyword) = split[:2]
117 if gnupg != "[GNUPG:]":
118 internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
# NODATA and SIGEXPIRED may legitimately repeat; anything else repeating
# is treated as a parse error.
121 if keywords.has_key(keyword) and keyword != "NODATA" and keyword != "SIGEXPIRED":
122 internal_error += "found duplicate status token ('%s').\n" % (keyword)
125 keywords[keyword] = args
127 # If we failed to parse the status-fd output, let's just whine and bail now
129 reject("internal error while performing signature check on %s." % (filename))
130 reject(internal_error, "")
131 reject("Please report the above errors to the Archive maintainers by replying to this mail.", "")
134 # Now check for obviously bad things in the processed output
# An expired key only warrants a warning (old archives), not a reject.
135 if keywords.has_key("SIGEXPIRED"):
136 daklib.utils.warn("%s: signing key has expired." % (filename))
137 if keywords.has_key("KEYREVOKED"):
138 reject("key used to sign %s has been revoked." % (filename))
140 if keywords.has_key("BADSIG"):
141 reject("bad signature on %s." % (filename))
143 if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
144 reject("failed to check signature on %s." % (filename))
# NO_PUBKEY still yields a fingerprint (in its args) so the import can
# proceed for keys no longer in the keyring.
146 if keywords.has_key("NO_PUBKEY"):
147 args = keywords["NO_PUBKEY"]
149 reject("internal error while checking signature on %s." % (filename))
152 fingerprint = args[0]
153 if keywords.has_key("BADARMOR"):
154 reject("ascii armour of signature was corrupt in %s." % (filename))
# Unsigned files are tolerated with a warning here (unlike dak's normal
# upload path) -- the stricter reject is deliberately commented out.
156 if keywords.has_key("NODATA"):
157 daklib.utils.warn("no signature found for %s." % (filename))
159 #reject("no signature found in %s." % (filename))
165 # Next check gpgv exited with a zero return code
166 if exit_status and not keywords.has_key("NO_PUBKEY"):
167 reject("gpgv failed while checking %s." % (filename))
169 reject(daklib.utils.prefix_multi_line_string(status, " [GPG status-fd output:] "), "")
171 reject(daklib.utils.prefix_multi_line_string(output, " [GPG output:] "), "")
174 # Sanity check the good stuff we expect
175 if not keywords.has_key("VALIDSIG"):
176 if not keywords.has_key("NO_PUBKEY"):
177 reject("signature on %s does not appear to be valid [No VALIDSIG]." % (filename))
# VALIDSIG's first argument is the primary key fingerprint.
180 args = keywords["VALIDSIG"]
182 reject("internal error while checking signature on %s." % (filename))
185 fingerprint = args[0]
186 if not keywords.has_key("GOODSIG") and not keywords.has_key("NO_PUBKEY"):
187 reject("signature on %s does not appear to be valid [No GOODSIG]." % (filename))
189 if not keywords.has_key("SIG_ID") and not keywords.has_key("NO_PUBKEY"):
190 reject("signature on %s does not appear to be valid [No SIG_ID]." % (filename))
193 # Finally ensure there's not something we don't recognise
194 known_keywords = daklib.utils.Dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
195 SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
198 for keyword in keywords.keys():
199 if not known_keywords.has_key(keyword):
200 reject("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], filename))
208 ################################################################################
210 # Prepares a filename or directory (s) to be file.filename by stripping any part of the location (sub) from it.
def poolify (s, sub):
    """Strip the overlap between the end of location prefix 'sub' and the
    start of path 's', returning the pool-relative remainder.

    For example poolify("pool/main/a/apt", "pool/") == "main/a/apt".
    Returns 's' unchanged when there is no overlap at all.
    """
    for i in range(len(sub)):
        # Does the tail of 'sub' starting at i match the head of 's'?
        if sub[i:] == s[0:len(sub)-i]:
            return s[len(sub)-i:]
    # No overlap: 's' is already relative to the location.  (The sampled
    # listing dropped this line, which would make the function return None.)
    return s
def update_archives ():
    """Wipe and repopulate the archive table from the Archive config tree."""
    projectB.query("DELETE FROM archive")
    for archive in Cnf.SubTree("Archive").List():
        archive_config = Cnf.SubTree("Archive::%s" % (archive))
        insert = "INSERT INTO archive (name, origin_server, description) VALUES ('%s', '%s', '%s')" \
                 % (archive, archive_config["OriginServer"], archive_config["Description"])
        projectB.query(insert)
def update_components ():
    """Wipe and repopulate the component table from the Component config tree."""
    projectB.query("DELETE FROM component")
    for component in Cnf.SubTree("Component").List():
        component_config = Cnf.SubTree("Component::%s" % (component))
        insert = "INSERT INTO component (name, description, meets_dfsg) VALUES ('%s', '%s', '%s')" % \
                 (component, component_config["Description"], component_config["MeetsDFSG"])
        projectB.query(insert)
def update_locations ():
    """Wipe and repopulate the location table from the Location config tree.

    legacy-mixed locations get a single row; every other location type gets
    one row per configured component.
    """
    projectB.query("DELETE FROM location")
    for location in Cnf.SubTree("Location").List():
        SubSec = Cnf.SubTree("Location::%s" % (location))
        archive_id = daklib.database.get_archive_id(SubSec["archive"])
        type = SubSec.Find("type")
        if type == "legacy-mixed":
            projectB.query("INSERT INTO location (path, archive, type) VALUES ('%s', %d, '%s')" % (location, archive_id, SubSec["type"]))
        else:
            # Restored 'else:' (dropped by the sampled listing); without it
            # legacy-mixed locations would also get per-component rows.
            for component in Cnf.SubTree("Component").List():
                component_id = daklib.database.get_component_id(component)
                projectB.query("INSERT INTO location (path, component, archive, type) VALUES ('%s', %d, %d, '%s')" %
                               (location, component_id, archive_id, SubSec["type"]))
def update_architectures ():
    """Wipe and repopulate the architecture table from the Architectures section."""
    projectB.query("DELETE FROM architecture")
    for arch in Cnf.SubTree("Architectures").List():
        description = Cnf["Architectures::%s" % (arch)]
        projectB.query("INSERT INTO architecture (arch_string, description) VALUES ('%s', '%s')" % (arch, description))
def update_suites ():
    """Wipe and repopulate the suite and suite_architectures tables.

    Optional per-suite fields (Version, Origin, Description) are applied with
    follow-up UPDATEs; architectures are linked via the fresh suite id.
    """
    projectB.query("DELETE FROM suite")
    for suite in Cnf.SubTree("Suite").List():
        suite_config = Cnf.SubTree("Suite::%s" %(suite))
        projectB.query("INSERT INTO suite (suite_name) VALUES ('%s')" % suite.lower())
        for field in ("Version", "Origin", "Description"):
            if not suite_config.has_key(field):
                continue
            projectB.query("UPDATE suite SET %s = '%s' WHERE suite_name = '%s'" % (field.lower(), suite_config[field], suite.lower()))
        for architecture in Cnf.ValueList("Suite::%s::Architectures" % (suite)):
            architecture_id = daklib.database.get_architecture_id (architecture)
            # currval() picks up the id of the suite row inserted above.
            projectB.query("INSERT INTO suite_architectures (suite, architecture) VALUES (currval('suite_id_seq'), %d)" % (architecture_id))
def update_override_type():
    """Wipe and repopulate the override_type table from the OverrideType list."""
    projectB.query("DELETE FROM override_type")
    for override_type in Cnf.ValueList("OverrideType"):
        projectB.query("INSERT INTO override_type (type) VALUES ('%s')" % (override_type))
def update_priority():
    """Wipe and repopulate the priority table from the Priority config section."""
    projectB.query("DELETE FROM priority")
    for priority in Cnf.SubTree("Priority").List():
        level = Cnf["Priority::%s" % (priority)]
        projectB.query("INSERT INTO priority (priority, level) VALUES ('%s', %s)" % (priority, level))
def update_section():
    """Wipe and repopulate the section table.

    Non-main components are folded into the section name either as a prefix
    ("contrib/net") or a suffix ("net/contrib") depending on the
    Control-Overrides::ComponentPosition setting.
    """
    projectB.query("DELETE FROM section")
    for component in Cnf.SubTree("Component").List():
        # Restored the prefix/suffix initialisations that the sampled
        # listing dropped; without them several paths hit a NameError.
        if Cnf["Control-Overrides::ComponentPosition"] == "prefix":
            suffix = ""
            if component != 'main':
                prefix = component + '/'
            else:
                prefix = ""
        else:
            prefix = ""
            if component != 'main':
                suffix = '/' + component
            else:
                suffix = ""
        for section in Cnf.ValueList("Section"):
            projectB.query("INSERT INTO section (section) VALUES ('%s%s%s')" % (prefix, section, suffix))
def get_location_path(directory):
    """Return the location table path matching 'directory', memoized in
    location_path_cache; aborts via fubar() if no location matches."""
    global location_path_cache

    if directory in location_path_cache:
        return location_path_cache[directory]

    q = projectB.query("SELECT DISTINCT path FROM location WHERE path ~ '%s'" % (directory))
    # Restored the try/except and final return that the sampled listing
    # dropped: an empty result set raises IndexError on [0][0].
    try:
        path = q.getresult()[0][0]
    except IndexError:
        daklib.utils.fubar("[import-archive] get_location_path(): Couldn't get path for %s" % (directory))
    location_path_cache[directory] = path
    return path
304 ################################################################################
def get_or_set_files_id (filename, size, md5sum, location_id):
    """Return the files-table id for (filename, size, md5sum, location_id).

    On first sight a new serial is allocated and a COPY-format row is
    appended to files_query_cache; subsequent calls hit files_id_cache.
    'size' and 'md5sum' are the string values read from the index files.
    """
    global files_id_cache, files_id_serial, files_query_cache

    cache_key = "_".join((filename, size, md5sum, repr(location_id)))
    if cache_key not in files_id_cache:
        # Restored the serial increment the sampled listing dropped --
        # without it every new file would reuse the same id.
        files_id_serial += 1
        files_query_cache.write("%d\t%s\t%s\t%s\t%d\t\\N\n" % (files_id_serial, filename, size, md5sum, location_id))
        files_id_cache[cache_key] = files_id_serial

    return files_id_cache[cache_key]
317 ###############################################################################
319 def process_sources (filename, suite, component, archive):
# Parse an (already uncompressed) Sources index and emit COPY-format rows
# for the source, dsc_files and src_associations tables via the global
# query-cache file handles, assigning serial ids locally.
# NOTE(review): this listing is sampled -- the embedded line numbers jump
# (e.g. 323->325, 344->346), so try:/else:/return lines and some guards
# (e.g. "if reject_message:" before the fubar at 346) are missing here.
320 global source_cache, source_query_cache, src_associations_query_cache, dsc_files_query_cache, source_id_serial, src_associations_id_serial, dsc_files_id_serial, source_cache_for_binaries, orig_tar_gz_cache, reject_message
322 suite = suite.lower()
323 suite_id = daklib.database.get_suite_id(suite)
325 file = daklib.utils.open_file (filename)
326 except daklib.utils.cant_open_exc:
327 daklib.utils.warn("can't open '%s'" % (filename))
# Iterate over the paragraphs (one per source package) of the index.
329 Scanner = apt_pkg.ParseTagFile(file)
330 while Scanner.Step() != 0:
331 package = Scanner.Section["package"]
332 version = Scanner.Section["version"]
333 directory = Scanner.Section["directory"]
334 dsc_file = os.path.join(Cnf["Dir::Root"], directory, "%s_%s.dsc" % (package, daklib.utils.re_no_epoch.sub('', version)))
335 # Sometimes the Directory path is a lie; check in the pool
336 if not os.path.exists(dsc_file):
337 if directory.split('/')[0] == "dists":
338 directory = Cnf["Dir::PoolRoot"] + daklib.utils.poolify(package, component)
339 dsc_file = os.path.join(Cnf["Dir::Root"], directory, "%s_%s.dsc" % (package, daklib.utils.re_no_epoch.sub('', version)))
340 if not os.path.exists(dsc_file):
341 daklib.utils.fubar("%s not found." % (dsc_file))
# Install date is approximated from the .dsc's mtime on disk.
342 install_date = time.strftime("%Y-%m-%d", time.localtime(os.path.getmtime(dsc_file)))
343 fingerprint = check_signature(dsc_file)
344 fingerprint_id = daklib.database.get_or_set_fingerprint_id(fingerprint)
# Abort the import if check_signature() accumulated rejections.
346 daklib.utils.fubar("%s: %s" % (dsc_file, reject_message))
347 maintainer = Scanner.Section["maintainer"]
# Escape single quotes for the string-built SQL elsewhere in this script.
348 maintainer = maintainer.replace("'", "\\'")
349 maintainer_id = daklib.database.get_or_set_maintainer_id(maintainer)
350 location = get_location_path(directory.split('/')[0])
351 location_id = daklib.database.get_location_id (location, component, archive)
352 if not directory.endswith("/"):
# Make 'directory' relative to the location and '/'-terminated.
354 directory = poolify (directory, location)
355 if directory != "" and not directory.endswith("/"):
357 no_epoch_version = daklib.utils.re_no_epoch.sub('', version)
358 # Add all files referenced by the .dsc to the files table
360 for line in Scanner.Section["files"].split('\n'):
362 (md5sum, size, filename) = line.strip().split()
363 # Don't duplicate .orig.tar.gz's
364 if filename.endswith(".orig.tar.gz"):
365 cache_key = "%s_%s_%s" % (filename, size, md5sum)
366 if orig_tar_gz_cache.has_key(cache_key):
367 id = orig_tar_gz_cache[cache_key]
369 id = get_or_set_files_id (directory + filename, size, md5sum, location_id)
370 orig_tar_gz_cache[cache_key] = id
372 id = get_or_set_files_id (directory + filename, size, md5sum, location_id)
374 # If this is the .dsc itself; save the ID for later.
375 if filename.endswith(".dsc"):
# Reconstruct the canonical .dsc path for the source row.
377 filename = directory + package + '_' + no_epoch_version + '.dsc'
378 cache_key = "%s_%s" % (package, version)
379 if not source_cache.has_key(cache_key):
380 nasty_key = "%s_%s" % (package, version)
381 source_id_serial += 1
# Remember the id so process_packages() can link binaries to it.
382 if not source_cache_for_binaries.has_key(nasty_key):
383 source_cache_for_binaries[nasty_key] = source_id_serial
384 tmp_source_id = source_id_serial
385 source_cache[cache_key] = source_id_serial
386 source_query_cache.write("%d\t%s\t%s\t%d\t%d\t%s\t%s\n" % (source_id_serial, package, version, maintainer_id, files_id, install_date, fingerprint_id))
# One dsc_files row per file listed in the .dsc.
388 dsc_files_id_serial += 1
389 dsc_files_query_cache.write("%d\t%d\t%d\n" % (dsc_files_id_serial, tmp_source_id,id))
# Source already seen (in another suite): reuse its id.
391 tmp_source_id = source_cache[cache_key]
# Associate the source with this suite.
393 src_associations_id_serial += 1
394 src_associations_query_cache.write("%d\t%d\t%d\n" % (src_associations_id_serial, suite_id, tmp_source_id))
398 ###############################################################################
400 def process_packages (filename, suite, component, archive):
# Parse a Packages index and emit COPY-format rows for the binaries and
# bin_associations tables, linking each binary to its source via
# source_cache_for_binaries (filled by process_sources).
# NOTE(review): sampled listing -- the embedded numbering jumps (401->405,
# 423->426, 450->455 etc.), so count_total/count_bad initialisation, the
# try:/else: lines and some guards are missing from this view.
401 global arch_all_cache, binary_cache, binaries_id_serial, binaries_query_cache, bin_associations_id_serial, bin_associations_query_cache, reject_message
405 suite = suite.lower()
406 suite_id = daklib.database.get_suite_id(suite)
408 file = daklib.utils.open_file (filename)
409 except daklib.utils.cant_open_exc:
410 daklib.utils.warn("can't open '%s'" % (filename))
412 Scanner = apt_pkg.ParseTagFile(file)
413 while Scanner.Step() != 0:
414 package = Scanner.Section["package"]
415 version = Scanner.Section["version"]
416 maintainer = Scanner.Section["maintainer"]
417 maintainer = maintainer.replace("'", "\\'")
418 maintainer_id = daklib.database.get_or_set_maintainer_id(maintainer)
419 architecture = Scanner.Section["architecture"]
420 architecture_id = daklib.database.get_architecture_id (architecture)
# Packages indices carry no signature; record a placeholder fingerprint.
421 fingerprint = "NOSIG"
422 fingerprint_id = daklib.database.get_or_set_fingerprint_id(fingerprint)
# Source defaults to the package itself when no Source: field is present.
423 if not Scanner.Section.has_key("source"):
426 source = Scanner.Section["source"]
# "Source: foo (1.2)" form carries an explicit source version.
428 if source.find("(") != -1:
429 m = daklib.utils.re_extract_src_version.match(source)
431 source_version = m.group(2)
432 if not source_version:
433 source_version = version
434 filename = Scanner.Section["filename"]
435 location = get_location_path(filename.split('/')[0])
436 location_id = daklib.database.get_location_id (location, component, archive)
437 filename = poolify (filename, location)
# arch:all debs are re-pointed at binary-all so they are stored once.
438 if architecture == "all":
439 filename = re_arch_from_filename.sub("binary-all", filename)
440 cache_key = "%s_%s" % (source, source_version)
441 source_id = source_cache_for_binaries.get(cache_key, None)
442 size = Scanner.Section["size"]
443 md5sum = Scanner.Section["md5sum"]
444 files_id = get_or_set_files_id (filename, size, md5sum, location_id)
445 type = "deb"; # FIXME
446 cache_key = "%s_%s_%s_%d_%d_%d_%d" % (package, version, repr(source_id), architecture_id, location_id, files_id, suite_id)
447 if not arch_all_cache.has_key(cache_key):
448 arch_all_cache[cache_key] = 1
449 cache_key = "%s_%s_%s_%d" % (package, version, repr(source_id), architecture_id)
450 if not binary_cache.has_key(cache_key):
# source_id may be None; repr() turns it into the literal string "None"
# (converted to \N for COPY elsewhere -- confirm against full file).
455 source_id = repr(source_id)
456 binaries_id_serial += 1
457 binaries_query_cache.write("%d\t%s\t%s\t%d\t%s\t%d\t%d\t%s\t%s\n" % (binaries_id_serial, package, version, maintainer_id, source_id, architecture_id, files_id, type, fingerprint_id))
458 binary_cache[cache_key] = binaries_id_serial
459 tmp_binaries_id = binaries_id_serial
461 tmp_binaries_id = binary_cache[cache_key]
# Associate the binary with this suite.
463 bin_associations_id_serial += 1
464 bin_associations_query_cache.write("%d\t%d\t%d\n" % (bin_associations_id_serial, suite_id, tmp_binaries_id))
# Summary: the second print is the count_total == 0 / no-bad-match branch.
469 print "%d binary packages processed; %d with no source match which is %.2f%%" % (count_total, count_bad, (float(count_bad)/count_total)*100)
471 print "%d binary packages processed; 0 with no source match which is 0%%" % (count_total)
473 ###############################################################################
475 def do_sources(sources, suite, component, server):
476 temp_filename = daklib.utils.temp_filename()
477 (result, output) = commands.getstatusoutput("gunzip -c %s > %s" % (sources, temp_filename))
479 daklib.utils.fubar("Gunzip invocation failed!\n%s" % (output), result)
480 print 'Processing '+sources+'...'
481 process_sources (temp_filename, suite, component, server)
482 os.unlink(temp_filename)
484 ###############################################################################
# NOTE(review): the enclosing "def main():" line (original line 486) was
# dropped by the sampling -- what follows is the body of the main driver:
# parse options, (re)create the database, load the static config tables,
# stream Sources/Packages data into COPY dump files, bulk-load them, then
# apply the schema constraints.
487 global Cnf, projectB, query_cache, files_query_cache, source_query_cache, src_associations_query_cache, dsc_files_query_cache, bin_associations_query_cache, binaries_query_cache
489 Cnf = daklib.utils.get_conf()
490 Arguments = [('a', "action", "Import-Archive::Options::Action"),
491 ('h', "help", "Import-Archive::Options::Help")]
492 for i in [ "action", "help" ]:
493 if not Cnf.has_key("Import-Archive::Options::%s" % (i)):
494 Cnf["Import-Archive::Options::%s" % (i)] = ""
496 apt_pkg.ParseCommandLine(Cnf, Arguments, sys.argv)
498 Options = Cnf.SubTree("Import-Archive::Options")
# Destructive run: require an explicit -a/--action.
502 if not Options["Action"]:
503 daklib.utils.warn("""no -a/--action given; not doing anything.
504 Please read the documentation before running this script.
508 print "Re-Creating DB..."
509 (result, output) = commands.getstatusoutput("psql -f init_pool.sql template1")
511 daklib.utils.fubar("psql invocation failed!\n", result)
514 projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
516 daklib.database.init (Cnf, projectB)
518 print "Adding static tables from conf file..."
# Static config-derived tables, loaded in one transaction.  NOTE(review):
# the calls to update_archives/components/locations/suites/priority/section
# (original lines 521-527) were dropped by the sampling.
519 projectB.query("BEGIN WORK")
520 update_architectures()
525 update_override_type()
528 projectB.query("COMMIT WORK")
# One dump file per table; these are later bulk-loaded with COPY ... FROM.
530 files_query_cache = daklib.utils.open_file(Cnf["Import-Archive::ExportDir"]+"files","w")
531 source_query_cache = daklib.utils.open_file(Cnf["Import-Archive::ExportDir"]+"source","w")
532 src_associations_query_cache = daklib.utils.open_file(Cnf["Import-Archive::ExportDir"]+"src_associations","w")
533 dsc_files_query_cache = daklib.utils.open_file(Cnf["Import-Archive::ExportDir"]+"dsc_files","w")
534 binaries_query_cache = daklib.utils.open_file(Cnf["Import-Archive::ExportDir"]+"binaries","w")
535 bin_associations_query_cache = daklib.utils.open_file(Cnf["Import-Archive::ExportDir"]+"bin_associations","w")
537 projectB.query("BEGIN WORK")
538 # Process Sources files to populate `source' and friends
539 for location in Cnf.SubTree("Location").List():
540 SubSec = Cnf.SubTree("Location::%s" % (location))
541 server = SubSec["Archive"]
542 type = Cnf.Find("Location::%s::Type" % (location))
543 if type == "legacy-mixed":
544 sources = location + 'Sources.gz'
545 suite = Cnf.Find("Location::%s::Suite" % (location))
546 do_sources(sources, suite, "", server)
547 elif type == "legacy" or type == "pool":
548 for suite in Cnf.ValueList("Location::%s::Suites" % (location)):
549 for component in Cnf.SubTree("Component").List():
550 sources = Cnf["Dir::Root"] + "dists/" + Cnf["Suite::%s::CodeName" % (suite)] + '/' + component + '/source/' + 'Sources.gz'
551 do_sources(sources, suite, component, server)
# Unknown location type -> abort (the 'else:' line was sampled out).
553 daklib.utils.fubar("Unknown location type ('%s')." % (type))
555 # Process Packages files to populate `binaries' and friends
557 for location in Cnf.SubTree("Location").List():
558 SubSec = Cnf.SubTree("Location::%s" % (location))
559 server = SubSec["Archive"]
560 type = Cnf.Find("Location::%s::Type" % (location))
561 if type == "legacy-mixed":
562 packages = location + 'Packages'
563 suite = Cnf.Find("Location::%s::Suite" % (location))
564 print 'Processing '+location+'...'
565 process_packages (packages, suite, "", server)
566 elif type == "legacy" or type == "pool":
567 for suite in Cnf.ValueList("Location::%s::Suites" % (location)):
568 for component in Cnf.SubTree("Component").List():
# Skip pseudo-architectures (source, all) via real_arch.
569 architectures = filter(daklib.utils.real_arch,
570 Cnf.ValueList("Suite::%s::Architectures" % (suite)))
571 for architecture in architectures:
572 packages = Cnf["Dir::Root"] + "dists/" + Cnf["Suite::%s::CodeName" % (suite)] + '/' + component + '/binary-' + architecture + '/Packages'
573 print 'Processing '+packages+'...'
574 process_packages (packages, suite, component, server)
# Flush the dump files, then bulk-load each table with COPY.
576 files_query_cache.close()
577 source_query_cache.close()
578 src_associations_query_cache.close()
579 dsc_files_query_cache.close()
580 binaries_query_cache.close()
581 bin_associations_query_cache.close()
582 print "Writing data to `files' table..."
583 projectB.query("COPY files FROM '%s'" % (Cnf["Import-Archive::ExportDir"]+"files"))
584 print "Writing data to `source' table..."
585 projectB.query("COPY source FROM '%s'" % (Cnf["Import-Archive::ExportDir"]+"source"))
586 print "Writing data to `src_associations' table..."
587 projectB.query("COPY src_associations FROM '%s'" % (Cnf["Import-Archive::ExportDir"]+"src_associations"))
588 print "Writing data to `dsc_files' table..."
589 projectB.query("COPY dsc_files FROM '%s'" % (Cnf["Import-Archive::ExportDir"]+"dsc_files"))
590 print "Writing data to `binaries' table..."
591 projectB.query("COPY binaries FROM '%s'" % (Cnf["Import-Archive::ExportDir"]+"binaries"))
592 print "Writing data to `bin_associations' table..."
593 projectB.query("COPY bin_associations FROM '%s'" % (Cnf["Import-Archive::ExportDir"]+"bin_associations"))
594 print "Committing..."
595 projectB.query("COMMIT WORK")
597 # Add the constraints and otherwise generally clean up the database.
598 # See add_constraints.sql for more details...
600 print "Running add_constraints.sql..."
601 (result, output) = commands.getstatusoutput("psql %s < add_constraints.sql" % (Cnf["DB::Name"]))
604 daklib.utils.fubar("psql invocation failed!\n%s" % (output), result)
608 ################################################################################
# NOTE(review): the original "def main():" wrapper (line 610) and the
# "main()" call under the guard (line 616) were dropped by the sampling;
# only the try_with_debug() call and the guard line remain visible here.
611 daklib.utils.try_with_debug(do_da_do_da)
613 ################################################################################
615 if __name__ == '__main__':