From: James Troup
Date: Wed, 5 Jun 2002 00:18:39 +0000 (+0000)
Subject: rewritten, 10x faster.
X-Git-Url: https://git.decadent.org.uk/gitweb/?a=commitdiff_plain;h=93f1efa5d1e619cbba7b097373068e7bc5179d88;p=dak.git

rewritten, 10x faster.
---

diff --git a/jenna b/jenna
index a88718e0..95cba74b 100755
--- a/jenna
+++ b/jenna
@@ -1,8 +1,8 @@
 #!/usr/bin/env python
 
-# Generate file list which is then fed to apt-ftparchive to generate Packages and Sources files
+# Generate file lists used by apt-ftparchive to generate Packages and Sources files
 # Copyright (C) 2000, 2001, 2002 James Troup
-# $Id: jenna,v 1.17 2002-05-14 15:34:02 troup Exp $
+# $Id: jenna,v 1.18 2002-06-05 00:18:39 troup Exp $
 
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -20,21 +20,34 @@
 ################################################################################
 
-# BTAF: "GOD *DAMMIT*!! What the FUCK happened to my free will??"
-#
-#    -- http://www.angryflower.com/timelo.gif
+# I'm doing it in python btw.. nothing against your monster
+# SQL, but the python wins in terms of speed and readiblity
+# bah
+# you suck!!!!!
+# sorry :(
+# you are not!!!
+# you mock my SQL!!!!
+# you want have contest of skillz??????
+# all your skillz are belong to my sql!!!!
+# yo momma are belong to my python!!!!
+# yo momma was SQLin' like a pig last night!
 
 ################################################################################
 
-import pg, string, os, sys
-import apt_pkg
-import db_access, utils, claire, logging
+import copy, os, pg, string, sys;
+import apt_pkg;
+import claire, db_access, logging, utils;
 
 ################################################################################
 
-projectB = None
-Cnf = None
+projectB = None;
+Cnf = None;
 Logger = None;
+Options = None;
+
+################################################################################
+
+def Dict(**dict): return dict
 
 ################################################################################
@@ -44,242 +57,426 @@ Write out file lists suitable for use with apt-ftparchive.
 
   -a, --architecture=ARCH   only write file lists for this architecture
   -c, --component=COMPONENT only write file lists for this component
-  -s, --suite=SUITE         only write file lists for this suite
   -h, --help                show this help and exit
+  -n, --no-delete           don't delete older versions
+  -s, --suite=SUITE         only write file lists for this suite
 
 ARCH, COMPONENT and SUITE can be space seperated lists, e.g.
-    --architecture=\"m68k i386\""""
-    sys.exit(exit_code)
+    --architecture=\"m68k i386\"""";
+    sys.exit(exit_code);
 
 ################################################################################
 
-def generate_src_list(suite, component, output, dislocated_files):
-    sources = {}
-
-    suite_id = db_access.get_suite_id(suite);
-
-    if component == "-":
-        q = projectB.query("SELECT s.source, l.path, f.filename, f.id FROM source s, src_associations sa, location l, files f WHERE sa.source = s.id AND sa.suite = '%d' AND l.id = f.location AND s.file = f.id ORDER BY s.source, s.version"
-                           % (suite_id));
+# Handle -a, -c and -s arguments; returns them as SQL constraints
+def parse_args():
+    if Options["Suite"]:
+        suite_ids_list = [];
+        for suite in string.split(Options["Suite"]):
+            suite_id = db_access.get_suite_id(suite);
+            if suite_id == -1:
+                utils.warn("suite '%s' not recognised." % (suite));
+            else:
+                suite_ids_list.append(suite_id);
+        if suite_ids_list:
+            con_suites = "AND su.id IN (%s)" % string.join(map(str, suite_ids_list), ", ");
+        else:
+            utils.fubar("No valid suite given.");
     else:
-        q = projectB.query("SELECT s.source, l.path, f.filename, f.id FROM source s, src_associations sa, location l, component c, files f WHERE lower(c.name) = '%s' AND (c.id = l.component OR l.component = NULL) AND sa.source = s.id AND sa.suite = '%d' AND l.id = f.location AND s.file = f.id ORDER BY s.source, s.version"
-                           % (component, suite_id));
-    entries = q.getresult();
-    for entry in entries:
-        (source, path, filename, file_id) = entry;
-        if dislocated_files.has_key(file_id):
-            filename = dislocated_files[file_id];
+        con_suites = "";
+
+    if Options["Architecture"]:
+        arch_ids_list = [];
+        check_source = 0;
+        for architecture in string.split(Options["Architecture"]):
+            if architecture == "source":
+                check_source = 1;
+            else:
+                architecture_id = db_access.get_architecture_id(architecture);
+                if architecture_id == -1:
+                    utils.warn("architecture '%s' not recognised." % (architecture));
+                else:
+                    arch_ids_list.append(architecture_id);
+        if arch_ids_list:
+            con_architectures = "AND a.id IN (%s)" % string.join(map(str, arch_ids_list), ", ");
         else:
-            filename = path + filename;
-        if sources.has_key(source):
-            utils.warn("%s in %s / %s / source is duplicated." % (source, suite, component));
+            if not check_source:
+                utils.fubar("No valid architecture given.");
+    else:
+        con_architectures = "";
+        check_source = 1;
+
+    if Options["Component"]:
+        component_ids_list = [];
+        for component in string.split(Options["Component"]):
+            component_id = db_access.get_component_id(component);
+            if component_id == -1:
+                utils.warn("component '%s' not recognised." % (component));
+            else:
+                component_ids_list.append(component_id);
+        if component_ids_list:
+            con_components = "AND c.id IN (%s)" % string.join(map(str, component_ids_list), ", ");
         else:
-            sources[source] = filename;
+            utils.fubar("No valid component given.");
+    else:
+        con_components = "";
 
-    # Write the list of files out
-    source_keys = sources.keys();
-    source_keys.sort();
-    for source in source_keys:
-        output.write(sources[source]+'\n')
+    return (con_suites, con_architectures, con_components, check_source);
 
-#########################################################################################
+################################################################################
 
-def generate_bin_list(suite, component, architecture, output, type, dislocated_files):
-    packages = {}
+def version_cmp(a, b):
+    return -apt_pkg.VersionCompare(a[0], b[0]);
 
-    suite_id = db_access.get_suite_id(suite);
+#####################################################
 
-    if component == "-":
-        q = projectB.query("SELECT b.package, l.path, f.filename, f.id FROM architecture a, binaries b, bin_associations ba, location l, files f WHERE ( a.arch_string = '%s' OR a.arch_string = 'all' ) AND a.id = b.architecture AND ba.bin = b.id AND ba.suite = '%d' AND l.id = f.location AND b.file = f.id AND b.type = '%s' ORDER BY b.package, b.version, a.arch_string" % (architecture, suite_id, type));
+def delete_packages(delete_versions, pkg, dominant_arch, suite,
+                    dominant_version, delete_table, delete_col, packages):
+    suite_id = db_access.get_suite_id(suite);
+    for version in delete_versions:
+        delete_unique_id = version[1];
+        if not packages.has_key(delete_unique_id):
+            continue;
+        delete_version = version[0];
+        delete_id = packages[delete_unique_id]["id"];
+        delete_arch = packages[delete_unique_id]["arch"];
packages[delete_unique_id]["arch"]; + if not Cnf.Find("Suite::%s::Untouchable" % (suite)): + if Options["No-Delete"]: + print "Would delete %s_%s_%s in %s in favour of %s_%s" % (pkg, delete_arch, delete_version, suite, dominant_version, dominant_arch); + else: + Logger.log(["dominated", pkg, delete_arch, delete_version, dominant_version, dominant_arch]); + projectB.query("DELETE FROM %s WHERE suite = %s AND %s = %s" % (delete_table, suite_id, delete_col, delete_id)); + del packages[delete_unique_id]; + else: + if Options["No-Delete"]: + print "Would delete %s_%s_%s in favour of %s_%s, but %s is untouchable" % (pkg, delete_arch, delete_version, dominant_version, dominant_arch, suite); + else: + Logger.log(["dominated but untouchable", pkg, delete_arch, delete_version, dominant_version, dominant_arch]); + +##################################################### + +# Per-suite&pkg: resolve arch-all, vs. arch-any, assumes only one arch-all +def resolve_arch_all_vs_any(versions, packages): + arch_all_version = None; + arch_any_versions = copy.copy(versions); + for i in arch_any_versions: + unique_id = i[1]; + arch = packages[unique_id]["arch"]; + if arch == "all": + arch_all_versions = i; + arch_all_version = i[0]; + arch_any_versions.remove(i); + # Sort arch: any versions into descending order + arch_any_versions.sort(version_cmp); + highest_arch_any_version = arch_any_versions[0][0]; + + pkg = packages[unique_id]["pkg"]; + suite = packages[unique_id]["suite"]; + delete_table = "bin_associations"; + delete_col = "bin"; + + if apt_pkg.VersionCompare(highest_arch_any_version, arch_all_version) != 1: + # arch: all dominates + delete_packages(arch_any_versions, pkg, "all", suite, + arch_all_version, delete_table, delete_col, packages); else: - q = projectB.query("SELECT b.package, l.path, f.filename, f.id FROM architecture a, binaries b, bin_associations ba, location l, component c, files f WHERE lower(c.name) = '%s' AND (c.id = l.component OR l.component = NULL) AND (a.arch_string = '%s' OR a.arch_string = 'all') AND a.id = b.architecture AND ba.bin = b.id AND ba.suite = '%d' AND l.id = f.location AND b.file = f.id AND b.type = '%s' ORDER BY b.package, b.version, a.arch_string" % (component, architecture, suite_id, type)); - entries = q.getresult(); - for entry in entries: - (package, path, filename, file_id) = entry; - if dislocated_files.has_key(file_id): + # arch: any dominates + delete_packages(arch_all_versions, pkg, "any", suite, + highest_arch_any_version, delete_table, delete_col, + packages); + +##################################################### + +# Per-suite&pkg&arch: resolve duplicate versions +def remove_duplicate_versions(versions, packages): + # Sort versions into descending order + versions.sort(version_cmp); + dominant_versions = versions[0]; + dominated_versions = versions[1:]; + (dominant_version, dominant_unqiue_id) = dominant_versions; + pkg = packages[dominant_unqiue_id]["pkg"]; + arch = packages[dominant_unqiue_id]["arch"]; + suite = packages[dominant_unqiue_id]["suite"]; + if arch == "source": + delete_table = "src_associations"; + delete_col = "source"; + else: # !source + delete_table = "bin_associations"; + delete_col = "bin"; + # Remove all but the highest + delete_packages(dominated_versions, pkg, arch, suite, + dominant_version, delete_table, delete_col, packages); + return dominant_versions; + +################################################################################ + +def cleanup(packages): + # Build up the index used by the clean up functions + d = {}; + 
+        suite = packages[unique_id]["suite"];
+        pkg = packages[unique_id]["pkg"];
+        arch = packages[unique_id]["arch"];
+        version = packages[unique_id]["version"];
+        if not d.has_key(suite):
+            d[suite] = {};
+        if not d[suite].has_key(pkg):
+            d[suite][pkg] = {};
+        if not d[suite][pkg].has_key(arch):
+            d[suite][pkg][arch] = [];
+        d[suite][pkg][arch].append([version, unique_id]);
+    # Clean up old versions
+    for suite in d.keys():
+        for pkg in d[suite].keys():
+            for arch in d[suite][pkg].keys():
+                versions = d[suite][pkg][arch];
+                if len(versions) > 1:
+                    d[suite][pkg][arch] = remove_duplicate_versions(versions, packages);
+
+    # Arch: all -> any and vice versa
+    for suite in d.keys():
+        for pkg in d[suite].keys():
+            arches = d[suite][pkg];
+            # If we don't have any arch: all; we've nothing to do
+            if not arches.has_key("all"):
+                continue;
+            # Check to see if we have arch: all and arch: !all (ignoring source)
+            num_arches = len(arches.keys());
+            if arches.has_key("source"):
+                num_arches = num_arches - 1;
+            # If we do, remove the duplicates
+            if num_arches > 1:
+                versions = [];
+                for arch in arches.keys():
+                    if arch != "source":
+                        versions.extend(d[suite][pkg][arch]);
+                remove_duplicate_versions(versions, packages);
+
+################################################################################
+
+def write_legacy_mixed_filelist(suite, list, packages, dislocated_files):
+    # Work out the filename
+    filename = os.path.join(Cnf["Dir::Lists"], "%s_-_all.list" % (suite));
+    output = utils.open_file(filename, "w");
+    # Generate the final list of files
+    files = {};
+    for id in list:
+        path = packages[id]["path"];
+        filename = packages[id]["filename"];
+        file_id = packages[id]["file_id"];
+        if suite == "stable" and dislocated_files.has_key(file_id):
             filename = dislocated_files[file_id];
         else:
             filename = path + filename;
-        if packages.has_key(package):
-            utils.warn("%s in %s / %s / %s / %s is duplicated." % (package, suite, component, architecture, type));
+        if files.has_key(filename):
+            utils.warn("%s (in %s) is duplicated." % (filename, suite));
         else:
-            packages[package] = filename;
+            files[filename] = "";
+    # Sort the files since apt-ftparchive doesn't
+    keys = files.keys();
+    keys.sort();
     # Write the list of files out
-    package_keys = packages.keys();
-    package_keys.sort();
-    for package in package_keys:
-        output.write(packages[package]+'\n')
+    for file in keys:
+        output.write(file+'\n')
+    output.close();
+
+############################################################
+
+def write_filelist(suite, component, arch, type, list, packages, dislocated_files):
+    # Work out the filename
+    if arch != "source":
+        if type == "udeb":
+            arch = "debian-installer_binary-%s" % (arch);
+        elif type == "deb":
+            arch = "binary-%s" % (arch);
+    filename = os.path.join(Cnf["Dir::Lists"], "%s_%s_%s.list" % (suite, component, arch));
+    output = utils.open_file(filename, "w");
+    # Generate the final list of files
+    files = {};
+    for id in list:
+        path = packages[id]["path"];
+        filename = packages[id]["filename"];
+        file_id = packages[id]["file_id"];
+        pkg = packages[id]["pkg"];
+        if suite == "stable" and dislocated_files.has_key(file_id):
+            filename = dislocated_files[file_id];
+        else:
+            filename = path + filename;
+        if files.has_key(pkg):
+            utils.warn("%s (in %s/%s, %s) is duplicated." % (pkg, suite, component, filename));
+        else:
+            files[pkg] = filename;
+    # Sort the files since apt-ftparchive doesn't
+    pkgs = files.keys();
+    pkgs.sort();
+    # Write the list of files out
+    for pkg in pkgs:
+        output.write(files[pkg]+'\n')
+    output.close();
 
-#########################################################################################
+################################################################################
 
-##########
-# I'm doing it in python btw.. nothing against your monster
-# SQL, but the python wins in terms of speed and readiblity
-# bah
-# you suck!!!!!
-# sorry :(
-# you are not!!!
-# you mock my SQL!!!!
-# you want have contest of skillz??????
-# all your skillz are belong to my sql!!!!
-# yo momma are belong to my python!!!!
-# yo momma was SQLin' like a pig last night!
-##########
-
-# If something has gone from arch:all to arch:any or vice-versa,
-# clean out the old versions here.  The rest of jenna won't do this
-# because it's lame.  I have no idea.
-
-def clean_duplicate_packages(suite):
-    Logger.log(["Cleaning duplicate packages", suite]);
-
-    suite_id = db_access.get_suite_id(suite)
-    q = projectB.query("""
-SELECT b1.package,
-       b1.id, b1.version, a1.arch_string,
-       b2.id, b2.version, a2.arch_string
-  FROM bin_associations ba1, binaries b1, architecture a1,
-       bin_associations ba2, binaries b2, architecture a2
-  WHERE ba1.suite = ba2.suite AND ba1.suite = %s
-    AND ba1.bin = b1.id AND b1.architecture = a1.id
-    AND ba2.bin = b2.id AND b2.architecture = a2.id
-    AND b1.package = b2.package
-    AND (a1.id = a2.id OR a1.arch_string = 'all' OR a2.arch_string = 'all')
-    AND b1.id != b2.id
-    AND versioncmp(b1.version, b2.version) <= 0
-ORDER BY b1.package, b1.version, a1.arch_string;""" % (suite_id))
-
-    ql = q.getresult()
-    seen = {}
-    for i in ql:
-        (package, oldid, oldver, oldarch, newid, newver, newarch) = i
-        if not seen.has_key(oldid):
-            seen[oldid] = newid
-            Logger.log(["Removing", package, oldver, oldarch, newver, newarch]);
-            projectB.query("DELETE FROM bin_associations WHERE suite = %s AND bin = %s" % (suite_id, oldid))
-        else:
-            Logger.log(["Superceded", package, oldver, oldarch, newver, newarch]);
-
-# If something has moved from one component to another we need to
-# clean out the old versions here.  The rest of jenna won't do this
-# because it works on a per-component level for flexibility.
-
-def clean_suite (suite):
-    Logger.log(["Cleaning out packages", suite]);
-
-    suite_id = db_access.get_suite_id(suite)
-    q = projectB.query("""
-SELECT b.id, b.package, a.arch_string, b.version, l.path, f.filename, c.name
-  FROM binaries b, bin_associations ba, files f, location l, architecture a, component c
-  WHERE ba.suite = %s AND ba.bin = b.id AND b.file = f.id AND
-        f.location = l.id AND l.component = c.id AND b.architecture = a.id
-UNION
-SELECT s.id, s.source, 'source', s.version, l.path, f.filename, c.name
-  FROM source s, src_associations sa, files f, location l, component c
-  WHERE sa.suite = %s AND sa.source = s.id AND s.file = f.id AND
-        f.location = l.id AND l.component = c.id;""" % (suite_id, suite_id));
-    ql = q.getresult();
+def write_filelists(packages, dislocated_files):
+    # Build up the index to iterate over
     d = {};
-    for i in ql:
-        (id, package, architecture, version, path, filename, component) = i;
-        filename = path + filename;
-        if architecture == "source":
-            delete_table = "src_associations";
-            delete_col = "source";
+    for unique_id in packages.keys():
+        suite = packages[unique_id]["suite"];
+        component = packages[unique_id]["component"];
+        arch = packages[unique_id]["arch"];
+        type = packages[unique_id]["type"];
+        if not d.has_key(suite):
+            d[suite] = {};
+        if not d[suite].has_key(component):
+            d[suite][component] = {};
+        if not d[suite][component].has_key(arch):
+            d[suite][component][arch] = {};
+        if not d[suite][component][arch].has_key(type):
+            d[suite][component][arch][type] = [];
+        d[suite][component][arch][type].append(unique_id);
+    # Flesh out the index
+    if not Options["Suite"]:
+        suites = Cnf.SubTree("Suite").List();
+    else:
+        suites = string.split(Options["Suite"]);
+    for suite in map(string.lower, suites):
+        if not d.has_key(suite):
+            d[suite] = {};
+        if not Options["Component"]:
+            components = Cnf.ValueList("Suite::%s::Components" % (suite));
         else:
-            delete_table = "bin_associations";
-            delete_col = "bin";
-        key = "%s~%s" % (package, architecture);
-        if os.path.exists(filename):
-            if d.has_key(key):
-                (other_version, other_component, other_id) = d[key];
-                if apt_pkg.VersionCompare(version, other_version) != 1:
-                    (keep_version, keep_component) = (other_version, other_component)
-                    (delete_id, delete_version, delete_component) = (id, version, component)
-                else:
-                    (keep_version, keep_component) = (version, component)
-                    (delete_id, delete_version, delete_component) = (other_id, other_version, other_component)
-                    d[key] = (version, component, id);
-                if not Cnf.Find("Suite::%s::Untouchable" % (suite)):
-                    Logger.log(["deleting", package, architecture, delete_version, delete_component, keep_version, keep_component]);
-                    projectB.query("DELETE FROM %s WHERE suite = %s AND %s = %s" % (delete_table, suite_id, delete_col, delete_id));
-                else:
-                    Logger.log(["[untouchable]", package, architecture, delete_version, delete_component, keep_version, keep_component]);
+            components = string.split(Options["Component"]);
+        udeb_components = Cnf.ValueList("Suite::%s::UdebComponents" % (suite));
+        udeb_components = map(string.lower, udeb_components);
+        for component in map(string.lower, components):
+            if not d[suite].has_key(component):
+                d[suite][component] = {};
+            if component in udeb_components:
+                binary_types = [ "deb", "udeb" ];
             else:
-                d[key] = (version, component, id);
-        else:
-            utils.warn("%s is in %s but doesn't appear to exist?" % (filename, suite));
+                binary_types = [ "deb" ];
+            if not Options["Architecture"]:
+                architectures = Cnf.ValueList("Suite::%s::Architectures" % (suite));
+            else:
+                architectures = string.split(Options["Architecture"]);
+            for arch in map(string.lower, architectures):
+                if not d[suite][component].has_key(arch):
+                    d[suite][component][arch] = {};
+                if arch == "source":
+                    types = [ "dsc" ];
+                else:
+                    types = binary_types;
+                for type in types:
+                    if not d[suite][component][arch].has_key(type):
+                        d[suite][component][arch][type] = [];
+    # Then walk it
+    for suite in d.keys():
+        if Cnf.has_key("Suite::%s::Components" % (suite)):
+            for component in d[suite].keys():
+                for arch in d[suite][component].keys():
+                    if arch == "all":
+                        continue;
+                    for type in d[suite][component][arch].keys():
+                        list = d[suite][component][arch][type];
+                        # If it's a binary, we need to add in the arch: all debs too
+                        if arch != "source" and d[suite][component].has_key("all") \
+                           and d[suite][component]["all"].has_key(type):
+                            list.extend(d[suite][component]["all"][type]);
+                        write_filelist(suite, component, arch, type, list,
+                                       packages, dislocated_files);
+        else: # legacy-mixed suite
+            list = [];
+            for component in d[suite].keys():
+                for arch in d[suite][component].keys():
+                    for type in d[suite][component][arch].keys():
+                        list.extend(d[suite][component][arch][type]);
+            write_legacy_mixed_filelist(suite, list, packages, dislocated_files);
 
-#########################################################################################
+################################################################################
 
-def main():
-    global Cnf, projectB, Logger;
-    dislocated_files = {};
+# Want to use stable dislocation support: True or false?
+def stable_dislocation_p():
+    # If the support is not explicitly enabled, assume it's disabled
+    if not Cnf.FindB("Dinstall::StableDislocationSupport"):
+        return 0;
+    # If we don't have a stable suite, obviously a no-op
+    if not Cnf.has_key("Suite::Stable"):
+        return 0;
+    # If the suite(s) weren't explicitly listed, all suites are done
+    if not Options["Suite"]:
+        return 1;
+    # Otherwise, look in what suites the user specified
+    suites = string.split(Options["Suite"]);
+    return suites.count("stable");
+
+################################################################################
+
+def do_da_do_da():
+    (con_suites, con_architectures, con_components, check_source) = parse_args();
 
-    Cnf = utils.get_conf()
+    if stable_dislocation_p():
+        dislocated_files = claire.find_dislocated_stable(Cnf, projectB);
+    else:
+        dislocated_files = {};
+
+    query = """
+SELECT b.id, b.package, a.arch_string, b.version, l.path, f.filename, c.name,
+       f.id, su.suite_name, b.type
+  FROM binaries b, bin_associations ba, architecture a, files f, location l,
+       component c, suite su
+  WHERE b.id = ba.bin AND b.file = f.id AND b.architecture = a.id
+    AND f.location = l.id AND l.component = c.id AND ba.suite = su.id
+    %s %s %s""" % (con_suites, con_architectures, con_components);
+    if check_source:
+        query = query + """
+UNION
+SELECT s.id, s.source, 'source', s.version, l.path, f.filename, c.name, f.id,
+       su.suite_name, 'dsc'
+  FROM source s, src_associations sa, files f, location l, component c, suite su
+  WHERE s.id = sa.source AND s.file = f.id AND f.location = l.id
+    AND l.component = c.id AND sa.suite = su.id %s %s""" % (con_suites, con_components);
+    q = projectB.query(query);
+    ql = q.getresult();
+    # Build up the main index of packages
+    packages = {};
+    unique_id = 0;
+    for i in ql:
+        (id, pkg, arch, version, path, filename, component, file_id, suite, type) = i;
+        # 'id' comes from either 'binaries' or 'source', so it's not unique
+        unique_id = unique_id + 1;
+        packages[unique_id] = Dict(id=id, pkg=pkg, arch=arch, version=version,
+                                   path=path, filename=filename,
+                                   component=component, file_id=file_id,
+                                   suite=suite, type = type);
+    cleanup(packages);
+    write_filelists(packages, dislocated_files);
 
-    Arguments = [('a',"architecture","Jenna::Options::Architecture", "HasArg"),
-                 ('c',"component","Jenna::Options::Component", "HasArg"),
-                 ('h',"help","Jenna::Options::Help"),
-                 ('s',"suite", "Jenna::Options::Suite", "HasArg")];
+################################################################################
 
-    for i in ["architecture", "component", "help", "suite" ]:
+def main():
+    global Cnf, projectB, Options, Logger;
+
+    Cnf = utils.get_conf();
+    Arguments = [('a', "architecture", "Jenna::Options::Architecture", "HasArg"),
+                 ('c', "component", "Jenna::Options::Component", "HasArg"),
+                 ('h', "help", "Jenna::Options::Help"),
+                 ('n', "no-delete", "Jenna::Options::No-Delete"),
+                 ('s', "suite", "Jenna::Options::Suite", "HasArg")];
+    for i in ["architecture", "component", "help", "no-delete", "suite" ]:
         if not Cnf.has_key("Jenna::Options::%s" % (i)):
            Cnf["Jenna::Options::%s" % (i)] = "";
-
     apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv);
     Options = Cnf.SubTree("Jenna::Options");
-
     if Options["Help"]:
         usage();
     projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]));
     db_access.init(Cnf, projectB);
     Logger = logging.Logger(Cnf, "jenna");
-
-    if Options["Suite"] == "":
-        Options["Suite"] = string.join(Cnf.SubTree("Suite").List());
-    for suite in string.split(Options["Suite"]):
-        suite = string.lower(suite);
-        if suite == 'stable':
-            dislocated_files = claire.find_dislocated_stable(Cnf, projectB);
-        else:
-            dislocated_files = {};
-        clean_suite(suite);
-        clean_duplicate_packages(suite)
-        components = Options["Component"];
-        if not Cnf.has_key("Suite::%s::Components" % (suite)):
-            components = "-";
-        if components == "":
-            components = string.join(Cnf.ValueList("Suite::%s::Components" % (suite)));
-        for component in string.split(components):
-            component = string.lower(component)
-            architectures = Options["Architecture"];
-            if architectures == "":
-                architectures = string.join(Cnf.ValueList("Suite::%s::Architectures" % (suite)));
-            for architecture in string.split(architectures):
-                architecture = string.lower(architecture)
-                if architecture == "all":
-                    continue
-                if architecture == "source":
-                    Logger.log(["Processing dists/%s/%s/%s..." % (suite, component, architecture)]);
-                    output = utils.open_file("%s/%s_%s_%s.list" % (Cnf["Dir::Lists"], suite, component, architecture), "w")
-                    generate_src_list(suite, component, output, dislocated_files);
-                    output.close();
-                else:
-                    Logger.log(["Processing dists/%s/%s/binary-%s..." % (suite, component, architecture)]);
-                    output = utils.open_file("%s/%s_%s_binary-%s.list" % (Cnf["Dir::Lists"], suite, component, architecture), "w");
-                    generate_bin_list(suite, component, architecture, output, "deb", dislocated_files);
-                    output.close();
-                    if component == "main" and (suite == "unstable" or suite == "testing") and Cnf.has_key("Section::debian-installer"): # FIXME: must be a cleaner way to say debian-installer is main only?
-                        Logger.log(["Processing dists/%s/%s/debian-installer/binary-%s..." % (suite,component, architecture)]);
-                        output = utils.open_file("%s/%s_%s_debian-installer_binary-%s.list" % (Cnf["Dir::Lists"], suite, component, architecture), "w");
-                        generate_bin_list(suite, component, architecture, output, "udeb", dislocated_files);
-                        output.close();
+    do_da_do_da();
     Logger.close();
 
 #########################################################################################
 
 if __name__ == '__main__':
-    main()
+    main();
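
A minimal illustrative sketch (not part of the commit): the core idea of the rewrite is to pull everything out of the database once, build an in-memory index keyed by suite, package and architecture, and drop every version that is dominated by a newer one before the file lists are written. The toy data, the plain string ordering standing in for apt_pkg.VersionCompare, and the print in place of the real DELETE queries are assumptions made for the sketch only.

# Sketch only: nested index cleanup in the spirit of the new jenna; toy version
# ordering instead of apt_pkg.VersionCompare, prints instead of SQL DELETEs.

def remove_dominated(index):
    """index: {suite: {pkg: {arch: [version, ...]}}} -> newest version per arch."""
    kept = {}
    for suite, pkgs in index.items():
        for pkg, arches in pkgs.items():
            for arch, versions in arches.items():
                # Sort descending; "newest" here is plain string order (toy only).
                versions = sorted(versions, reverse=True)
                kept.setdefault(suite, {}).setdefault(pkg, {})[arch] = versions[0]
                for dominated in versions[1:]:
                    # The real code removes the bin_associations/src_associations row.
                    print("dominated: %s_%s_%s in %s (kept %s)"
                          % (pkg, arch, dominated, suite, versions[0]))
    return kept

if __name__ == "__main__":
    toy = {"unstable": {"hello": {"i386": ["1.1", "1.2"], "source": ["1.2"]}}}
    print(remove_dominated(toy))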