#!/usr/bin/env python

# Generate file lists used by apt-ftparchive to generate Packages and Sources files
# Copyright (C) 2000, 2001, 2002 James Troup <james@nocrew.org>
# $Id: jenna,v 1.18 2002-06-05 00:18:39 troup Exp $

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

################################################################################
# <elmo> I'm doing it in python btw.. nothing against your monster
#        SQL, but the python wins in terms of speed and readiblity
# <aj> you mock my SQL!!!!
# <elmo> you want have contest of skillz??????
# <aj> all your skillz are belong to my sql!!!!
# <elmo> yo momma are belong to my python!!!!
# <aj> yo momma was SQLin' like a pig last night!

################################################################################
import copy, os, pg, string, sys;
import apt_pkg;
import claire, db_access, logging, utils;
################################################################################

Cnf = None;
projectB = None;
Options = None;
Logger = None;

################################################################################
def Dict(**dict): return dict

################################################################################
def usage (exit_code=0):
    print """Usage: jenna [OPTION]
Write out file lists suitable for use with apt-ftparchive.

  -a, --architecture=ARCH    only write file lists for this architecture
  -c, --component=COMPONENT  only write file lists for this component
  -h, --help                 show this help and exit
  -n, --no-delete            don't delete older versions
  -s, --suite=SUITE          only write file lists for this suite

ARCH, COMPONENT and SUITE can be space separated lists, e.g.
    --architecture=\"m68k i386\"""";
    sys.exit(exit_code);

################################################################################
# Handle -a, -c and -s arguments; returns them as SQL constraints
def parse_args():
    if Options["Suite"]:
        suite_ids_list = [];
        for suite in string.split(Options["Suite"]):
            suite_id = db_access.get_suite_id(suite);
            if suite_id == -1:
                utils.warn("suite '%s' not recognised." % (suite));
            else:
                suite_ids_list.append(suite_id);
        if suite_ids_list:
            con_suites = "AND su.id IN (%s)" % string.join(map(str, suite_ids_list), ", ");
        else:
            utils.fubar("No valid suite given.");
    else:
        con_suites = "";

    if Options["Architecture"]:
        arch_ids_list = [];
        check_source = 0;
        for architecture in string.split(Options["Architecture"]):
            if architecture == "source":
                check_source = 1;
            else:
                architecture_id = db_access.get_architecture_id(architecture);
                if architecture_id == -1:
                    utils.warn("architecture '%s' not recognised." % (architecture));
                else:
                    arch_ids_list.append(architecture_id);
        if arch_ids_list:
            con_architectures = "AND a.id IN (%s)" % string.join(map(str, arch_ids_list), ", ");
        else:
            if not check_source:
                utils.fubar("No valid architecture given.");
            con_architectures = "";
    else:
        con_architectures = "";
        check_source = 1;

    if Options["Component"]:
        component_ids_list = [];
        for component in string.split(Options["Component"]):
            component_id = db_access.get_component_id(component);
            if component_id == -1:
                utils.warn("component '%s' not recognised." % (component));
            else:
                component_ids_list.append(component_id);
        if component_ids_list:
            con_components = "AND c.id IN (%s)" % string.join(map(str, component_ids_list), ", ");
        else:
            utils.fubar("No valid component given.");
    else:
        con_components = "";

    return (con_suites, con_architectures, con_components, check_source);

################################################################################
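# Comparison function for [version, unique_id] pairs; the negation makes
# sort() put the highest version first.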
def version_cmp(a, b):
    return -apt_pkg.VersionCompare(a[0], b[0]);

#####################################################
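# Remove the dominated versions of 'pkg' from 'suite' (and from the in-memory
# 'packages' index); untouchable suites are left alone and --no-delete only
# reports what would be removed.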
def delete_packages(delete_versions, pkg, dominant_arch, suite,
                    dominant_version, delete_table, delete_col, packages):
    suite_id = db_access.get_suite_id(suite);
    for version in delete_versions:
        delete_unique_id = version[1];
        if not packages.has_key(delete_unique_id):
            continue;
        delete_version = version[0];
        delete_id = packages[delete_unique_id]["id"];
        delete_arch = packages[delete_unique_id]["arch"];
        if not Cnf.Find("Suite::%s::Untouchable" % (suite)):
            if Options["No-Delete"]:
                print "Would delete %s_%s_%s in %s in favour of %s_%s" % (pkg, delete_arch, delete_version, suite, dominant_version, dominant_arch);
            else:
                Logger.log(["dominated", pkg, delete_arch, delete_version, dominant_version, dominant_arch]);
                projectB.query("DELETE FROM %s WHERE suite = %s AND %s = %s" % (delete_table, suite_id, delete_col, delete_id));
            del packages[delete_unique_id];
        else:
            if Options["No-Delete"]:
                print "Would delete %s_%s_%s in favour of %s_%s, but %s is untouchable" % (pkg, delete_arch, delete_version, dominant_version, dominant_arch, suite);
            else:
                Logger.log(["dominated but untouchable", pkg, delete_arch, delete_version, dominant_version, dominant_arch]);

#####################################################
# Per-suite&pkg: resolve arch-all, vs. arch-any, assumes only one arch-all
def resolve_arch_all_vs_any(versions, packages):
    arch_all_version = None;
    arch_any_versions = copy.copy(versions);
    for i in arch_any_versions:
        unique_id = i[1];
        arch = packages[unique_id]["arch"];
        if arch == "all":
            arch_all_versions = [i];
            arch_all_version = i[0];
            arch_any_versions.remove(i);
    # Sort arch: any versions into descending order
    arch_any_versions.sort(version_cmp);
    highest_arch_any_version = arch_any_versions[0][0];

    pkg = packages[unique_id]["pkg"];
    suite = packages[unique_id]["suite"];
    delete_table = "bin_associations";
    delete_col = "bin";

    if apt_pkg.VersionCompare(highest_arch_any_version, arch_all_version) < 1:
        # arch: all dominates
        delete_packages(arch_any_versions, pkg, "all", suite,
                        arch_all_version, delete_table, delete_col, packages);
    else:
        # arch: any dominates
        delete_packages(arch_all_versions, pkg, "any", suite,
                        highest_arch_any_version, delete_table, delete_col,
                        packages);

#####################################################
# Per-suite&pkg&arch: resolve duplicate versions
def remove_duplicate_versions(versions, packages):
    # Sort versions into descending order
    versions.sort(version_cmp);
    dominant_versions = versions[0];
    dominated_versions = versions[1:];
    (dominant_version, dominant_unique_id) = dominant_versions;
    pkg = packages[dominant_unique_id]["pkg"];
    arch = packages[dominant_unique_id]["arch"];
    suite = packages[dominant_unique_id]["suite"];
    if arch == "source":
        delete_table = "src_associations";
        delete_col = "source";
    else: # !source
        delete_table = "bin_associations";
        delete_col = "bin";
    # Remove all but the highest
    delete_packages(dominated_versions, pkg, arch, suite,
                    dominant_version, delete_table, delete_col, packages);
    return dominant_versions;

################################################################################
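# Build a suite/package/architecture index of 'packages', drop everything but
# the highest version of each package and then resolve any arch: all
# vs. arch: any overlaps.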
def cleanup(packages):
    # Build up the index used by the clean up functions
    d = {};
    for unique_id in packages.keys():
        suite = packages[unique_id]["suite"];
        pkg = packages[unique_id]["pkg"];
        arch = packages[unique_id]["arch"];
        version = packages[unique_id]["version"];
        if not d.has_key(suite):
            d[suite] = {};
        if not d[suite].has_key(pkg):
            d[suite][pkg] = {};
        if not d[suite][pkg].has_key(arch):
            d[suite][pkg][arch] = [];
        d[suite][pkg][arch].append([version, unique_id]);
    # Clean up old versions
    for suite in d.keys():
        for pkg in d[suite].keys():
            for arch in d[suite][pkg].keys():
                versions = d[suite][pkg][arch];
                if len(versions) > 1:
                    d[suite][pkg][arch] = remove_duplicate_versions(versions, packages);

    # Arch: all -> any and vice versa
    for suite in d.keys():
        for pkg in d[suite].keys():
            arches = d[suite][pkg];
            # If we don't have any arch: all; we've nothing to do
            if not arches.has_key("all"):
                continue;
            # Check to see if we have arch: all and arch: !all (ignoring source)
            num_arches = len(arches.keys());
            if arches.has_key("source"):
                num_arches = num_arches - 1;
            # If we do, remove the duplicates
            if num_arches > 1:
                versions = [];
                for arch in arches.keys():
                    if arch != "source":
                        versions.extend(d[suite][pkg][arch]);
                resolve_arch_all_vs_any(versions, packages);

################################################################################
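# Write the single <suite>_-_all.list file list used for legacy-mixed suites
# (i.e. suites with no Components configured).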
def write_legacy_mixed_filelist(suite, list, packages, dislocated_files):
    # Work out the filename
    filename = os.path.join(Cnf["Dir::Lists"], "%s_-_all.list" % (suite));
    output = utils.open_file(filename, "w");
    # Generate the final list of files
    files = {};
    for id in list:
        path = packages[id]["path"];
        filename = packages[id]["filename"];
        file_id = packages[id]["file_id"];
        if suite == "stable" and dislocated_files.has_key(file_id):
            filename = dislocated_files[file_id];
        else:
            filename = path + filename;
        if files.has_key(filename):
            utils.warn("%s (in %s) is duplicated." % (filename, suite));
        else:
            files[filename] = "";
    # Sort the files since apt-ftparchive doesn't
    keys = files.keys();
    keys.sort();
    # Write the list of files out
    for file in keys:
        output.write(file+'\n')
    output.close();

############################################################
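# Write the file list for one suite/component/architecture/type combination
# (e.g. <suite>_<component>_binary-<arch>.list).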
def write_filelist(suite, component, arch, type, list, packages, dislocated_files):
    # Work out the filename
    if arch != "source":
        if type == "udeb":
            arch = "debian-installer_binary-%s" % (arch);
        else:
            arch = "binary-%s" % (arch);
    filename = os.path.join(Cnf["Dir::Lists"], "%s_%s_%s.list" % (suite, component, arch));
    output = utils.open_file(filename, "w");
    # Generate the final list of files
    files = {};
    for id in list:
        path = packages[id]["path"];
        filename = packages[id]["filename"];
        file_id = packages[id]["file_id"];
        pkg = packages[id]["pkg"];
        if suite == "stable" and dislocated_files.has_key(file_id):
            filename = dislocated_files[file_id];
        else:
            filename = path + filename;
        if files.has_key(pkg):
            utils.warn("%s (in %s/%s, %s) is duplicated." % (pkg, suite, component, filename));
        else:
            files[pkg] = filename;
    # Sort the files since apt-ftparchive doesn't
    keys = files.keys();
    keys.sort();
    # Write the list of files out
    for pkg in keys:
        output.write(files[pkg]+'\n')
    output.close();

################################################################################
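# Index 'packages' by suite/component/architecture/type and write the
# appropriate file list for each combination, folding arch: all binaries
# into each architecture's list.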
def write_filelists(packages, dislocated_files):
    # Build up the index to iterate over
    d = {};
    for unique_id in packages.keys():
        suite = packages[unique_id]["suite"];
        component = packages[unique_id]["component"];
        arch = packages[unique_id]["arch"];
        type = packages[unique_id]["type"];
        if not d.has_key(suite):
            d[suite] = {};
        if not d[suite].has_key(component):
            d[suite][component] = {};
        if not d[suite][component].has_key(arch):
            d[suite][component][arch] = {};
        if not d[suite][component][arch].has_key(type):
            d[suite][component][arch][type] = [];
        d[suite][component][arch][type].append(unique_id);
    # Flesh out the index
    if not Options["Suite"]:
        suites = Cnf.SubTree("Suite").List();
    else:
        suites = string.split(Options["Suite"]);
    for suite in map(string.lower, suites):
        if not d.has_key(suite):
            d[suite] = {};
        if not Options["Component"]:
            components = Cnf.ValueList("Suite::%s::Components" % (suite));
        else:
            components = string.split(Options["Component"]);
        udeb_components = Cnf.ValueList("Suite::%s::UdebComponents" % (suite));
        udeb_components = map(string.lower, udeb_components);
        for component in map(string.lower, components):
            if not d[suite].has_key(component):
                d[suite][component] = {};
            if component in udeb_components:
                binary_types = [ "deb", "udeb" ];
            else:
                binary_types = [ "deb" ];
            if not Options["Architecture"]:
                architectures = Cnf.ValueList("Suite::%s::Architectures" % (suite));
            else:
                architectures = string.split(Options["Architecture"]);
            for arch in map(string.lower, architectures):
                if not d[suite][component].has_key(arch):
                    d[suite][component][arch] = {};
                if arch == "source":
                    types = [ "dsc" ];
                else:
                    types = binary_types;
                for type in types:
                    if not d[suite][component][arch].has_key(type):
                        d[suite][component][arch][type] = [];

    for suite in d.keys():
        if Cnf.has_key("Suite::%s::Components" % (suite)):
            for component in d[suite].keys():
                for arch in d[suite][component].keys():
                    if arch == "all":
                        continue;
                    for type in d[suite][component][arch].keys():
                        list = d[suite][component][arch][type];
                        # If it's a binary, we need to add in the arch: all debs too
                        if arch != "source" and d[suite][component].has_key("all") \
                           and d[suite][component]["all"].has_key(type):
                            list.extend(d[suite][component]["all"][type]);
                        write_filelist(suite, component, arch, type, list,
                                       packages, dislocated_files);
        else: # legacy-mixed suite
            list = [];
            for component in d[suite].keys():
                for arch in d[suite][component].keys():
                    for type in d[suite][component][arch].keys():
                        list.extend(d[suite][component][arch][type]);
            write_legacy_mixed_filelist(suite, list, packages, dislocated_files);

################################################################################
# Want to use stable dislocation support: True or false?
def stable_dislocation_p():
    # If the support is not explicitly enabled, assume it's disabled
    if not Cnf.FindB("Dinstall::StableDislocationSupport"):
        return 0;
    # If we don't have a stable suite, obviously a no-op
    if not Cnf.has_key("Suite::Stable"):
        return 0;
    # If the suite(s) weren't explicitly listed, all suites are done
    if not Options["Suite"]:
        return 1;
    # Otherwise, look in what suites the user specified
    suites = string.split(Options["Suite"]);
    return suites.count("stable");

################################################################################
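# Pull the full list of binaries and (optionally) sources out of the database,
# clean up old and duplicate versions and write the file lists.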
def do_da_do_da():
    (con_suites, con_architectures, con_components, check_source) = parse_args();

    if stable_dislocation_p():
        dislocated_files = claire.find_dislocated_stable(Cnf, projectB);
    else:
        dislocated_files = {};

    query = """
SELECT b.id, b.package, a.arch_string, b.version, l.path, f.filename, c.name,
       f.id, su.suite_name, b.type
  FROM binaries b, bin_associations ba, architecture a, files f, location l,
       component c, suite su
  WHERE b.id = ba.bin AND b.file = f.id AND b.architecture = a.id
    AND f.location = l.id AND l.component = c.id AND ba.suite = su.id
    %s %s %s""" % (con_suites, con_architectures, con_components);
    if check_source:
        query = query + """
UNION
SELECT s.id, s.source, 'source', s.version, l.path, f.filename, c.name, f.id,
       su.suite_name, 'dsc'
  FROM source s, src_associations sa, files f, location l, component c, suite su
  WHERE s.id = sa.source AND s.file = f.id AND f.location = l.id
    AND l.component = c.id AND sa.suite = su.id %s %s""" % (con_suites, con_components);
    q = projectB.query(query);
    ql = q.getresult();
    # Build up the main index of packages
    packages = {};
    unique_id = 0;
    for i in ql:
        (id, pkg, arch, version, path, filename, component, file_id, suite, type) = i;
        # 'id' comes from either 'binaries' or 'source', so it's not unique
        unique_id = unique_id + 1;
        packages[unique_id] = Dict(id=id, pkg=pkg, arch=arch, version=version,
                                   path=path, filename=filename,
                                   component=component, file_id=file_id,
                                   suite=suite, type = type);
    cleanup(packages);
    write_filelists(packages, dislocated_files);

################################################################################
def main():
    global Cnf, projectB, Options, Logger;

    Cnf = utils.get_conf();
    Arguments = [('a', "architecture", "Jenna::Options::Architecture", "HasArg"),
                 ('c', "component", "Jenna::Options::Component", "HasArg"),
                 ('h', "help", "Jenna::Options::Help"),
                 ('n', "no-delete", "Jenna::Options::No-Delete"),
                 ('s', "suite", "Jenna::Options::Suite", "HasArg")];
    for i in ["architecture", "component", "help", "no-delete", "suite" ]:
        if not Cnf.has_key("Jenna::Options::%s" % (i)):
            Cnf["Jenna::Options::%s" % (i)] = "";
    apt_pkg.ParseCommandLine(Cnf, Arguments, sys.argv);
    Options = Cnf.SubTree("Jenna::Options");
    if Options["Help"]:
        usage();

    projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]));
    db_access.init(Cnf, projectB);
    Logger = logging.Logger(Cnf, "jenna");
    do_da_do_da();
    Logger.close();

################################################################################

if __name__ == '__main__':
    main()