"""
Generate file lists used by apt-ftparchive to generate Packages and Sources files

@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
@license: GNU General Public License version 2 or later
"""
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

################################################################################

# <elmo> I'm doing it in python btw.. nothing against your monster
# SQL, but the python wins in terms of speed and readiblity
# <aj> you mock my SQL!!!!
# <elmo> you want have contest of skillz??????
# <aj> all your skillz are belong to my sql!!!!
# <elmo> yo momma are belong to my python!!!!
# <aj> yo momma was SQLin' like a pig last night!

################################################################################
# NOTE(review): the stdlib/third-party import lines were dropped when this
# listing was extracted; restored here (copy, os, sys, apt_pkg, pg are all
# referenced below).
import copy
import os
import sys

import apt_pkg
import pg

from daklib import database
from daklib import logging
from daklib import utils
49 ################################################################################
51 Cnf = None #: Configuration, apt_pkg.Configuration
52 projectB = None #: database connection, pgobject
53 Logger = None #: Logger object
54 Options = None #: Parsed CommandLine arguments
56 ################################################################################
58 def Dict(**dict): return dict
60 ################################################################################
62 def usage (exit_code=0):
63 print """Usage: dak make-suite-file-list [OPTION]
64 Write out file lists suitable for use with apt-ftparchive.
66 -a, --architecture=ARCH only write file lists for this architecture
67 -c, --component=COMPONENT only write file lists for this component
68 -f, --force ignore Untouchable suite directives in dak.conf
69 -h, --help show this help and exit
70 -n, --no-delete don't delete older versions
71 -s, --suite=SUITE only write file lists for this suite
73 ARCH, COMPONENT and SUITE can be space separated lists, e.g.
74 --architecture=\"m68k i386\""""
77 ################################################################################
79 def version_cmp(a, b):
80 return -apt_pkg.VersionCompare(a[0], b[0])
82 #####################################################
84 def delete_packages(delete_versions, pkg, dominant_arch, suite,
85 dominant_version, delete_table, delete_col, packages):
86 suite_id = database.get_suite_id(suite)
87 for version in delete_versions:
88 delete_unique_id = version[1]
89 if not packages.has_key(delete_unique_id):
91 delete_version = version[0]
92 delete_id = packages[delete_unique_id]["sourceid"]
93 delete_arch = packages[delete_unique_id]["arch"]
94 if not Cnf.Find("Suite::%s::Untouchable" % (suite)) or Options["Force"]:
95 if Options["No-Delete"]:
96 print "Would delete %s_%s_%s in %s in favour of %s_%s" % (pkg, delete_arch, delete_version, suite, dominant_version, dominant_arch)
98 Logger.log(["dominated", pkg, delete_arch, delete_version, dominant_version, dominant_arch])
99 projectB.query("DELETE FROM %s WHERE suite = %s AND %s = %s" % (delete_table, suite_id, delete_col, delete_id))
100 del packages[delete_unique_id]
102 if Options["No-Delete"]:
103 print "Would delete %s_%s_%s in favour of %s_%s, but %s is untouchable" % (pkg, delete_arch, delete_version, dominant_version, dominant_arch, suite)
105 Logger.log(["dominated but untouchable", pkg, delete_arch, delete_version, dominant_version, dominant_arch])
107 #####################################################
109 def resolve_arch_all_vs_any(versions, packages):
110 """ Per-suite&pkg: resolve arch-all, vs. arch-any, assumes only one arch-all """
111 arch_all_version = None
112 arch_any_versions = copy.copy(versions)
113 for i in arch_any_versions:
115 arch = packages[unique_id]["arch"]
117 arch_all_versions = [i]
118 arch_all_version = i[0]
119 arch_any_versions.remove(i)
120 # Sort arch: any versions into descending order
121 arch_any_versions.sort(version_cmp)
122 highest_arch_any_version = arch_any_versions[0][0]
124 pkg = packages[unique_id]["pkg"]
125 suite = packages[unique_id]["suite"]
126 delete_table = "bin_associations"
129 if apt_pkg.VersionCompare(highest_arch_any_version, arch_all_version) < 1:
130 # arch: all dominates
131 delete_packages(arch_any_versions, pkg, "all", suite,
132 arch_all_version, delete_table, delete_col, packages)
134 # arch: any dominates
135 delete_packages(arch_all_versions, pkg, "any", suite,
136 highest_arch_any_version, delete_table, delete_col,
139 #####################################################
141 def remove_duplicate_versions(versions, packages):
142 """ Per-suite&pkg&arch: resolve duplicate versions """
143 # Sort versions into descending order
144 versions.sort(version_cmp)
145 dominant_versions = versions[0]
146 dominated_versions = versions[1:]
147 (dominant_version, dominant_unqiue_id) = dominant_versions
148 pkg = packages[dominant_unqiue_id]["pkg"]
149 arch = packages[dominant_unqiue_id]["arch"]
150 suite = packages[dominant_unqiue_id]["suite"]
152 delete_table = "src_associations"
153 delete_col = "source"
155 delete_table = "bin_associations"
157 # Remove all but the highest
158 delete_packages(dominated_versions, pkg, arch, suite,
159 dominant_version, delete_table, delete_col, packages)
160 return [dominant_versions]
162 ################################################################################
164 def cleanup(packages):
165 # Build up the index used by the clean up functions
167 for unique_id in packages.keys():
168 suite = packages[unique_id]["suite"]
169 pkg = packages[unique_id]["pkg"]
170 arch = packages[unique_id]["arch"]
171 version = packages[unique_id]["version"]
172 d.setdefault(suite, {})
173 d[suite].setdefault(pkg, {})
174 d[suite][pkg].setdefault(arch, [])
175 d[suite][pkg][arch].append([version, unique_id])
176 # Clean up old versions
177 for suite in d.keys():
178 for pkg in d[suite].keys():
179 for arch in d[suite][pkg].keys():
180 versions = d[suite][pkg][arch]
181 if len(versions) > 1:
182 d[suite][pkg][arch] = remove_duplicate_versions(versions, packages)
184 # Arch: all -> any and vice versa
185 for suite in d.keys():
186 for pkg in d[suite].keys():
187 arches = d[suite][pkg]
188 # If we don't have any arch: all; we've nothing to do
189 if not arches.has_key("all"):
191 # Check to see if we have arch: all and arch: !all (ignoring source)
192 num_arches = len(arches.keys())
193 if arches.has_key("source"):
195 # If we do, remove the duplicates
198 for arch in arches.keys():
200 versions.extend(d[suite][pkg][arch])
201 resolve_arch_all_vs_any(versions, packages)
203 ################################################################################
205 def write_filelist(suite, component, arch, type, list, packages, dislocated_files):
206 # Work out the filename
209 arch = "debian-installer_binary-%s" % (arch)
211 arch = "binary-%s" % (arch)
212 filename = os.path.join(Cnf["Dir::Lists"], "%s_%s_%s.list" % (suite, component, arch))
213 output = utils.open_file(filename, "w")
214 # Generate the final list of files
217 path = packages[fileid]["path"]
218 filename = packages[fileid]["filename"]
219 file_id = packages[fileid]["file_id"]
220 pkg = packages[fileid]["pkg"]
221 if suite == "stable" and dislocated_files.has_key(file_id):
222 filename = dislocated_files[file_id]
224 filename = path + filename
225 if files.has_key(pkg):
226 utils.warn("%s (in %s/%s, %s) is duplicated." % (pkg, suite, component, filename))
228 files[pkg] = filename
229 # Sort the files since apt-ftparchive doesn't
232 # Write the list of files out
234 output.write(files[pkg]+'\n')
237 ################################################################################
239 def write_filelists(packages, dislocated_files):
240 # Build up the index to iterate over
242 for unique_id in packages.keys():
243 suite = packages[unique_id]["suite"]
244 component = packages[unique_id]["component"]
245 arch = packages[unique_id]["arch"]
246 packagetype = packages[unique_id]["filetype"]
247 d.setdefault(suite, {})
248 d[suite].setdefault(component, {})
249 d[suite][component].setdefault(arch, {})
250 d[suite][component][arch].setdefault(packagetype, [])
251 d[suite][component][arch][packagetype].append(unique_id)
252 # Flesh out the index
253 if not Options["Suite"]:
254 suites = Cnf.SubTree("Suite").List()
256 suites = utils.split_args(Options["Suite"])
257 for suite in [ i.lower() for i in suites ]:
258 d.setdefault(suite, {})
259 if not Options["Component"]:
260 components = Cnf.ValueList("Suite::%s::Components" % (suite))
262 components = utils.split_args(Options["Component"])
263 udeb_components = Cnf.ValueList("Suite::%s::UdebComponents" % (suite))
264 for component in components:
265 d[suite].setdefault(component, {})
266 if component in udeb_components:
267 binary_types = [ "deb", "udeb" ]
269 binary_types = [ "deb" ]
270 if not Options["Architecture"]:
271 architectures = database.get_suite_architectures(suite)
273 architectures = utils.split_args(Options["Architectures"])
274 for arch in [ i.lower() for i in architectures ]:
275 d[suite][component].setdefault(arch, {})
280 for packagetype in types:
281 d[suite][component][arch].setdefault(packagetype, [])
283 for suite in d.keys():
284 if Cnf.has_key("Suite::%s::Components" % (suite)):
285 for component in d[suite].keys():
286 for arch in d[suite][component].keys():
289 for packagetype in d[suite][component][arch].keys():
290 filelist = d[suite][component][arch][packagetype]
291 # If it's a binary, we need to add in the arch: all debs too
293 archall_suite = Cnf.get("Make-Suite-File-List::ArchAllMap::%s" % (suite))
295 filelist.extend(d[archall_suite][component]["all"][packagetype])
296 elif d[suite][component].has_key("all") and \
297 d[suite][component]["all"].has_key(packagetype):
298 filelist.extend(d[suite][component]["all"][packagetype])
299 write_filelist(suite, component, arch, packagetype, filelist,
300 packages, dislocated_files)
301 else: # something broken
302 utils.warn("Suite %s has no components." % (suite))
304 ################################################################################
307 # If we're only doing a subset of suites, ensure we do enough to
308 # be able to do arch: all mapping.
310 suites = utils.split_args(Options["Suite"])
312 archall_suite = Cnf.get("Make-Suite-File-List::ArchAllMap::%s" % (suite))
313 if archall_suite and archall_suite not in suites:
314 utils.warn("Adding %s as %s maps Arch: all from it." % (archall_suite, suite))
315 suites.append(archall_suite)
316 Options["Suite"] = ",".join(suites)
318 (con_suites, con_architectures, con_components, check_source) = \
319 utils.parse_args(Options)
321 dislocated_files = {}
324 SELECT b.id, b.package, a.arch_string, b.version, l.path, f.filename, c.name,
325 f.id, su.suite_name, b.type
326 FROM binaries b, bin_associations ba, architecture a, files f, location l,
327 component c, suite su
328 WHERE b.id = ba.bin AND b.file = f.id AND b.architecture = a.id
329 AND f.location = l.id AND l.component = c.id AND ba.suite = su.id
330 %s %s %s""" % (con_suites, con_architectures, con_components)
334 SELECT s.id, s.source, 'source', s.version, l.path, f.filename, c.name, f.id,
336 FROM source s, src_associations sa, files f, location l, component c, suite su
337 WHERE s.id = sa.source AND s.file = f.id AND f.location = l.id
338 AND l.component = c.id AND sa.suite = su.id %s %s""" % (con_suites, con_components)
339 q = projectB.query(query)
341 # Build up the main index of packages
345 (sourceid, pkg, arch, version, path, filename, component, file_id, suite, filetype) = i
346 # 'id' comes from either 'binaries' or 'source', so it's not unique
348 packages[unique_id] = Dict(sourceid=sourceid, pkg=pkg, arch=arch, version=version,
349 path=path, filename=filename,
350 component=component, file_id=file_id,
351 suite=suite, filetype = filetype)
353 write_filelists(packages, dislocated_files)
355 ################################################################################
358 global Cnf, projectB, Options, Logger
360 Cnf = utils.get_conf()
361 Arguments = [('a', "architecture", "Make-Suite-File-List::Options::Architecture", "HasArg"),
362 ('c', "component", "Make-Suite-File-List::Options::Component", "HasArg"),
363 ('h', "help", "Make-Suite-File-List::Options::Help"),
364 ('n', "no-delete", "Make-Suite-File-List::Options::No-Delete"),
365 ('f', "force", "Make-Suite-File-List::Options::Force"),
366 ('s', "suite", "Make-Suite-File-List::Options::Suite", "HasArg")]
367 for i in ["architecture", "component", "help", "no-delete", "suite", "force" ]:
368 if not Cnf.has_key("Make-Suite-File-List::Options::%s" % (i)):
369 Cnf["Make-Suite-File-List::Options::%s" % (i)] = ""
370 apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv)
371 Options = Cnf.SubTree("Make-Suite-File-List::Options")
375 projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
376 database.init(Cnf, projectB)
377 Logger = logging.Logger(Cnf, "make-suite-file-list")
381 #########################################################################################
383 if __name__ == '__main__':