4 Generate file lists used by apt-ftparchive to generate Packages and Sources files
5 @contact: Debian FTP Master <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @license: GNU General Public License version 2 or later
10 # This program is free software; you can redistribute it and/or modify
11 # it under the terms of the GNU General Public License as published by
12 # the Free Software Foundation; either version 2 of the License, or
13 # (at your option) any later version.
15 # This program is distributed in the hope that it will be useful,
16 # but WITHOUT ANY WARRANTY; without even the implied warranty of
17 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 # GNU General Public License for more details.
20 # You should have received a copy of the GNU General Public License
21 # along with this program; if not, write to the Free Software
22 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
24 ################################################################################
26 # <elmo> I'm doing it in python btw.. nothing against your monster
27 # SQL, but the python wins in terms of speed and readiblity
32 # <aj> you mock my SQL!!!!
33 # <elmo> you want have contest of skillz??????
34 # <aj> all your skillz are belong to my sql!!!!
35 # <elmo> yo momma are belong to my python!!!!
36 # <aj> yo momma was SQLin' like a pig last night!
38 ################################################################################
45 from daklib.dbconn import *
46 from daklib.config import Config
47 from daklib import daklog
48 from daklib import utils
50 ################################################################################
# Module-level globals: populated in main() (see 'global Options, Logger'
# there) before any of the worker functions read them.
52 Logger = None #: Logger object
53 Options = None #: Parsed CommandLine arguments
55 ################################################################################
# Print the help/usage text for 'dak make-suite-file-list'.
# The exit_code parameter suggests the function also terminates the
# program afterwards (presumably sys.exit(exit_code)) -- the embedded
# original numbering jumps from 69 to 72 here, so that trailing line is
# missing from this excerpt; confirm against the full file.
# (No comments are inserted below: the body is one triple-quoted string.)
57 def usage (exit_code=0):
58 print """Usage: dak make-suite-file-list [OPTION]
59 Write out file lists suitable for use with apt-ftparchive.
61 -a, --architecture=ARCH only write file lists for this architecture
62 -c, --component=COMPONENT only write file lists for this component
63 -f, --force ignore Untouchable suite directives in dak.conf
64 -h, --help show this help and exit
65 -n, --no-delete don't delete older versions
66 -s, --suite=SUITE only write file lists for this suite
68 ARCH, COMPONENT and SUITE can be space separated lists, e.g.
69 --architecture=\"m68k i386\""""
72 ################################################################################
def version_cmp(a, b):
    """cmp()-style comparator for [version, unique_id] pairs.

    Compares only the version element (index 0) using the Debian version
    ordering from apt_pkg, negated so that sorting a list with this
    comparator puts the highest version first.
    """
    lhs_version = a[0]
    rhs_version = b[0]
    return -apt_pkg.VersionCompare(lhs_version, rhs_version)
77 #####################################################
# Remove dominated package versions from a suite.
#
# delete_versions: list of [version, unique_id] pairs to drop.
# pkg / dominant_arch / dominant_version: the winning package; used only
#   in the log and dry-run messages.
# delete_table / delete_col: which association table (and id column) the
#   DELETE runs against -- callers pass bin_associations or
#   src_associations.
# packages: the in-memory index; entries for deleted versions are removed
#   from it as well so later passes do not see them.
#
# NOTE(review): the embedded numbering is non-contiguous (86, 93, 95, 98,
# 101, 103 absent), so some control-flow lines -- presumably a 'continue'
# after the has_key() guard and the 'else:' lines pairing the "No-Delete"
# checks with the Logger/DELETE and "untouchable" paths -- are missing
# from this excerpt; confirm against the full file.
79 def delete_packages(delete_versions, pkg, dominant_arch, suite,
80 dominant_version, delete_table, delete_col, packages, session):
81 suite_o = get_suite(suite.lower(), session)
82 suite_id = suite_o.suite_id
83 for version in delete_versions:
84 delete_unique_id = version[1]
# Skip entries already removed from the index by an earlier pass.
85 if not packages.has_key(delete_unique_id):
87 delete_version = version[0]
88 delete_id = packages[delete_unique_id]["sourceid"]
89 delete_arch = packages[delete_unique_id]["arch"]
# Untouchable suites are only modified when --force was given.
90 if Options["Force"] or not suite_o.untouchable:
# --no-delete: report what would happen instead of doing it.
91 if Options["No-Delete"]:
92 print "Would delete %s_%s_%s in %s in favour of %s_%s" % (pkg, delete_arch, delete_version, suite, dominant_version, dominant_arch)
94 Logger.log(["dominated", pkg, delete_arch, delete_version, dominant_version, dominant_arch])
# Table/column names are interpolated into the SQL (they come from
# trusted callers, not user input); the ids are bound parameters.
96 session.execute("DELETE FROM %s WHERE suite = :suiteid AND %s = :delid" % (delete_table, delete_col), {'suiteid': suite_id, 'delid': delete_id})
97 del packages[delete_unique_id]
99 if Options["No-Delete"]:
100 print "Would delete %s_%s_%s in favour of %s_%s, but %s is untouchable" % (pkg, delete_arch, delete_version, dominant_version, dominant_arch, suite)
102 Logger.log(["dominated but untouchable", pkg, delete_arch, delete_version, dominant_version, dominant_arch])
104 #####################################################
# NOTE(review): numbering gaps at 111, 113, 120, 124-125, 130 and 134 --
# the assignments of 'unique_id' and 'delete_col', the 'if arch == "all":'
# test this loop implies, the 'else:' before the "any dominates" branch
# and the final 'packages, session)' argument line are missing from this
# excerpt; confirm against the full file.
106 def resolve_arch_all_vs_any(versions, packages, session):
107 """ Per-suite&pkg: resolve arch-all, vs. arch-any, assumes only one arch-all """
108 arch_all_version = None
# Work on a copy: the arch:all entry is removed from it below, leaving
# only the arch:any entries.
109 arch_any_versions = copy.copy(versions)
110 for i in arch_any_versions:
112 arch = packages[unique_id]["arch"]
114 arch_all_versions = [i]
115 arch_all_version = i[0]
116 arch_any_versions.remove(i)
117 # Sort arch: any versions into descending order
118 arch_any_versions.sort(version_cmp)
119 highest_arch_any_version = arch_any_versions[0][0]
121 pkg = packages[unique_id]["pkg"]
122 suite = packages[unique_id]["suite"]
# Binaries only: the arch:all vs arch:any question never applies to
# source packages.
123 delete_table = "bin_associations"
# VersionCompare(any, all) < 1 means the best arch:any version is not
# newer than the arch:all one, so the arch:all package wins.
126 if apt_pkg.VersionCompare(highest_arch_any_version, arch_all_version) < 1:
127 # arch: all dominates
128 delete_packages(arch_any_versions, pkg, "all", suite,
129 arch_all_version, delete_table, delete_col, packages, session)
131 # arch: any dominates
132 delete_packages(arch_all_versions, pkg, "any", suite,
133 highest_arch_any_version, delete_table, delete_col,
136 #####################################################
# NOTE(review): numbering gaps at 148, 151 and 153 -- the arch test that
# selects src_associations vs bin_associations (and the matching
# delete_col for the binary case) is partially missing from this excerpt;
# confirm against the full file.
138 def remove_duplicate_versions(versions, packages, session):
139 """ Per-suite&pkg&arch: resolve duplicate versions """
140 # Sort versions into descending order
141 versions.sort(version_cmp)
# Entries are [version, unique_id] pairs; after the descending sort the
# first entry holds the highest version and is the one that survives.
142 dominant_versions = versions[0]
143 dominated_versions = versions[1:]
# (sic: the 'unqiue' spelling is the original local variable name)
144 (dominant_version, dominant_unqiue_id) = dominant_versions
145 pkg = packages[dominant_unqiue_id]["pkg"]
146 arch = packages[dominant_unqiue_id]["arch"]
147 suite = packages[dominant_unqiue_id]["suite"]
149 delete_table = "src_associations"
150 delete_col = "source"
152 delete_table = "bin_associations"
154 # Remove all but the highest
155 delete_packages(dominated_versions, pkg, arch, suite,
156 dominant_version, delete_table, delete_col, packages, session)
# Return the survivor in the same list-of-pairs shape the caller stores
# in its index (see cleanup()).
157 return [dominant_versions]
159 ################################################################################
# Drop older/dominated versions from the packages index (and, via
# delete_packages, from the database).  Two passes over a nested index
# d[suite][pkg][arch] -> list of [version, unique_id]:
#   1. per suite/pkg/arch, keep only the highest version;
#   2. per suite/pkg, resolve arch:all vs arch:any conflicts.
# NOTE(review): numbering gaps at 163, 187, 191, 193-194 and 196 -- the
# initialisation of 'd' and of 'versions', a 'continue' after the "no
# arch: all" guard, and the counting logic deciding whether arch:all
# coexists with real architectures are missing from this excerpt; confirm
# against the full file.
161 def cleanup(packages, session):
162 # Build up the index used by the clean up functions
164 for unique_id in packages.keys():
165 suite = packages[unique_id]["suite"]
166 pkg = packages[unique_id]["pkg"]
167 arch = packages[unique_id]["arch"]
168 version = packages[unique_id]["version"]
169 d.setdefault(suite, {})
170 d[suite].setdefault(pkg, {})
171 d[suite][pkg].setdefault(arch, [])
172 d[suite][pkg][arch].append([version, unique_id])
173 # Clean up old versions
174 for suite in d.keys():
175 for pkg in d[suite].keys():
176 for arch in d[suite][pkg].keys():
177 versions = d[suite][pkg][arch]
178 if len(versions) > 1:
# Keep the index in sync with what survived the purge.
179 d[suite][pkg][arch] = remove_duplicate_versions(versions, packages, session)
181 # Arch: all -> any and vice versa
182 for suite in d.keys():
183 for pkg in d[suite].keys():
184 arches = d[suite][pkg]
185 # If we don't have any arch: all; we've nothing to do
186 if not arches.has_key("all"):
188 # Check to see if we have arch: all and arch: !all (ignoring source)
189 num_arches = len(arches.keys())
190 if arches.has_key("source"):
192 # If we do, remove the duplicates
195 for arch in arches.keys():
197 versions.extend(d[suite][pkg][arch])
198 resolve_arch_all_vs_any(versions, packages, session)
200 ################################################################################
# Write one apt-ftparchive file list: the pool filenames for the given
# suite/component/arch/type, one per line, into
# Dir::Lists/<suite>_<component>_<arch>.list.
# NOTE(review): heavy numbering gaps (203-204, 206-207, 209, 214-215,
# 222, 226, 229-230, 232, 234-235) -- the 'cnf'/'files' initialisation,
# the type test that selects the "debian-installer_binary-" naming, the
# loop headers, and the sort/close calls are missing from this excerpt;
# confirm against the full file.
202 def write_filelist(suite, component, arch, type, list, packages, dislocated_files):
205 # Work out the filename
208 arch = "debian-installer_binary-%s" % (arch)
210 arch = "binary-%s" % (arch)
211 filename = os.path.join(cnf["Dir::Lists"], "%s_%s_%s.list" % (suite, component, arch))
212 output = utils.open_file(filename, "w")
213 # Generate the final list of files
216 path = packages[fileid]["path"]
217 filename = packages[fileid]["filename"]
218 file_id = packages[fileid]["file_id"]
219 pkg = packages[fileid]["pkg"]
# Stable may serve some files from a dislocated (moved) location keyed
# by file id.
220 if suite == "stable" and dislocated_files.has_key(file_id):
221 filename = dislocated_files[file_id]
223 filename = path + filename
# One filename per package name; a second occurrence is reported but the
# later entry wins.
224 if files.has_key(pkg):
225 utils.warn("%s (in %s/%s, %s) is duplicated." % (pkg, suite, component, filename))
227 files[pkg] = filename
228 # Sort the files since apt-ftparchive doesn't
231 # Write the list of files out
233 output.write(files[pkg]+'\n')
236 ################################################################################
# Build the suite/component/arch/type index of unique ids, flesh it out
# with every suite/component/architecture requested on the command line
# (or configured), fold arch:all entries into each binary arch's list --
# optionally borrowing them from another suite via the
# Make-Suite-File-List::ArchAllMap config -- and hand each list to
# write_filelist().
# NOTE(review): numbering gaps (240-242, 256, 262, 269, 273, 277-280,
# 283, 288-289, 293, 295) -- the 'd'/'cnf' initialisation, several
# 'else:' lines, the 'types' computation, and the binary-vs-source test
# around the ArchAllMap handling are missing from this excerpt; confirm
# against the full file.
238 def write_filelists(packages, dislocated_files, session):
239 # Build up the index to iterate over
243 for unique_id in packages.keys():
244 suite = packages[unique_id]["suite"]
245 component = packages[unique_id]["component"]
246 arch = packages[unique_id]["arch"]
247 packagetype = packages[unique_id]["filetype"]
248 d.setdefault(suite, {})
249 d[suite].setdefault(component, {})
250 d[suite][component].setdefault(arch, {})
251 d[suite][component][arch].setdefault(packagetype, [])
252 d[suite][component][arch][packagetype].append(unique_id)
253 # Flesh out the index
# No --suite: process every suite in the configuration.
254 if not Options["Suite"]:
255 suites = cnf.SubTree("Suite").List()
257 suites = utils.split_args(Options["Suite"])
258 for suite in [ i.lower() for i in suites ]:
259 d.setdefault(suite, {})
260 if not Options["Component"]:
261 components = cnf.ValueList("Suite::%s::Components" % (suite))
263 components = utils.split_args(Options["Component"])
264 udeb_components = cnf.ValueList("Suite::%s::UdebComponents" % (suite))
265 for component in components:
266 d[suite].setdefault(component, {})
# Components configured for udebs get a "udeb" list as well as "deb".
267 if component in udeb_components:
268 binary_types = [ "deb", "udeb" ]
270 binary_types = [ "deb" ]
271 if not Options["Architecture"]:
272 architectures = [ a.arch_string for a in get_suite_architectures(suite, session=session) ]
274 architectures = utils.split_args(Options["Architecture"])
275 for arch in [ i.lower() for i in architectures ]:
276 d[suite][component].setdefault(arch, {})
281 for packagetype in types:
282 d[suite][component][arch].setdefault(packagetype, [])
284 for suite in d.keys():
285 if cnf.has_key("Suite::%s::Components" % (suite)):
286 for component in d[suite].keys():
287 for arch in d[suite][component].keys():
290 for packagetype in d[suite][component][arch].keys():
291 filelist = d[suite][component][arch][packagetype]
292 # If it's a binary, we need to add in the arch: all debs too
# ArchAllMap lets a suite borrow its arch:all packages from another
# suite rather than carrying its own "all" entries.
294 archall_suite = cnf.get("Make-Suite-File-List::ArchAllMap::%s" % (suite))
296 filelist.extend(d[archall_suite][component]["all"][packagetype])
297 elif d[suite][component].has_key("all") and \
298 d[suite][component]["all"].has_key(packagetype):
299 filelist.extend(d[suite][component]["all"][packagetype])
300 write_filelist(suite, component, arch, packagetype, filelist,
301 packages, dislocated_files)
302 else: # something broken
303 utils.warn("Suite %s has no components." % (suite))
305 ################################################################################
# NOTE(review): this is the interior of a function whose 'def' line
# (original ~306-309) is not part of this excerpt.  The bindings of
# 'suite', 'query' and 'packages', the per-suite loop header, the
# source-query assembly, the 'unique_id' construction, and the lines
# between the two SQL strings are also on missing lines (numbering gaps
# at 312, 314, 320, 323, 325, 327, 330-331, 339-341, 343, 347, 349-351,
# 355, 357, 363-364, 367).  No comments are inserted inside the two
# triple-quoted SQL strings below.
310 # If we're only doing a subset of suites, ensure we do enough to
311 # be able to do arch: all mapping.
313 suites = utils.split_args(Options["Suite"])
315 archall_suite = cnf.get("Make-Suite-File-List::ArchAllMap::%s" % (suite))
316 if archall_suite and archall_suite not in suites:
317 utils.warn("Adding %s as %s maps Arch: all from it." % (archall_suite, suite))
318 suites.append(archall_suite)
319 Options["Suite"] = ",".join(suites)
# parse_args turns the option strings into SQL constraint fragments
# spliced into the queries below.
321 (con_suites, con_architectures, con_components, check_source) = \
322 utils.parse_args(Options)
324 session = DBConn().session()
326 dislocated_files = {}
328 # TODO: Fix this properly
329 query_args = {'con_suites': con_suites, 'con_architectures': con_architectures, 'con_components': con_components}
332 SELECT b.id, b.package, a.arch_string, b.version, l.path, f.filename, c.name,
333 f.id, su.suite_name, b.type
334 FROM binaries b, bin_associations ba, architecture a, files f, location l,
335 component c, suite su
336 WHERE b.id = ba.bin AND b.file = f.id AND b.architecture = a.id
337 AND f.location = l.id AND l.component = c.id AND ba.suite = su.id
338 %(con_suites)s %(con_architectures)s %(con_components)s""" % query_args
342 SELECT s.id, s.source, 'source', s.version, l.path, f.filename, c.name, f.id,
344 FROM source s, src_associations sa, files f, location l, component c, suite su
345 WHERE s.id = sa.source AND s.file = f.id AND f.location = l.id
346 AND l.component = c.id AND sa.suite = su.id %(con_suites)s %(con_components)s""" % query_args
348 # Build up the main index of packages
352 q = session.execute(query)
353 for i in q.fetchall():
354 (sourceid, pkg, arch, version, path, filename, component, file_id, suite, filetype) = i
356 # 'id' comes from either 'binaries' or 'source', so it's not unique
358 packages[unique_id] = dict(sourceid=sourceid, pkg=pkg, arch=arch, version=version,
359 path=path, filename=filename,
360 component=component, file_id=file_id,
361 suite=suite, filetype = filetype)
362 cleanup(packages, session)
365 # has been replaced by 'dak generate-filelist':
366 #write_filelists(packages, dislocated_files, session)
368 ################################################################################
# NOTE(review): interior of main() -- the 'def main():' line, the binding
# of 'cnf', the --help/usage() handling, and the closing calls are on
# lines missing from this excerpt (numbering gaps at 370, 372-373,
# 385-389, 391-393); confirm against the full file.
371 global Options, Logger
# (short option, long option, config key[, "HasArg" if it takes a value])
374 Arguments = [('a', "architecture", "Make-Suite-File-List::Options::Architecture", "HasArg"),
375 ('c', "component", "Make-Suite-File-List::Options::Component", "HasArg"),
376 ('h', "help", "Make-Suite-File-List::Options::Help"),
377 ('n', "no-delete", "Make-Suite-File-List::Options::No-Delete"),
378 ('f', "force", "Make-Suite-File-List::Options::Force"),
379 ('s', "suite", "Make-Suite-File-List::Options::Suite", "HasArg")]
# Pre-seed every option key so later Options[...] lookups cannot fail.
380 for i in ["architecture", "component", "help", "no-delete", "suite", "force" ]:
381 if not cnf.has_key("Make-Suite-File-List::Options::%s" % (i)):
382 cnf["Make-Suite-File-List::Options::%s" % (i)] = ""
383 apt_pkg.ParseCommandLine(cnf.Cnf, Arguments, sys.argv)
384 Options = cnf.SubTree("Make-Suite-File-List::Options")
390 Logger = daklog.Logger(cnf.Cnf, "make-suite-file-list")
394 #########################################################################################
# Script entry point; the guarded call (original line 397, presumably
# main()) is on a line missing from this excerpt.
396 if __name__ == '__main__':