#!/usr/bin/env python
-# Generate file lists used by apt-ftparchive to generate Packages and Sources files
-# Copyright (C) 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
+"""
+Generate file lists used by apt-ftparchive to generate Packages and Sources files
+@contact: Debian FTP Master <ftpmaster@debian.org>
+@copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
+@license: GNU General Public License version 2 or later
+"""
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
################################################################################
-import copy, os, pg, sys
+import copy
+import os
+import pg
+import sys
import apt_pkg
-import symlink_dists
from daklib import database
from daklib import logging
from daklib import utils
################################################################################
-projectB = None
-Cnf = None
-Logger = None
-Options = None
+Cnf = None #: Configuration, apt_pkg.Configuration
+projectB = None #: database connection, pgobject
+Logger = None #: Logger object
+Options = None #: Parsed CommandLine arguments
################################################################################
if not packages.has_key(delete_unique_id):
continue
delete_version = version[0]
- delete_id = packages[delete_unique_id]["id"]
+ delete_id = packages[delete_unique_id]["sourceid"]
delete_arch = packages[delete_unique_id]["arch"]
- if not Cnf.Find("Suite::%s::Untouchable" % (suite)) or Options["Force"]:
+ if not database.get_suite_untouchable(suite) or Options["Force"]:
if Options["No-Delete"]:
print "Would delete %s_%s_%s in %s in favour of %s_%s" % (pkg, delete_arch, delete_version, suite, dominant_version, dominant_arch)
else:
#####################################################
-# Per-suite&pkg: resolve arch-all, vs. arch-any, assumes only one arch-all
def resolve_arch_all_vs_any(versions, packages):
+ """ Per-suite&pkg: resolve arch-all vs. arch-any; assumes only one arch-all """
arch_all_version = None
arch_any_versions = copy.copy(versions)
for i in arch_any_versions:
#####################################################
-# Per-suite&pkg&arch: resolve duplicate versions
def remove_duplicate_versions(versions, packages):
+ """ Per-suite&pkg&arch: resolve duplicate versions """
# Sort versions into descending order
versions.sort(version_cmp)
dominant_versions = versions[0]
################################################################################
-def write_legacy_mixed_filelist(suite, list, packages, dislocated_files):
- # Work out the filename
- filename = os.path.join(Cnf["Dir::Lists"], "%s_-_all.list" % (suite))
- output = utils.open_file(filename, "w")
- # Generate the final list of files
- files = {}
- for id in list:
- path = packages[id]["path"]
- filename = packages[id]["filename"]
- file_id = packages[id]["file_id"]
- if suite == "stable" and dislocated_files.has_key(file_id):
- filename = dislocated_files[file_id]
- else:
- filename = path + filename
- if files.has_key(filename):
- utils.warn("%s (in %s) is duplicated." % (filename, suite))
- else:
- files[filename] = ""
- # Sort the files since apt-ftparchive doesn't
- keys = files.keys()
- keys.sort()
- # Write the list of files out
- for file in keys:
- output.write(file+'\n')
- output.close()
-
-############################################################
-
def write_filelist(suite, component, arch, type, list, packages, dislocated_files):
# Work out the filename
if arch != "source":
output = utils.open_file(filename, "w")
# Generate the final list of files
files = {}
- for id in list:
- path = packages[id]["path"]
- filename = packages[id]["filename"]
- file_id = packages[id]["file_id"]
- pkg = packages[id]["pkg"]
+ for fileid in list:
+ path = packages[fileid]["path"]
+ filename = packages[fileid]["filename"]
+ file_id = packages[fileid]["file_id"]
+ pkg = packages[fileid]["pkg"]
if suite == "stable" and dislocated_files.has_key(file_id):
filename = dislocated_files[file_id]
else:
suite = packages[unique_id]["suite"]
component = packages[unique_id]["component"]
arch = packages[unique_id]["arch"]
- type = packages[unique_id]["type"]
+ packagetype = packages[unique_id]["filetype"]
d.setdefault(suite, {})
d[suite].setdefault(component, {})
d[suite][component].setdefault(arch, {})
- d[suite][component][arch].setdefault(type, [])
- d[suite][component][arch][type].append(unique_id)
+ d[suite][component][arch].setdefault(packagetype, [])
+ d[suite][component][arch][packagetype].append(unique_id)
# Flesh out the index
if not Options["Suite"]:
suites = Cnf.SubTree("Suite").List()
else:
components = utils.split_args(Options["Component"])
udeb_components = Cnf.ValueList("Suite::%s::UdebComponents" % (suite))
- udeb_components = udeb_components
for component in components:
d[suite].setdefault(component, {})
if component in udeb_components:
else:
binary_types = [ "deb" ]
if not Options["Architecture"]:
- architectures = Cnf.ValueList("Suite::%s::Architectures" % (suite))
+ architectures = database.get_suite_architectures(suite)
else:
-architectures = utils.split_args(Options["Architectures"])
+architectures = utils.split_args(Options["Architecture"])
for arch in [ i.lower() for i in architectures ]:
types = [ "dsc" ]
else:
types = binary_types
- for type in types:
- d[suite][component][arch].setdefault(type, [])
+ for packagetype in types:
+ d[suite][component][arch].setdefault(packagetype, [])
# Then walk it
for suite in d.keys():
if Cnf.has_key("Suite::%s::Components" % (suite)):
for arch in d[suite][component].keys():
if arch == "all":
continue
- for type in d[suite][component][arch].keys():
- list = d[suite][component][arch][type]
+ for packagetype in d[suite][component][arch].keys():
+ filelist = d[suite][component][arch][packagetype]
# If it's a binary, we need to add in the arch: all debs too
if arch != "source":
archall_suite = Cnf.get("Make-Suite-File-List::ArchAllMap::%s" % (suite))
if archall_suite:
- list.extend(d[archall_suite][component]["all"][type])
+ filelist.extend(d[archall_suite][component]["all"][packagetype])
elif d[suite][component].has_key("all") and \
- d[suite][component]["all"].has_key(type):
- list.extend(d[suite][component]["all"][type])
- write_filelist(suite, component, arch, type, list,
+ d[suite][component]["all"].has_key(packagetype):
+ filelist.extend(d[suite][component]["all"][packagetype])
+ write_filelist(suite, component, arch, packagetype, filelist,
packages, dislocated_files)
- else: # legacy-mixed suite
- list = []
- for component in d[suite].keys():
- for arch in d[suite][component].keys():
- for type in d[suite][component][arch].keys():
- list.extend(d[suite][component][arch][type])
- write_legacy_mixed_filelist(suite, list, packages, dislocated_files)
-
-################################################################################
-
-# Want to use stable dislocation support: True or false?
-def stable_dislocation_p():
- # If the support is not explicitly enabled, assume it's disabled
- if not Cnf.FindB("Dinstall::StableDislocationSupport"):
- return 0
- # If we don't have a stable suite, obviously a no-op
- if not Cnf.has_key("Suite::Stable"):
- return 0
- # If the suite(s) weren't explicitly listed, all suites are done
- if not Options["Suite"]:
- return 1
- # Otherwise, look in what suites the user specified
- suites = utils.split_args(Options["Suite"])
-
- if "stable" in suites:
- return 1
- else:
- return 0
+ else: # something broken
+ utils.warn("Suite %s has no components." % (suite))
################################################################################
(con_suites, con_architectures, con_components, check_source) = \
utils.parse_args(Options)
- if stable_dislocation_p():
- dislocated_files = symlink_dists.find_dislocated_stable(Cnf, projectB)
- else:
- dislocated_files = {}
+ dislocated_files = {}
query = """
SELECT b.id, b.package, a.arch_string, b.version, l.path, f.filename, c.name,
packages = {}
unique_id = 0
for i in ql:
- (id, pkg, arch, version, path, filename, component, file_id, suite, type) = i
+ (sourceid, pkg, arch, version, path, filename, component, file_id, suite, filetype) = i
-# 'id' comes from either 'binaries' or 'source', so it's not unique
+# 'sourceid' comes from either 'binaries' or 'source', so it's not unique
unique_id += 1
- packages[unique_id] = Dict(id=id, pkg=pkg, arch=arch, version=version,
+ packages[unique_id] = Dict(sourceid=sourceid, pkg=pkg, arch=arch, version=version,
path=path, filename=filename,
component=component, file_id=file_id,
- suite=suite, type = type)
+ suite=suite, filetype=filetype)
cleanup(packages)
write_filelists(packages, dislocated_files)
('n', "no-delete", "Make-Suite-File-List::Options::No-Delete"),
('f', "force", "Make-Suite-File-List::Options::Force"),
('s', "suite", "Make-Suite-File-List::Options::Suite", "HasArg")]
- for i in ["architecture", "component", "help", "no-delete", "suite", "force-touch" ]:
+ for i in ["architecture", "component", "help", "no-delete", "suite", "force" ]:
if not Cnf.has_key("Make-Suite-File-List::Options::%s" % (i)):
Cnf["Make-Suite-File-List::Options::%s" % (i)] = ""
apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv)