################################################################################
-def reverse_depends_check(removals, suites, arches=None):
+def reverse_depends_check(removals, suite, arches=None, session=None):
+ dbsuite = get_suite(suite, session)
cnf = Config()
print "Checking reverse dependencies..."
- components = get_component_names()
dep_problem = 0
p2c = {}
all_broken = {}
if arches:
all_arches = set(arches)
else:
- all_arches = set([x.arch_string for x in get_suite_architectures(suites[0])])
+ all_arches = set([x.arch_string for x in get_suite_architectures(suite)])
all_arches -= set(["source", "all"])
+ metakey_d = get_or_set_metadatakey("Depends", session)
+ metakey_p = get_or_set_metadatakey("Provides", session)
+ params = {
+ 'suite_id': dbsuite.suite_id,
+ 'metakey_d_id': metakey_d.key_id,
+ 'metakey_p_id': metakey_p.key_id,
+ 'arch_all_id' : get_architecture('all', session).arch_id,
+ }
for architecture in all_arches:
deps = {}
sources = {}
virtual_packages = {}
- for component in components:
- filename = "%s/dists/%s/%s/binary-%s/Packages.gz" % (cnf["Dir::Root"], suites[0], component, architecture)
- # apt_pkg.ParseTagFile needs a real file handle and can't handle a GzipFile instance...
- (fd, temp_filename) = utils.temp_filename()
- (result, output) = commands.getstatusoutput("gunzip -c %s > %s" % (filename, temp_filename))
- if (result != 0):
- utils.fubar("Gunzip invocation failed!\n%s\n" % (output), result)
- # Also check for udebs
- filename = "%s/dists/%s/%s/debian-installer/binary-%s/Packages.gz" % (cnf["Dir::Root"], suites[0], component, architecture)
- if os.path.exists(filename):
- (result, output) = commands.getstatusoutput("gunzip -c %s >> %s" % (filename, temp_filename))
- if (result != 0):
- utils.fubar("Gunzip invocation failed!\n%s\n" % (output), result)
- packages = utils.open_file(temp_filename)
- Packages = apt_pkg.ParseTagFile(packages)
- while Packages.Step():
- package = Packages.Section.Find("Package")
- source = Packages.Section.Find("Source")
- if not source:
- source = package
- elif ' ' in source:
- source = source.split(' ', 1)[0]
- sources[package] = source
- depends = Packages.Section.Find("Depends")
- if depends:
- deps[package] = depends
- provides = Packages.Section.Find("Provides")
- # Maintain a counter for each virtual package. If a
- # Provides: exists, set the counter to 0 and count all
- # provides by a package not in the list for removal.
- # If the counter stays 0 at the end, we know that only
- # the to-be-removed packages provided this virtual
- # package.
- if provides:
- for virtual_pkg in provides.split(","):
- virtual_pkg = virtual_pkg.strip()
- if virtual_pkg == package: continue
- if not virtual_packages.has_key(virtual_pkg):
- virtual_packages[virtual_pkg] = 0
- if package not in removals:
- virtual_packages[virtual_pkg] += 1
- p2c[package] = component
- packages.close()
- os.unlink(temp_filename)
+ params['arch_id'] = get_architecture(architecture, session).arch_id
+
+ statement = '''
+ WITH suite_binaries AS
+ (select b.id, b.package, b.source, b.file
+ from binaries b WHERE b.id in
+ (SELECT bin FROM bin_associations WHERE suite = :suite_id)
+ AND b.architecture in (:arch_id, :arch_all_id))
+ SELECT b.id, b.package, s.source, c.name as component,
+ bmd.value as depends, bmp.value as provides
+ FROM suite_binaries b
+ LEFT OUTER JOIN binaries_metadata bmd
+ ON b.id = bmd.bin_id AND bmd.key_id = :metakey_d_id
+ LEFT OUTER JOIN binaries_metadata bmp
+ ON b.id = bmp.bin_id AND bmp.key_id = :metakey_p_id
+ JOIN source s ON b.source = s.id
+ JOIN files f ON b.file = f.id
+ JOIN location l ON f.location = l.id
+ JOIN component c ON l.component = c.id'''
+ session.rollback()
+ query = session.query('id', 'package', 'source', 'component', 'depends', 'provides'). \
+ from_statement(statement).params(params)
+ for binary_id, package, source, component, depends, provides in query:
+ sources[package] = source
+ p2c[package] = component
+ if depends is not None:
+ deps[package] = depends
+ # Maintain a counter for each virtual package. If a
+ # Provides: exists, set the counter to 0 and count all
+ # provides by a package not in the list for removal.
+ # If the counter stays 0 at the end, we know that only
+ # the to-be-removed packages provided this virtual
+ # package.
+ if provides is not None:
+ for virtual_pkg in provides.split(","):
+ virtual_pkg = virtual_pkg.strip()
+ if virtual_pkg == package: continue
+ if not virtual_packages.has_key(virtual_pkg):
+ virtual_packages[virtual_pkg] = 0
+ if package not in removals:
+ virtual_packages[virtual_pkg] += 1
# If a virtual package is only provided by the to-be-removed
# packages, treat the virtual package as to-be-removed too.
# Check source dependencies (Build-Depends and Build-Depends-Indep)
all_broken.clear()
- for component in components:
- filename = "%s/dists/%s/%s/source/Sources.gz" % (cnf["Dir::Root"], suites[0], component)
- # apt_pkg.ParseTagFile needs a real file handle and can't handle a GzipFile instance...
- (fd, temp_filename) = utils.temp_filename()
- result, output = commands.getstatusoutput("gunzip -c %s > %s" % (filename, temp_filename))
- if result != 0:
- sys.stderr.write("Gunzip invocation failed!\n%s\n" % (output))
- sys.exit(result)
- sources = utils.open_file(temp_filename, "r")
- Sources = apt_pkg.ParseTagFile(sources)
- while Sources.Step():
- source = Sources.Section.Find("Package")
- if source in removals: continue
- parsed_dep = []
- for build_dep_type in ["Build-Depends", "Build-Depends-Indep"]:
- build_dep = Sources.Section.get(build_dep_type)
- if build_dep:
- # Remove [arch] information since we want to see breakage on all arches
- build_dep = re_build_dep_arch.sub("", build_dep)
- try:
- parsed_dep += apt_pkg.ParseDepends(build_dep)
- except ValueError, e:
- print "Error for source %s: %s" % (source, e)
- for dep in parsed_dep:
- unsat = 0
- for dep_package, _, _ in dep:
- if dep_package in removals:
- unsat += 1
- if unsat == len(dep):
- if component != "main":
- source = "%s/%s" % (source, component)
- all_broken.setdefault(source, set()).add(utils.pp_deps(dep))
- dep_problem = 1
- sources.close()
- os.unlink(temp_filename)
+ metakey_bd = get_or_set_metadatakey("Build-Depends", session)
+ metakey_bdi = get_or_set_metadatakey("Build-Depends-Indep", session)
+ params = {
+ 'suite_id': dbsuite.suite_id,
+ 'metakey_ids': (metakey_bd.key_id, metakey_bdi.key_id),
+ }
+ statement = '''
+ SELECT s.id, s.source, string_agg(sm.value, ', ') as build_dep
+ FROM source s
+ JOIN source_metadata sm ON s.id = sm.src_id
+ WHERE s.id in
+ (SELECT source FROM src_associations
+ WHERE suite = :suite_id)
+ AND sm.key_id in :metakey_ids
+ GROUP BY s.id, s.source'''
+ query = session.query('id', 'source', 'build_dep').from_statement(statement). \
+ params(params)
+ for source_id, source, build_dep in query:
+ if source in removals: continue
+ parsed_dep = []
+ if build_dep is not None:
+ # Remove [arch] information since we want to see breakage on all arches
+ build_dep = re_build_dep_arch.sub("", build_dep)
+ try:
+ parsed_dep += apt_pkg.ParseDepends(build_dep)
+ except ValueError, e:
+ print "Error for source %s: %s" % (source, e)
+ for dep in parsed_dep:
+ unsat = 0
+ for dep_package, _, _ in dep:
+ if dep_package in removals:
+ unsat += 1
+ if unsat == len(dep):
+ component = DBSource.get(source_id, session).get_component_name()
+ if component != "main":
+ source = "%s/%s" % (source, component)
+ all_broken.setdefault(source, set()).add(utils.pp_deps(dep))
+ dep_problem = 1
if all_broken:
print "# Broken Build-Depends:"
# Additional component processing
over_con_components = con_components.replace("c.id", "component")
+    # reverse_depends_check() now operates on a single suite, so refuse
+    # to run the dependency check when more than one suite was given
+ if Options["Rdep-Check"] and len(suites) > 1:
+ utils.fubar("Reverse dependency check on multiple suites is not implemented.")
+
print "Working...",
sys.stdout.flush()
to_remove = []
if Options["Rdep-Check"]:
arches = utils.split_args(Options["Architecture"])
- reverse_depends_check(removals, suites, arches)
+ reverse_depends_check(removals, suites[0], arches, session)
# If -n/--no-action, drop out here
if Options["No-Action"]: