#!/usr/bin/env python
-# Check for obsolete binary packages
-# Copyright (C) 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
+"""
+Check for obsolete binary packages
+
+@contact: Debian FTP Master <ftpmaster@debian.org>
+@copyright: 2000-2006 James Troup <james@nocrew.org>
+@copyright: 2009 Torsten Werner <twerner@debian.org>
+@license: GNU General Public License version 2 or later
+"""
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# you might as well write some letters to God about how unfair entropy
# is while you're at it.'' -- 20020802143104.GA5628@azure.humbug.org.au
-## TODO: fix NBS looping for version, implement Dubious NBS, fix up output of duplicate source package stuff, improve experimental ?, add support for non-US ?, add overrides, avoid ANAIS for duplicated packages
+## TODO: fix NBS looping for version, implement Dubious NBS, fix up output of
+## duplicate source package stuff, improve experimental ?, add overrides,
+## avoid ANAIS for duplicated packages
################################################################################
-import commands, pg, os, string, sys, time
-import dak.lib.utils, dak.lib.database
+import commands, os, sys, re
import apt_pkg
+from daklib.config import Config
+from daklib.dbconn import *
+from daklib import utils
+from daklib.regexes import re_extract_src_version
+from daklib.cruft import *
+
################################################################################
-Cnf = None
-projectB = None
-suite_id = None
no_longer_in_suite = {}; # Really should be static to add_nbs, but I'm lazy
source_binaries = {}
Check for obsolete or duplicated packages.
-h, --help show this help and exit.
- -m, --mode=MODE chose the MODE to run in (full or daily).
- -s, --suite=SUITE check suite SUITE."""
+ -m, --mode=MODE chose the MODE to run in (full, daily, bdo).
+ -s, --suite=SUITE check suite SUITE.
+ -R, --rdep-check check reverse dependencies
+ -w, --wanna-build-dump where to find the copies of https://buildd.debian.org/stats/*.txt"""
sys.exit(exit_code)
################################################################################
# NOTE(review): diff-mangled hunk. '-' lines are the old pg/projectB query;
# '+' lines are the SQLAlchemy session.execute() replacement with bound
# parameters. The tail of the function (the code that actually records the
# package in nbs_d) is elided by the diff — do not assume it is absent.
-def add_nbs(nbs_d, source, version, package):
+def add_nbs(nbs_d, source, version, package, suite_id, session):
# Ensure the package is still in the suite (someone may have already removed it)
if no_longer_in_suite.has_key(package):
return
else:
-        q = projectB.query("SELECT b.id FROM binaries b, bin_associations ba WHERE ba.bin = b.id AND ba.suite = %s AND b.package = '%s' LIMIT 1" % (suite_id, package))
-        if not q.getresult():
+        q = session.execute("""SELECT b.id FROM binaries b, bin_associations ba
+                               WHERE ba.bin = b.id AND ba.suite = :suite_id
+                               AND b.package = :package LIMIT 1""", {'suite_id': suite_id,
+                                                                     'package': package})
+        if not q.fetchall():
# cache negative result so later calls skip the DB round-trip
no_longer_in_suite[package] = ""
return
################################################################################
# Check for packages built on architectures they shouldn't be.
# NOTE(review): diff-mangled hunk. Interior lines of this function (the
# initialisation of 'architectures'/'anais_output' and the building of
# 'versions_d') are elided by the diff; only the query migration from
# projectB.query to session.execute and the apt_pkg.VersionCompare ->
# apt_pkg.version_compare rename are visible here.
-def do_anais(architecture, binaries_list, source):
+def do_anais(architecture, binaries_list, source, session):
if architecture == "any" or architecture == "all":
return ""
for arch in architecture.split():
architectures[arch.strip()] = ""
for binary in binaries_list:
-        q = projectB.query("SELECT a.arch_string, b.version FROM binaries b, bin_associations ba, architecture a WHERE ba.suite = %s AND ba.bin = b.id AND b.architecture = a.id AND b.package = '%s'" % (suite_id, binary))
-        ql = q.getresult()
+        q = session.execute("""SELECT a.arch_string, b.version
+                               FROM binaries b, bin_associations ba, architecture a
+                               WHERE ba.suite = :suiteid AND ba.bin = b.id
+                               AND b.architecture = a.id AND b.package = :package""",
+                               {'suiteid': suite_id, 'package': binary})
+        ql = q.fetchall()
versions = []
for i in ql:
arch = i[0]
version = i[1]
if architectures.has_key(arch):
versions.append(version)
-        versions.sort(apt_pkg.VersionCompare)
+        versions.sort(apt_pkg.version_compare)
if versions:
latest_version = versions.pop()
else:
if versions_d != {}:
anais_output += "\n (*) %s_%s [%s]: %s\n" % (binary, latest_version, source, architecture)
versions = versions_d.keys()
-            versions.sort(apt_pkg.VersionCompare)
+            versions.sort(apt_pkg.version_compare)
for version in versions:
arches = versions_d[version]
arches.sort()
anais_output += " o %s: %s\n" % (version, ", ".join(arches))
return anais_output
+
################################################################################
-def do_nviu():
- experimental_id = dak.lib.database.get_suite_id("experimental")
- if experimental_id == -1:
- return
- # Check for packages in experimental obsoleted by versions in unstable
- q = projectB.query("""
-SELECT s.source, s.version AS experimental, s2.version AS unstable
- FROM src_associations sa, source s, source s2, src_associations sa2
- WHERE sa.suite = %s AND sa2.suite = %d AND sa.source = s.id
- AND sa2.source = s2.id AND s.source = s2.source
- AND versioncmp(s.version, s2.version) < 0""" % (experimental_id,
- dak.lib.database.get_suite_id("unstable")))
- ql = q.getresult()
- if ql:
- nviu_to_remove = []
- print "Newer version in unstable"
- print "-------------------------"
- print
- for i in ql:
- (source, experimental_version, unstable_version) = i
- print " o %s (%s, %s)" % (source, experimental_version, unstable_version)
- nviu_to_remove.append(source)
+# Check for out-of-date binaries on architectures that do not want to build that
+# package any more, and have them listed as Not-For-Us
+def do_nfu(nfu_packages):
+ output = ""
+
+ a2p = {}
+
+ for architecture in nfu_packages:
+ a2p[architecture] = []
+ for (package,bver,sver) in nfu_packages[architecture]:
+ output += " * [%s] does not want %s (binary %s, source %s)\n" % (architecture, package, bver, sver)
+ a2p[architecture].append(package)
+
+
+ if output:
+ print "Obsolete by Not-For-Us"
+ print "----------------------"
print
- print "Suggested command:"
- print " dak rm -m \"[auto-cruft] NVIU\" -s experimental %s" % (" ".join(nviu_to_remove))
+ print output
+
+ print "Suggested commands:"
+ for architecture in a2p:
+ if a2p[architecture]:
+ print (" dak rm -m \"[auto-cruft] NFU\" -s %s -a %s -b %s" %
+ (suite.suite_name, architecture, " ".join(a2p[architecture])))
print
-################################################################################
def parse_nfu(architecture):
    """Return the set of source package names marked Not-For-Us for
    *architecture* in the wanna-build dump.

    Reads <Wanna-Build-Dump>/<architecture>-all.txt; returns an empty set
    (with a warning) if no dump exists for the architecture.
    """
    cnf = Config()
    # Example dump line we want to match:
    #   utils/hpodder_1.1.5.0: Not-For-Us [optional:out-of-date]
    # Fix: use a raw string — '\w' in a plain literal is an invalid escape
    # (DeprecationWarning on newer Pythons) and only worked by accident.
    r = re.compile(r"^\w+/([^_]+)_.*: Not-For-Us")

    ret = set()

    filename = "%s/%s-all.txt" % (cnf["Cruft-Report::Options::Wanna-Build-Dump"], architecture)

    # Not all architectures may have a wanna-build dump, so we want to ignore
    # missing files.
    if os.path.exists(filename):
        f = utils.open_file(filename)
        for line in f:
            # continuation lines are indented; only section headers matter
            if line[0] == ' ':
                continue

            m = r.match(line)
            if m:
                ret.add(m.group(1))

        f.close()
    else:
        utils.warn("No wanna-build dump file for architecture %s" % architecture)
    return ret
+
+################################################################################
+def do_newer_version(lowersuite_name, highersuite_name, code, session):
+ list = newer_version(lowersuite_name, highersuite_name, session)
+ if len(list) > 0:
+ nv_to_remove = []
+ title = "Newer version in %s" % lowersuite_name
+ print title
+ print "-" * len(title)
+ print
+ for i in list:
+ (source, higher_version, lower_version) = i
+ print " o %s (%s, %s)" % (source, higher_version, lower_version)
+ nv_to_remove.append(source)
+ print
print "Suggested command:"
- print " dak rm -m \"[auto-cruft] NBS\" -b %s" % (" ".join(nbs_to_remove))
+ print " dak rm -m \"[auto-cruft] %s\" -s %s %s" % (code, highersuite_name,
+ " ".join(nv_to_remove))
print
################################################################################
+
+def reportWithoutSource(suite_name, suite_id, session, rdeps=False):
+ rows = query_without_source(suite_id, session)
+ title = 'packages without source in suite %s' % suite_name
+ if rows.rowcount > 0:
+ print '%s\n%s\n' % (title, '-' * len(title))
+ message = '"[auto-cruft] no longer built from source"'
+ for row in rows:
+ (package, version) = row
+ print "* package %s in version %s is no longer built from source" % \
+ (package, version)
+ print " - suggested command:"
+ print " dak rm -m %s -s %s -a all -p -R -b %s" % \
+ (message, suite_name, package)
+ if rdeps:
+ if utils.check_reverse_depends([package], suite_name, [], session, True):
+ print
+ else:
+ print " - No dependency problem found\n"
+ else:
+ print
+
def queryNewerAll(suite_name, session):
    """Find arch-specific (arch != all) binaries that are outversioned by
    an arch:all binary of the same name in the same suite.

    Architecture id 2 is 'all'; ids > 2 are the real architectures."""

    sql = """
select bab1.package, bab1.version as oldver,
    array_to_string(array_agg(a.arch_string), ',') as oldarch,
    bab2.version as newver
    from bin_associations_binaries bab1
    join bin_associations_binaries bab2
        on bab1.package = bab2.package and bab1.version < bab2.version and
        bab1.suite = bab2.suite and bab1.architecture > 2 and
        bab2.architecture = 2
    join architecture a on bab1.architecture = a.id
    join suite s on bab1.suite = s.id
    where s.suite_name = :suite_name
    group by bab1.package, oldver, bab1.suite, newver"""
    params = { 'suite_name': suite_name }
    return session.execute(sql, params)
+
+def reportNewerAll(suite_name, session):
+ rows = queryNewerAll(suite_name, session)
+ title = 'obsolete arch any packages in suite %s' % suite_name
+ if rows.rowcount > 0:
+ print '%s\n%s\n' % (title, '-' * len(title))
+ message = '"[auto-cruft] obsolete arch any package"'
+ for row in rows:
+ (package, oldver, oldarch, newver) = row
+ print "* package %s is arch any in version %s but arch all in version %s" % \
+ (package, oldver, newver)
+ print " - suggested command:"
+ print " dak rm -m %s -s %s -a %s -p -b %s\n" % \
+ (message, suite_name, oldarch, package)
+
+
+
+def reportNBS(suite_name, suite_id, rdeps=False):
+ session = DBConn().session()
+ nbsRows = queryNBS(suite_id, session)
+ title = 'NBS packages in suite %s' % suite_name
+ if nbsRows.rowcount > 0:
+ print '%s\n%s\n' % (title, '-' * len(title))
+ for row in nbsRows:
+ (pkg_list, arch_list, source, version) = row
+ pkg_string = ' '.join(pkg_list)
+ arch_string = ','.join(arch_list)
+ print "* source package %s version %s no longer builds" % \
+ (source, version)
+ print " binary package(s): %s" % pkg_string
+ print " on %s" % arch_string
+ print " - suggested command:"
+ message = '"[auto-cruft] NBS (no longer built by %s)"' % source
+ print " dak rm -m %s -s %s -a %s -p -R -b %s" % \
+ (message, suite_name, arch_string, pkg_string)
+ if rdeps:
+ if utils.check_reverse_depends(pkg_list, suite_name, arch_list, session, True):
+ print
+ else:
+ print " - No dependency problem found\n"
+ else:
+ print
+ session.close()
+
def reportAllNBS(suite_name, suite_id, session, rdeps=False):
    """Run every 'not built from source'-style report for one suite:
    binaries without source, obsolete arch-any binaries, and NBS."""
    reportWithoutSource(suite_name, suite_id, session, rdeps)
    reportNewerAll(suite_name, session)
    reportNBS(suite_name, suite_id, rdeps)
+
+################################################################################
+
# NOTE(review): diff-mangled hunk. The loop header over dubious_nbs_keys and
# the "* %s_%s builds:" print line are elided by the diff (only the trailing
# 'source_binaries.get(...)' argument of that print survives below); the
# visible changes are apt_pkg.VersionCompare -> apt_pkg.version_compare and
# whitespace tweaks on bare 'print' statements.
def do_dubious_nbs(dubious_nbs):
print "Dubious NBS"
print "-----------"
-    print
+    print
dubious_nbs_keys = dubious_nbs.keys()
dubious_nbs_keys.sort()
source_binaries.get(source, "(source does not exist)"))
print " won't admit to building:"
versions = dubious_nbs[source].keys()
-        versions.sort(apt_pkg.VersionCompare)
+        versions.sort(apt_pkg.version_compare)
for version in versions:
packages = dubious_nbs[source][version].keys()
packages.sort()
print " o %s: %s" % (version, ", ".join(packages))
-        print
+        print
################################################################################
-def do_obsolete_source(duplicate_bins, bin2source):
- obsolete = {}
- for key in duplicate_bins.keys():
- (source_a, source_b) = key.split('~')
- for source in [ source_a, source_b ]:
- if not obsolete.has_key(source):
- if not source_binaries.has_key(source):
- # Source has already been removed
- continue
- else:
- obsolete[source] = map(string.strip,
- source_binaries[source].split(','))
- for binary in duplicate_bins[key]:
- if bin2source.has_key(binary) and bin2source[binary]["source"] == source:
- continue
- if binary in obsolete[source]:
- obsolete[source].remove(binary)
-
- to_remove = []
- output = "Obsolete source package\n"
- output += "-----------------------\n\n"
- obsolete_keys = obsolete.keys()
- obsolete_keys.sort()
- for source in obsolete_keys:
- if not obsolete[source]:
- to_remove.append(source)
- output += " * %s (%s)\n" % (source, source_versions[source])
- for binary in map(string.strip, source_binaries[source].split(',')):
- if bin2source.has_key(binary):
- output += " o %s (%s) is built by %s.\n" \
- % (binary, bin2source[binary]["version"],
- bin2source[binary]["source"])
- else:
- output += " o %s is not built.\n" % binary
- output += "\n"
def obsolete_source(suite_name, session):
    """Return obsolete source packages in *suite_name* that have no
    binaries left in the same suite, ordered by install_date.

    install_date helps detect source-only (or binary throw-away) uploads.
    Sources that occur more than once in the suite are skipped: the
    'source_suite_unique' CTE keeps only source names appearing exactly
    once, because neither cruft-report nor rm can handle duplicates (yet).
    """

    sql = """
WITH source_suite_unique AS
    (SELECT source, suite
        FROM source_suite GROUP BY source, suite HAVING count(*) = 1)
SELECT ss.src, ss.source, ss.version,
    to_char(ss.install_date, 'YYYY-MM-DD') AS install_date
    FROM source_suite ss
    JOIN source_suite_unique ssu
        ON ss.source = ssu.source AND ss.suite = ssu.suite
    JOIN suite s ON s.id = ss.suite
    LEFT JOIN bin_associations_binaries bab
        ON ss.src = bab.source AND ss.suite = bab.suite
    WHERE s.suite_name = :suite_name AND bab.id IS NULL
        AND now() - ss.install_date > '1 day'::interval
    ORDER BY install_date"""
    return session.execute(sql, { 'suite_name': suite_name })
+
def source_bin(source, session):
    """Return the binary package names ever built by *source* (regardless
    of suite), grouped and ordered by package name."""

    sql = """
SELECT b.package
    FROM binaries b
    JOIN src_associations_src sas ON b.source = sas.src
    WHERE sas.source = :source
    GROUP BY b.package
    ORDER BY b.package"""
    return session.execute(sql, { 'source': source })
+
def newest_source_bab(suite_name, package, session):
    """Return the newest source version(s) that build binary *package* in
    the suite, grouped and sorted by source and package name."""

    sql = """
SELECT sas.source, MAX(sas.version) AS srcver
    FROM src_associations_src sas
    JOIN bin_associations_binaries bab ON sas.src = bab.source
    JOIN suite s on s.id = bab.suite
    WHERE s.suite_name = :suite_name AND bab.package = :package
    GROUP BY sas.source, bab.package
    ORDER BY sas.source, bab.package"""
    return session.execute(sql, { 'suite_name': suite_name, 'package': package })
+
+def report_obsolete_source(suite_name, session):
+ rows = obsolete_source(suite_name, session)
+ if rows.rowcount == 0:
+ return
+ print \
+"""Obsolete source packages in suite %s
+----------------------------------%s\n""" % \
+ (suite_name, '-' * len(suite_name))
+ for os_row in rows.fetchall():
+ (src, old_source, version, install_date) = os_row
+ print " * obsolete source %s version %s installed at %s" % \
+ (old_source, version, install_date)
+ for sb_row in source_bin(old_source, session):
+ (package, ) = sb_row
+ print " - has built binary %s" % package
+ for nsb_row in newest_source_bab(suite_name, package, session):
+ (new_source, srcver) = nsb_row
+ print " currently built by source %s version %s" % \
+ (new_source, srcver)
+ print " - suggested command:"
+ rm_opts = "-S -p -m \"[auto-cruft] obsolete source package\""
+ print " dak rm -s %s %s %s\n" % (suite_name, rm_opts, old_source)
+
+def get_suite_binaries(suite, session):
+ # Initalize a large hash table of all binary packages
+ binaries = {}
+
+ print "Getting a list of binary packages in %s..." % suite.suite_name
+ q = session.execute("""SELECT distinct b.package
+ FROM binaries b, bin_associations ba
+ WHERE ba.suite = :suiteid AND ba.bin = b.id""",
+ {'suiteid': suite.suite_id})
+ for i in q.fetchall():
+ binaries[i[0]] = ""
+
+ return binaries
- if to_remove:
- print output
+################################################################################
- print "Suggested command:"
- print " dak rm -S -p -m \"[auto-cruft] obsolete source package\" %s" % (" ".join(to_remove))
- print
+def report_outdated_nonfree(suite, session, rdeps=False):
+
+ packages = {}
+ query = """WITH outdated_sources AS (
+ SELECT s.source, s.version, s.id
+ FROM source s
+ JOIN src_associations sa ON sa.source = s.id
+ WHERE sa.suite IN (
+ SELECT id
+ FROM suite
+ WHERE suite_name = :suite )
+ AND sa.created < (now() - interval :delay)
+ EXCEPT SELECT s.source, max(s.version) AS version, max(s.id)
+ FROM source s
+ JOIN src_associations sa ON sa.source = s.id
+ WHERE sa.suite IN (
+ SELECT id
+ FROM suite
+ WHERE suite_name = :suite )
+ AND sa.created < (now() - interval :delay)
+ GROUP BY s.source ),
+ binaries AS (
+ SELECT b.package, s.source, (
+ SELECT a.arch_string
+ FROM architecture a
+ WHERE a.id = b.architecture ) AS arch
+ FROM binaries b
+ JOIN outdated_sources s ON s.id = b.source
+ JOIN bin_associations ba ON ba.bin = b.id
+ JOIN override o ON o.package = b.package AND o.suite = ba.suite
+ WHERE ba.suite IN (
+ SELECT id
+ FROM suite
+ WHERE suite_name = :suite )
+ AND o.component IN (
+ SELECT id
+ FROM component
+ WHERE name = 'non-free' ) )
+ SELECT DISTINCT package, source, arch
+ FROM binaries
+ ORDER BY source, package, arch"""
+
+ res = session.execute(query, {'suite': suite, 'delay': "'15 days'"})
+ for package in res:
+ binary = package[0]
+ source = package[1]
+ arch = package[2]
+ if arch == 'all':
+ continue
+ if not source in packages:
+ packages[source] = {}
+ if not binary in packages[source]:
+ packages[source][binary] = set()
+ packages[source][binary].add(arch)
+ if packages:
+ title = 'Outdated non-free binaries in suite %s' % suite
+ message = '"[auto-cruft] outdated non-free binaries"'
+ print '%s\n%s\n' % (title, '-' * len(title))
+ for source in sorted(packages):
+ archs = set()
+ binaries = set()
+ print '* package %s has outdated non-free binaries' % source
+ print ' - suggested command:'
+ for binary in sorted(packages[source]):
+ binaries.add(binary)
+ archs = archs.union(packages[source][binary])
+ print ' dak rm -m %s -s %s -a %s -p -R -b %s' % \
+ (message, suite, ','.join(archs), ' '.join(binaries))
+ if rdeps:
+ if utils.check_reverse_depends(list(binaries), suite, archs, session, True):
+ print
+ else:
+ print " - No dependency problem found\n"
+ else:
+ print
################################################################################
def main ():
- global Cnf, projectB, suite_id, source_binaries, source_versions
+ global suite, suite_id, source_binaries, source_versions
- Cnf = dak.lib.utils.get_conf()
+ cnf = Config()
Arguments = [('h',"help","Cruft-Report::Options::Help"),
('m',"mode","Cruft-Report::Options::Mode", "HasArg"),
- ('s',"suite","Cruft-Report::Options::Suite","HasArg")]
- for i in [ "help" ]:
- if not Cnf.has_key("Cruft-Report::Options::%s" % (i)):
- Cnf["Cruft-Report::Options::%s" % (i)] = ""
- Cnf["Cruft-Report::Options::Suite"] = Cnf["Dinstall::DefaultSuite"]
+ ('R',"rdep-check", "Cruft-Report::Options::Rdep-Check"),
+ ('s',"suite","Cruft-Report::Options::Suite","HasArg"),
+ ('w',"wanna-build-dump","Cruft-Report::Options::Wanna-Build-Dump","HasArg")]
+ for i in [ "help", "Rdep-Check" ]:
+ if not cnf.has_key("Cruft-Report::Options::%s" % (i)):
+ cnf["Cruft-Report::Options::%s" % (i)] = ""
+
+ cnf["Cruft-Report::Options::Suite"] = cnf.get("Dinstall::DefaultSuite", "unstable")
+
+ if not cnf.has_key("Cruft-Report::Options::Mode"):
+ cnf["Cruft-Report::Options::Mode"] = "daily"
- if not Cnf.has_key("Cruft-Report::Options::Mode"):
- Cnf["Cruft-Report::Options::Mode"] = "daily"
+ if not cnf.has_key("Cruft-Report::Options::Wanna-Build-Dump"):
+ cnf["Cruft-Report::Options::Wanna-Build-Dump"] = "/srv/ftp-master.debian.org/scripts/nfu"
- apt_pkg.ParseCommandLine(Cnf, Arguments, sys.argv)
+ apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
- Options = Cnf.SubTree("Cruft-Report::Options")
+ Options = cnf.subtree("Cruft-Report::Options")
if Options["Help"]:
- usage()
+ usage()
+
+ if Options["Rdep-Check"]:
+ rdeps = True
+ else:
+ rdeps = False
# Set up checks based on mode
if Options["Mode"] == "daily":
- checks = [ "nbs", "nviu", "obsolete source" ]
+ checks = [ "nbs", "nviu", "nvit", "obsolete source", "outdated non-free", "nfu" ]
elif Options["Mode"] == "full":
- checks = [ "nbs", "nviu", "obsolete source", "dubious nbs", "bnb", "bms", "anais" ]
+ checks = [ "nbs", "nviu", "nvit", "obsolete source", "outdated non-free", "nfu", "dubious nbs", "bnb", "bms", "anais" ]
+ elif Options["Mode"] == "bdo":
+ checks = [ "nbs", "obsolete source" ]
else:
- dak.lib.utils.warn("%s is not a recognised mode - only 'full' or 'daily' are understood." % (Options["Mode"]))
+ utils.warn("%s is not a recognised mode - only 'full', 'daily' or 'bdo' are understood." % (Options["Mode"]))
usage(1)
- projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
- dak.lib.database.init(Cnf, projectB)
+ session = DBConn().session()
bin_pkgs = {}
src_pkgs = {}
source_versions = {}
anais_output = ""
- duplicate_bins = {}
- suite = Options["Suite"]
- suite_id = dak.lib.database.get_suite_id(suite)
+ nfu_packages = {}
+
+ suite = get_suite(Options["Suite"].lower(), session)
+ if not suite:
+ utils.fubar("Cannot find suite %s" % Options["Suite"].lower())
+
+ suite_id = suite.suite_id
+ suite_name = suite.suite_name.lower()
+
+ if "obsolete source" in checks:
+ report_obsolete_source(suite_name, session)
+
+ if "nbs" in checks:
+ reportAllNBS(suite_name, suite_id, session, rdeps)
+
+ if "outdated non-free" in checks:
+ report_outdated_nonfree(suite_name, session, rdeps)
bin_not_built = {}
if "bnb" in checks:
- # Initalize a large hash table of all binary packages
- before = time.time()
- sys.stderr.write("[Getting a list of binary packages in %s..." % (suite))
- q = projectB.query("SELECT distinct b.package FROM binaries b, bin_associations ba WHERE ba.suite = %s AND ba.bin = b.id" % (suite_id))
- ql = q.getresult()
- sys.stderr.write("done. (%d seconds)]\n" % (int(time.time()-before)))
- for i in ql:
- bins_in_suite[i[0]] = ""
+ bins_in_suite = get_suite_binaries(suite, session)
# Checks based on the Sources files
- components = Cnf.ValueList("Suite::%s::Components" % (suite))
+ components = get_component_names(session)
for component in components:
- filename = "%s/dists/%s/%s/source/Sources.gz" % (Cnf["Dir::Root"], suite, component)
- # apt_pkg.ParseTagFile needs a real file handle and can't handle a GzipFile instance...
- temp_filename = dak.lib.utils.temp_filename()
+ filename = "%s/dists/%s/%s/source/Sources.gz" % (suite.archive.path, suite_name, component)
+ # apt_pkg.TagFile needs a real file handle and can't handle a GzipFile instance...
+ (fd, temp_filename) = utils.temp_filename()
(result, output) = commands.getstatusoutput("gunzip -c %s > %s" % (filename, temp_filename))
if (result != 0):
sys.stderr.write("Gunzip invocation failed!\n%s\n" % (output))
sys.exit(result)
- sources = dak.lib.utils.open_file(temp_filename)
- Sources = apt_pkg.ParseTagFile(sources)
- while Sources.Step():
- source = Sources.Section.Find('Package')
- source_version = Sources.Section.Find('Version')
- architecture = Sources.Section.Find('Architecture')
- binaries = Sources.Section.Find('Binary')
- binaries_list = map(string.strip, binaries.split(','))
+ sources = utils.open_file(temp_filename)
+ Sources = apt_pkg.TagFile(sources)
+ while Sources.step():
+ source = Sources.section.find('Package')
+ source_version = Sources.section.find('Version')
+ architecture = Sources.section.find('Architecture')
+ binaries = Sources.section.find('Binary')
+ binaries_list = [ i.strip() for i in binaries.split(',') ]
if "bnb" in checks:
# Check for binaries not built on any architecture.
bin_not_built[source][binary] = ""
if "anais" in checks:
- anais_output += do_anais(architecture, binaries_list, source)
+ anais_output += do_anais(architecture, binaries_list, source, session)
- # Check for duplicated packages and build indices for checking "no source" later
+ # build indices for checking "no source" later
source_index = component + '/' + source
- if src_pkgs.has_key(source):
- print " %s is a duplicated source package (%s and %s)" % (source, source_index, src_pkgs[source])
src_pkgs[source] = source_index
for binary in binaries_list:
- if bin_pkgs.has_key(binary):
- key_list = [ source, bin_pkgs[binary] ]
- key_list.sort()
- key = '~'.join(key_list)
- duplicate_bins.setdefault(key, [])
- duplicate_bins[key].append(binary)
bin_pkgs[binary] = source
source_binaries[source] = binaries
source_versions[source] = source_version
os.unlink(temp_filename)
# Checks based on the Packages files
- for component in components + ['main/debian-installer']:
- architectures = filter(dak.lib.utils.real_arch, Cnf.ValueList("Suite::%s::Architectures" % (suite)))
+ check_components = components[:]
+ if suite_name != "experimental":
+ check_components.append('main/debian-installer');
+
+ for component in check_components:
+ architectures = [ a.arch_string for a in get_suite_architectures(suite_name,
+ skipsrc=True, skipall=True,
+ session=session) ]
for architecture in architectures:
- filename = "%s/dists/%s/%s/binary-%s/Packages.gz" % (Cnf["Dir::Root"], suite, component, architecture)
- # apt_pkg.ParseTagFile needs a real file handle
- temp_filename = dak.lib.utils.temp_filename()
+ if component == 'main/debian-installer' and re.match("kfreebsd", architecture):
+ continue
+ filename = "%s/dists/%s/%s/binary-%s/Packages.gz" % (suite.archive.path, suite_name, component, architecture)
+ # apt_pkg.TagFile needs a real file handle
+ (fd, temp_filename) = utils.temp_filename()
(result, output) = commands.getstatusoutput("gunzip -c %s > %s" % (filename, temp_filename))
if (result != 0):
sys.stderr.write("Gunzip invocation failed!\n%s\n" % (output))
sys.exit(result)
- packages = dak.lib.utils.open_file(temp_filename)
- Packages = apt_pkg.ParseTagFile(packages)
- while Packages.Step():
- package = Packages.Section.Find('Package')
- source = Packages.Section.Find('Source', "")
- version = Packages.Section.Find('Version')
+
+ if "nfu" in checks:
+ nfu_packages.setdefault(architecture,[])
+ nfu_entries = parse_nfu(architecture)
+
+ packages = utils.open_file(temp_filename)
+ Packages = apt_pkg.TagFile(packages)
+ while Packages.step():
+ package = Packages.section.find('Package')
+ source = Packages.section.find('Source', "")
+ version = Packages.section.find('Version')
if source == "":
source = package
if bin2source.has_key(package) and \
- apt_pkg.VersionCompare(version, bin2source[package]["version"]) > 0:
+ apt_pkg.version_compare(version, bin2source[package]["version"]) > 0:
bin2source[package]["version"] = version
bin2source[package]["source"] = source
else:
bin2source[package]["version"] = version
bin2source[package]["source"] = source
if source.find("(") != -1:
- m = dak.lib.utils.re_extract_src_version.match(source)
+ m = re_extract_src_version.match(source)
source = m.group(1)
version = m.group(2)
if not bin_pkgs.has_key(package):
nbs[source].setdefault(package, {})
nbs[source][package][version] = ""
else:
- previous_source = bin_pkgs[package]
- if previous_source != source:
- key_list = [ source, previous_source ]
- key_list.sort()
- key = '~'.join(key_list)
- duplicate_bins.setdefault(key, [])
- if package not in duplicate_bins[key]:
- duplicate_bins[key].append(package)
+ if "nfu" in checks:
+ if package in nfu_entries and \
+ version != source_versions[source]: # only suggest to remove out-of-date packages
+ nfu_packages[architecture].append((package,version,source_versions[source]))
+
packages.close()
os.unlink(temp_filename)
-
- if "obsolete source" in checks:
- do_obsolete_source(duplicate_bins, bin2source)
# Distinguish dubious (version numbers match) and 'real' NBS (they don't)
dubious_nbs = {}
- real_nbs = {}
for source in nbs.keys():
for package in nbs[source].keys():
versions = nbs[source][package].keys()
- versions.sort(apt_pkg.VersionCompare)
+ versions.sort(apt_pkg.version_compare)
latest_version = versions.pop()
source_version = source_versions.get(source,"0")
- if apt_pkg.VersionCompare(latest_version, source_version) == 0:
- add_nbs(dubious_nbs, source, latest_version, package)
- else:
- add_nbs(real_nbs, source, latest_version, package)
+ if apt_pkg.version_compare(latest_version, source_version) == 0:
+ add_nbs(dubious_nbs, source, latest_version, package, suite_id, session)
if "nviu" in checks:
- do_nviu()
+ do_newer_version('unstable', 'experimental', 'NVIU', session)
- if "nbs" in checks:
- do_nbs(real_nbs)
+ if "nvit" in checks:
+ do_newer_version('testing', 'testing-proposed-updates', 'NVIT', session)
###
print "="*75
print
+ if "nfu" in checks:
+ do_nfu(nfu_packages)
+
if "bnb" in checks:
print "Unbuilt binary packages"
print "-----------------------"
binaries = bin_not_built[source].keys()
binaries.sort()
print " o %s: %s" % (source, ", ".join(binaries))
- print
+ print
if "bms" in checks:
- print "Built from multiple source packages"
- print "-----------------------------------"
- print
- keys = duplicate_bins.keys()
- keys.sort()
- for key in keys:
- (source_a, source_b) = key.split("~")
- print " o %s & %s => %s" % (source_a, source_b, ", ".join(duplicate_bins[key]))
- print
+ report_multiple_source(suite)
if "anais" in checks:
print "Architecture Not Allowed In Source"
print "----------------------------------"
print anais_output
- print
+ print
if "dubious nbs" in checks:
do_dubious_nbs(dubious_nbs)