from daklib.dbconn import *
from daklib import utils
from daklib.dak_exceptions import *
-from daklib.regexes import re_strip_source_version, re_build_dep_arch
+from daklib.regexes import re_strip_source_version, re_bin_only_nmu
import debianbts as bts
################################################################################
################################################################################
-def reverse_depends_check(removals, suites, arches=None):
- cnf = Config()
-
+def reverse_depends_check(removals, suite, arches=None, session=None):
print "Checking reverse dependencies..."
- components = cnf.ValueList("Suite::%s::Components" % suites[0])
- dep_problem = 0
- p2c = {}
- all_broken = {}
- if arches:
- all_arches = set(arches)
- else:
- all_arches = set([x.arch_string for x in get_suite_architectures(suites[0])])
- all_arches -= set(["source", "all"])
- for architecture in all_arches:
- deps = {}
- sources = {}
- virtual_packages = {}
- for component in components:
- filename = "%s/dists/%s/%s/binary-%s/Packages.gz" % (cnf["Dir::Root"], suites[0], component, architecture)
- # apt_pkg.ParseTagFile needs a real file handle and can't handle a GzipFile instance...
- (fd, temp_filename) = utils.temp_filename()
- (result, output) = commands.getstatusoutput("gunzip -c %s > %s" % (filename, temp_filename))
- if (result != 0):
- utils.fubar("Gunzip invocation failed!\n%s\n" % (output), result)
- # Also check for udebs
- filename = "%s/dists/%s/%s/debian-installer/binary-%s/Packages.gz" % (cnf["Dir::Root"], suites[0], component, architecture)
- if os.path.exists(filename):
- (result, output) = commands.getstatusoutput("gunzip -c %s >> %s" % (filename, temp_filename))
- if (result != 0):
- utils.fubar("Gunzip invocation failed!\n%s\n" % (output), result)
- packages = utils.open_file(temp_filename)
- Packages = apt_pkg.ParseTagFile(packages)
- while Packages.Step():
- package = Packages.Section.Find("Package")
- source = Packages.Section.Find("Source")
- if not source:
- source = package
- elif ' ' in source:
- source = source.split(' ', 1)[0]
- sources[package] = source
- depends = Packages.Section.Find("Depends")
- if depends:
- deps[package] = depends
- provides = Packages.Section.Find("Provides")
- # Maintain a counter for each virtual package. If a
- # Provides: exists, set the counter to 0 and count all
- # provides by a package not in the list for removal.
- # If the counter stays 0 at the end, we know that only
- # the to-be-removed packages provided this virtual
- # package.
- if provides:
- for virtual_pkg in provides.split(","):
- virtual_pkg = virtual_pkg.strip()
- if virtual_pkg == package: continue
- if not virtual_packages.has_key(virtual_pkg):
- virtual_packages[virtual_pkg] = 0
- if package not in removals:
- virtual_packages[virtual_pkg] += 1
- p2c[package] = component
- packages.close()
- os.unlink(temp_filename)
-
- # If a virtual package is only provided by the to-be-removed
- # packages, treat the virtual package as to-be-removed too.
- for virtual_pkg in virtual_packages.keys():
- if virtual_packages[virtual_pkg] == 0:
- removals.append(virtual_pkg)
-
- # Check binary dependencies (Depends)
- for package in deps.keys():
- if package in removals: continue
- parsed_dep = []
- try:
- parsed_dep += apt_pkg.ParseDepends(deps[package])
- except ValueError, e:
- print "Error for package %s: %s" % (package, e)
- for dep in parsed_dep:
- # Check for partial breakage. If a package has a ORed
- # dependency, there is only a dependency problem if all
- # packages in the ORed depends will be removed.
- unsat = 0
- for dep_package, _, _ in dep:
- if dep_package in removals:
- unsat += 1
- if unsat == len(dep):
- component = p2c[package]
- source = sources[package]
- if component != "main":
- source = "%s/%s" % (source, component)
- all_broken.setdefault(source, {}).setdefault(package, set()).add(architecture)
- dep_problem = 1
-
- if all_broken:
- print "# Broken Depends:"
- for source, bindict in sorted(all_broken.items()):
- lines = []
- for binary, arches in sorted(bindict.items()):
- if arches == all_arches:
- lines.append(binary)
- else:
- lines.append('%s [%s]' % (binary, ' '.join(sorted(arches))))
- print '%s: %s' % (source, lines[0])
- for line in lines[1:]:
- print ' ' * (len(source) + 2) + line
- print
-
- # Check source dependencies (Build-Depends and Build-Depends-Indep)
- all_broken.clear()
- for component in components:
- filename = "%s/dists/%s/%s/source/Sources.gz" % (cnf["Dir::Root"], suites[0], component)
- # apt_pkg.ParseTagFile needs a real file handle and can't handle a GzipFile instance...
- (fd, temp_filename) = utils.temp_filename()
- result, output = commands.getstatusoutput("gunzip -c %s > %s" % (filename, temp_filename))
- if result != 0:
- sys.stderr.write("Gunzip invocation failed!\n%s\n" % (output))
- sys.exit(result)
- sources = utils.open_file(temp_filename, "r")
- Sources = apt_pkg.ParseTagFile(sources)
- while Sources.Step():
- source = Sources.Section.Find("Package")
- if source in removals: continue
- parsed_dep = []
- for build_dep_type in ["Build-Depends", "Build-Depends-Indep"]:
- build_dep = Sources.Section.get(build_dep_type)
- if build_dep:
- # Remove [arch] information since we want to see breakage on all arches
- build_dep = re_build_dep_arch.sub("", build_dep)
- try:
- parsed_dep += apt_pkg.ParseDepends(build_dep)
- except ValueError, e:
- print "Error for source %s: %s" % (source, e)
- for dep in parsed_dep:
- unsat = 0
- for dep_package, _, _ in dep:
- if dep_package in removals:
- unsat += 1
- if unsat == len(dep):
- if component != "main":
- source = "%s/%s" % (source, component)
- all_broken.setdefault(source, set()).add(utils.pp_deps(dep))
- dep_problem = 1
- sources.close()
- os.unlink(temp_filename)
-
- if all_broken:
- print "# Broken Build-Depends:"
- for source, bdeps in sorted(all_broken.items()):
- bdeps = sorted(bdeps)
- print '%s: %s' % (source, bdeps[0])
- for bdep in bdeps[1:]:
- print ' ' * (len(source) + 2) + bdep
- print
-
- if dep_problem:
+ if utils.check_reverse_depends(removals, suite, arches, session):
print "Dependency problem found."
if not Options["No-Action"]:
game_over()
if not cnf.has_key("Rm::Options::Suite"):
cnf["Rm::Options::Suite"] = "unstable"
- arguments = apt_pkg.ParseCommandLine(cnf.Cnf, Arguments, sys.argv)
- Options = cnf.SubTree("Rm::Options")
+ arguments = apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
+ Options = cnf.subtree("Rm::Options")
if Options["Help"]:
usage()
carbon_copy = []
for copy_to in utils.split_args(Options.get("Carbon-Copy")):
if copy_to.isdigit():
- carbon_copy.append(copy_to + "@" + cnf["Dinstall::BugServer"])
+ if cnf.has_key("Dinstall::BugServer"):
+ carbon_copy.append(copy_to + "@" + cnf["Dinstall::BugServer"])
+ else:
+ utils.fubar("Asked to send mail to #%s in BTS but Dinstall::BugServer is not configured" % copy_to)
elif copy_to == 'package':
for package in arguments:
if cnf.has_key("Dinstall::PackagesServer"):
s = get_suite(suite, session=session)
if s is not None:
suite_ids_list.append(s.suite_id)
- if suite == "stable":
- print "**WARNING** About to remove from the stable suite!"
+ if suite in ("oldstable", "stable"):
+ print "**WARNING** About to remove from the (old)stable suite!"
print "This should only be done just prior to a (point) release and not at"
print "any other time."
game_over()
# Additional component processing
over_con_components = con_components.replace("c.id", "component")
+ # Don't do dependency checks on multiple suites
+ if Options["Rdep-Check"] and len(suites) > 1:
+ utils.fubar("Reverse dependency check on multiple suites is not implemented.")
+
print "Working...",
sys.stdout.flush()
to_remove = []
if Options["Binary-Only"]:
# Binary-only
- q = session.execute("SELECT b.package, b.version, a.arch_string, b.id, b.maintainer FROM binaries b, bin_associations ba, architecture a, suite su, files f, location l, component c WHERE ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id AND b.file = f.id AND f.location = l.id AND l.component = c.id %s %s %s %s" % (con_packages, con_suites, con_components, con_architectures))
+ q = session.execute("SELECT b.package, b.version, a.arch_string, b.id, b.maintainer FROM binaries b, bin_associations ba, architecture a, suite su, files f, files_archive_map af, component c WHERE ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id AND b.file = f.id AND af.file_id = f.id AND af.archive_id = su.archive_id AND af.component_id = c.id %s %s %s %s" % (con_packages, con_suites, con_components, con_architectures))
for i in q.fetchall():
to_remove.append(i)
else:
# Source-only
source_packages = {}
- q = session.execute("SELECT l.path, f.filename, s.source, s.version, 'source', s.id, s.maintainer FROM source s, src_associations sa, suite su, files f, location l, component c WHERE sa.source = s.id AND sa.suite = su.id AND s.file = f.id AND f.location = l.id AND l.component = c.id %s %s %s" % (con_packages, con_suites, con_components))
+ q = session.execute("SELECT archive.path || '/pool/' || c.name || '/', f.filename, s.source, s.version, 'source', s.id, s.maintainer FROM source s, src_associations sa, suite su, archive, files f, files_archive_map af, component c WHERE sa.source = s.id AND sa.suite = su.id AND archive.id = su.archive_id AND s.file = f.id AND af.file_id = f.id AND af.archive_id = su.archive_id AND af.component_id = c.id %s %s %s" % (con_packages, con_suites, con_components))
for i in q.fetchall():
source_packages[i[2]] = i[:2]
to_remove.append(i[2:])
# Source + Binary
binary_packages = {}
# First get a list of binary package names we suspect are linked to the source
- q = session.execute("SELECT DISTINCT b.package FROM binaries b, source s, src_associations sa, suite su, files f, location l, component c WHERE b.source = s.id AND sa.source = s.id AND sa.suite = su.id AND s.file = f.id AND f.location = l.id AND l.component = c.id %s %s %s" % (con_packages, con_suites, con_components))
+ q = session.execute("SELECT DISTINCT b.package FROM binaries b, source s, src_associations sa, suite su, archive, files f, files_archive_map af, component c WHERE b.source = s.id AND sa.source = s.id AND sa.suite = su.id AND su.archive_id = archive.id AND s.file = f.id AND f.id = af.file_id AND af.archive_id = su.archive_id AND af.component_id = c.id %s %s %s" % (con_packages, con_suites, con_components))
for i in q.fetchall():
binary_packages[i[0]] = ""
    # Then parse each .dsc that we found earlier to see what binary packages it thinks it produces;
    # check whether any of those binaries belong to a to-be-removed
    # source package and if so add it to the list of packages
    # to be removed.
for package in binary_packages.keys():
- q = session.execute("SELECT l.path, f.filename, b.package, b.version, a.arch_string, b.id, b.maintainer FROM binaries b, bin_associations ba, architecture a, suite su, files f, location l, component c WHERE ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id AND b.file = f.id AND f.location = l.id AND l.component = c.id %s %s %s AND b.package = '%s'" % (con_suites, con_components, con_architectures, package))
+ q = session.execute("SELECT archive.path || '/pool/' || c.name || '/', f.filename, b.package, b.version, a.arch_string, b.id, b.maintainer FROM binaries b, bin_associations ba, architecture a, suite su, archive, files f, files_archive_map af, component c WHERE ba.bin = b.id AND ba.suite = su.id AND archive.id = su.archive_id AND b.architecture = a.id AND b.file = f.id AND f.id = af.file_id AND af.archive_id = su.archive_id AND af.component_id = c.id %s %s %s AND b.package = '%s'" % (con_suites, con_components, con_architectures, package))
for i in q.fetchall():
filename = "/".join(i[:2])
- control = apt_pkg.ParseSection(apt_inst.debExtractControl(utils.open_file(filename)))
- source = control.Find("Source", control.Find("Package"))
+ control = apt_pkg.TagSection(utils.deb_extract_control(utils.open_file(filename)))
+ source = control.find("Source", control.find("Package"))
source = re_strip_source_version.sub('', source)
if source_packages.has_key(source):
to_remove.append(i[2:])
versions = []
for package in removals:
versions = d[package].keys()
- versions.sort(apt_pkg.VersionCompare)
+ versions.sort(apt_pkg.version_compare)
for version in versions:
d[package][version].sort(utils.arch_compare_sw)
summary += "%10s | %10s | %s\n" % (package, version, ", ".join(d[package][version]))
if Options["Rdep-Check"]:
arches = utils.split_args(Options["Architecture"])
- reverse_depends_check(removals, suites, arches)
+ reverse_depends_check(removals, suites[0], arches, session)
# If -n/--no-action, drop out here
if Options["No-Action"]:
session.commit()
print "done."
+ # If we don't have a Bug server configured, we're done
+ if not cnf.has_key("Dinstall::BugServer"):
+ if Options["Done"] or Options["Do-Close"]:
+ print "Cannot send mail to BugServer as Dinstall::BugServer is not configured"
+
+ logfile.write("=========================================================================\n")
+ logfile.close()
+
+ logfile822.write("\n")
+ logfile822.close()
+
+ return
+
# read common subst variables for all bug closure mails
Subst_common = {}
Subst_common["__RM_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
if Options["Done"]:
Subst_close_rm = Subst_common
bcc = []
- if cnf.Find("Dinstall::Bcc") != "":
+ if cnf.find("Dinstall::Bcc") != "":
bcc.append(cnf["Dinstall::Bcc"])
- if cnf.Find("Rm::Bcc") != "":
+ if cnf.find("Rm::Bcc") != "":
bcc.append(cnf["Rm::Bcc"])
if bcc:
Subst_close_rm["__BCC__"] = "Bcc: " + ", ".join(bcc)
summarymail = "%s\n------------------- Reason -------------------\n%s\n" % (summary, Options["Reason"])
summarymail += "----------------------------------------------\n"
Subst_close_rm["__SUMMARY__"] = summarymail
+
whereami = utils.where_am_i()
- Archive = cnf.SubTree("Archive::%s" % (whereami))
- Subst_close_rm["__MASTER_ARCHIVE__"] = Archive["OriginServer"]
- Subst_close_rm["__PRIMARY_MIRROR__"] = Archive["PrimaryMirror"]
+ Archive = get_archive(whereami, session)
+ if Archive is None:
+ utils.warn("Cannot find archive %s. Setting blank values for origin" % whereami)
+ Subst_close_rm["__PRIMARY_MIRROR__"] = ""
+ else:
+ Subst_close_rm["__PRIMARY_MIRROR__"] = Archive.primary_mirror
+
for bug in utils.split_args(Options["Done"]):
Subst_close_rm["__BUG_NUMBER__"] = bug
if Options["Do-Close"]:
Subst_close_other = Subst_common
bcc = []
wnpp = utils.parse_wnpp_bug_file()
+ versions = list(set([re_bin_only_nmu.sub('', v) for v in versions]))
if len(versions) == 1:
Subst_close_other["__VERSION__"] = versions[0]
else:
if len(sources) == 1:
source_pkg = source.split("_", 1)[0]
else:
- utils.fubar("Closing bugs for multiple source pakcages is not supported. Do it yourself.")
+ utils.fubar("Closing bugs for multiple source packages is not supported. Do it yourself.")
Subst_close_other["__BUG_NUMBER_ALSO__"] = ""
Subst_close_other["__SOURCE__"] = source_pkg
other_bugs = bts.get_bugs('src', source_pkg, 'status', 'open')