X-Git-Url: https://git.decadent.org.uk/gitweb/?a=blobdiff_plain;f=dak%2Frm.py;h=3edeac3dc35c3ccdd4a9a736f135426b859461ba;hb=27e00376e81d1c37ff327ee0d39670b266418869;hp=11ec6c9eb09637cfaeb6141cc0bcd4082f3e1c31;hpb=92533c73773f68f5dcf2a79cac930375d39eb639;p=dak.git diff --git a/dak/rm.py b/dak/rm.py index 11ec6c9e..3edeac3d 100755 --- a/dak/rm.py +++ b/dak/rm.py @@ -51,7 +51,7 @@ from daklib.config import Config from daklib.dbconn import * from daklib import utils from daklib.dak_exceptions import * -from daklib.regexes import re_strip_source_version, re_build_dep_arch +from daklib.regexes import re_strip_source_version, re_build_dep_arch, re_bin_only_nmu import debianbts as bts ################################################################################ @@ -98,67 +98,72 @@ def game_over(): ################################################################################ -def reverse_depends_check(removals, suites, arches=None): +def reverse_depends_check(removals, suite, arches=None, session=None): + dbsuite = get_suite(suite, session) cnf = Config() print "Checking reverse dependencies..." - components = cnf.ValueList("Suite::%s::Components" % suites[0]) dep_problem = 0 p2c = {} all_broken = {} if arches: all_arches = set(arches) else: - all_arches = set([x.arch_string for x in get_suite_architectures(suites[0])]) + all_arches = set([x.arch_string for x in get_suite_architectures(suite)]) all_arches -= set(["source", "all"]) + metakey_d = get_or_set_metadatakey("Depends", session) + metakey_p = get_or_set_metadatakey("Provides", session) + params = { + 'suite_id': dbsuite.suite_id, + 'metakey_d_id': metakey_d.key_id, + 'metakey_p_id': metakey_p.key_id, + 'arch_all_id' : get_architecture('all', session).arch_id, + } for architecture in all_arches: deps = {} sources = {} virtual_packages = {} - for component in components: - filename = "%s/dists/%s/%s/binary-%s/Packages.gz" % (cnf["Dir::Root"], suites[0], component, architecture) - # apt_pkg.ParseTagFile needs a real file handle and can't handle a GzipFile instance... - (fd, temp_filename) = utils.temp_filename() - (result, output) = commands.getstatusoutput("gunzip -c %s > %s" % (filename, temp_filename)) - if (result != 0): - utils.fubar("Gunzip invocation failed!\n%s\n" % (output), result) - # Also check for udebs - filename = "%s/dists/%s/%s/debian-installer/binary-%s/Packages.gz" % (cnf["Dir::Root"], suites[0], component, architecture) - if os.path.exists(filename): - (result, output) = commands.getstatusoutput("gunzip -c %s >> %s" % (filename, temp_filename)) - if (result != 0): - utils.fubar("Gunzip invocation failed!\n%s\n" % (output), result) - packages = utils.open_file(temp_filename) - Packages = apt_pkg.ParseTagFile(packages) - while Packages.Step(): - package = Packages.Section.Find("Package") - source = Packages.Section.Find("Source") - if not source: - source = package - elif ' ' in source: - source = source.split(' ', 1)[0] - sources[package] = source - depends = Packages.Section.Find("Depends") - if depends: - deps[package] = depends - provides = Packages.Section.Find("Provides") - # Maintain a counter for each virtual package. If a - # Provides: exists, set the counter to 0 and count all - # provides by a package not in the list for removal. - # If the counter stays 0 at the end, we know that only - # the to-be-removed packages provided this virtual - # package. 
- if provides: - for virtual_pkg in provides.split(","): - virtual_pkg = virtual_pkg.strip() - if virtual_pkg == package: continue - if not virtual_packages.has_key(virtual_pkg): - virtual_packages[virtual_pkg] = 0 - if package not in removals: - virtual_packages[virtual_pkg] += 1 - p2c[package] = component - packages.close() - os.unlink(temp_filename) + params['arch_id'] = get_architecture(architecture, session).arch_id + + statement = ''' + WITH suite_binaries AS + (select b.id, b.package, b.source, b.file + from binaries b WHERE b.id in + (SELECT bin FROM bin_associations WHERE suite = :suite_id) + AND b.architecture in (:arch_id, :arch_all_id)) + SELECT b.id, b.package, s.source, c.name as component, + bmd.value as depends, bmp.value as provides + FROM suite_binaries b + LEFT OUTER JOIN binaries_metadata bmd + ON b.id = bmd.bin_id AND bmd.key_id = :metakey_d_id + LEFT OUTER JOIN binaries_metadata bmp + ON b.id = bmp.bin_id AND bmp.key_id = :metakey_p_id + JOIN source s ON b.source = s.id + JOIN files f ON b.file = f.id + JOIN location l ON f.location = l.id + JOIN component c ON l.component = c.id''' + session.rollback() + query = session.query('id', 'package', 'source', 'component', 'depends', 'provides'). \ + from_statement(statement).params(params) + for binary_id, package, source, component, depends, provides in query: + sources[package] = source + p2c[package] = component + if depends is not None: + deps[package] = depends + # Maintain a counter for each virtual package. If a + # Provides: exists, set the counter to 0 and count all + # provides by a package not in the list for removal. + # If the counter stays 0 at the end, we know that only + # the to-be-removed packages provided this virtual + # package. + if provides is not None: + for virtual_pkg in provides.split(","): + virtual_pkg = virtual_pkg.strip() + if virtual_pkg == package: continue + if not virtual_packages.has_key(virtual_pkg): + virtual_packages[virtual_pkg] = 0 + if package not in removals: + virtual_packages[virtual_pkg] += 1 # If a virtual package is only provided by the to-be-removed # packages, treat the virtual package as to-be-removed too. @@ -172,7 +177,7 @@ def reverse_depends_check(removals, suites, arches=None): parsed_dep = [] try: parsed_dep += apt_pkg.ParseDepends(deps[package]) - except ValueError, e: + except ValueError as e: print "Error for package %s: %s" % (package, e) for dep in parsed_dep: # Check for partial breakage. If a package has a ORed @@ -206,41 +211,44 @@ def reverse_depends_check(removals, suites, arches=None): # Check source dependencies (Build-Depends and Build-Depends-Indep) all_broken.clear() - for component in components: - filename = "%s/dists/%s/%s/source/Sources.gz" % (cnf["Dir::Root"], suites[0], component) - # apt_pkg.ParseTagFile needs a real file handle and can't handle a GzipFile instance... 
- (fd, temp_filename) = utils.temp_filename() - result, output = commands.getstatusoutput("gunzip -c %s > %s" % (filename, temp_filename)) - if result != 0: - sys.stderr.write("Gunzip invocation failed!\n%s\n" % (output)) - sys.exit(result) - sources = utils.open_file(temp_filename, "r") - Sources = apt_pkg.ParseTagFile(sources) - while Sources.Step(): - source = Sources.Section.Find("Package") - if source in removals: continue - parsed_dep = [] - for build_dep_type in ["Build-Depends", "Build-Depends-Indep"]: - build_dep = Sources.Section.get(build_dep_type) - if build_dep: - # Remove [arch] information since we want to see breakage on all arches - build_dep = re_build_dep_arch.sub("", build_dep) - try: - parsed_dep += apt_pkg.ParseDepends(build_dep) - except ValueError, e: - print "Error for source %s: %s" % (source, e) - for dep in parsed_dep: - unsat = 0 - for dep_package, _, _ in dep: - if dep_package in removals: - unsat += 1 - if unsat == len(dep): - if component != "main": - source = "%s/%s" % (source, component) - all_broken.setdefault(source, set()).add(utils.pp_deps(dep)) - dep_problem = 1 - sources.close() - os.unlink(temp_filename) + metakey_bd = get_or_set_metadatakey("Build-Depends", session) + metakey_bdi = get_or_set_metadatakey("Build-Depends-Indep", session) + params = { + 'suite_id': dbsuite.suite_id, + 'metakey_ids': (metakey_bd.key_id, metakey_bdi.key_id), + } + statement = ''' + SELECT s.id, s.source, string_agg(sm.value, ', ') as build_dep + FROM source s + JOIN source_metadata sm ON s.id = sm.src_id + WHERE s.id in + (SELECT source FROM src_associations + WHERE suite = :suite_id) + AND sm.key_id in :metakey_ids + GROUP BY s.id, s.source''' + query = session.query('id', 'source', 'build_dep').from_statement(statement). \ + params(params) + for source_id, source, build_dep in query: + if source in removals: continue + parsed_dep = [] + if build_dep is not None: + # Remove [arch] information since we want to see breakage on all arches + build_dep = re_build_dep_arch.sub("", build_dep) + try: + parsed_dep += apt_pkg.ParseDepends(build_dep) + except ValueError as e: + print "Error for source %s: %s" % (source, e) + for dep in parsed_dep: + unsat = 0 + for dep_package, _, _ in dep: + if dep_package in removals: + unsat += 1 + if unsat == len(dep): + component = DBSource.get(source_id, session).get_component_name() + if component != "main": + source = "%s/%s" % (source, component) + all_broken.setdefault(source, set()).add(utils.pp_deps(dep)) + dep_problem = 1 if all_broken: print "# Broken Build-Depends:" @@ -335,10 +343,14 @@ def main (): carbon_copy = [] for copy_to in utils.split_args(Options.get("Carbon-Copy")): if copy_to.isdigit(): - carbon_copy.append(copy_to + "@" + cnf["Dinstall::BugServer"]) + if cnf.has_key("Dinstall::BugServer"): + carbon_copy.append(copy_to + "@" + cnf["Dinstall::BugServer"]) + else: + utils.fubar("Asked to send mail to #%s in BTS but Dinstall::BugServer is not configured" % copy_to) elif copy_to == 'package': for package in arguments: - carbon_copy.append(package + "@" + cnf["Dinstall::PackagesServer"]) + if cnf.has_key("Dinstall::PackagesServer"): + carbon_copy.append(package + "@" + cnf["Dinstall::PackagesServer"]) if cnf.has_key("Dinstall::TrackingServer"): carbon_copy.append(package + "@" + cnf["Dinstall::TrackingServer"]) elif '@' in copy_to: @@ -364,8 +376,8 @@ def main (): s = get_suite(suite, session=session) if s is not None: suite_ids_list.append(s.suite_id) - if suite == "stable": - print "**WARNING** About to remove from the 
stable suite!" + if suite in ("oldstable", "stable"): + print "**WARNING** About to remove from the (old)stable suite!" print "This should only be done just prior to a (point) release and not at" print "any other time." game_over() @@ -382,6 +394,10 @@ def main (): # Additional component processing over_con_components = con_components.replace("c.id", "component") + # Don't do dependency checks on multiple suites + if Options["Rdep-Check"] and len(suites) > 1: + utils.fubar("Reverse dependency check on multiple suites is not implemented.") + print "Working...", sys.stdout.flush() to_remove = [] @@ -494,6 +510,8 @@ def main (): print "Will also close bugs: "+Options["Done"] if carbon_copy: print "Will also send CCs to: " + ", ".join(carbon_copy) + if Options["Do-Close"]: + print "Will also close associated bug reports." print print "------------------- Reason -------------------" print Options["Reason"] @@ -502,7 +520,7 @@ def main (): if Options["Rdep-Check"]: arches = utils.split_args(Options["Architecture"]) - reverse_depends_check(removals, suites, arches) + reverse_depends_check(removals, suites[0], arches, session) # If -n/--no-action, drop out here if Options["No-Action"]: @@ -523,8 +541,6 @@ def main (): logfile.write("Closed bugs: %s\n" % (Options["Done"])) logfile.write("\n------------------- Reason -------------------\n%s\n" % (Options["Reason"])) logfile.write("----------------------------------------------\n") - logfile.write("=========================================================================\n") - logfile.close() # Do the same in rfc822 format logfile822 = utils.open_file(cnf["Rm::LogFile822"], 'a') @@ -553,8 +569,6 @@ def main (): logfile822.write("Reason: %s\n" % Options["Reason"].replace('\n', '\n ')) if Options["Done"]: logfile822.write("Bug: %s\n" % Options["Done"]) - logfile822.write("\n") - logfile822.close() dsc_type_id = get_override_type('dsc', session).overridetype_id deb_type_id = get_override_type('deb', session).overridetype_id @@ -587,6 +601,19 @@ def main (): session.commit() print "done." + # If we don't have a Bug server configured, we're done + if not cnf.has_key("Dinstall::BugServer"): + if Options["Done"] or Options["Do-Close"]: + print "Cannot send mail to BugServer as Dinstall::BugServer is not configured" + + logfile.write("=========================================================================\n") + logfile.close() + + logfile822.write("\n") + logfile822.close() + + return + # read common subst variables for all bug closure mails Subst_common = {} Subst_common["__RM_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"] @@ -615,21 +642,31 @@ def main (): summarymail = "%s\n------------------- Reason -------------------\n%s\n" % (summary, Options["Reason"]) summarymail += "----------------------------------------------\n" Subst_close_rm["__SUMMARY__"] = summarymail + whereami = utils.where_am_i() - Archive = cnf.SubTree("Archive::%s" % (whereami)) - Subst_close_rm["__MASTER_ARCHIVE__"] = Archive["OriginServer"] - Subst_close_rm["__PRIMARY_MIRROR__"] = Archive["PrimaryMirror"] + Archive = get_archive(whereami, session) + if Archive is None: + utils.warn("Cannot find archive %s. 
Setting blank values for origin" % whereami)
+            Subst_close_rm["__MASTER_ARCHIVE__"] = ""
+            Subst_close_rm["__PRIMARY_MIRROR__"] = ""
+        else:
+            Subst_close_rm["__MASTER_ARCHIVE__"] = Archive.origin_server
+            Subst_close_rm["__PRIMARY_MIRROR__"] = Archive.primary_mirror
+
         for bug in utils.split_args(Options["Done"]):
             Subst_close_rm["__BUG_NUMBER__"] = bug
-            mail_message = utils.TemplateSubst(Subst_close_rm,cnf["Dir::Templates"]+"/rm.bug-close")
+            if Options["Do-Close"]:
+                mail_message = utils.TemplateSubst(Subst_close_rm,cnf["Dir::Templates"]+"/rm.bug-close-with-related")
+            else:
+                mail_message = utils.TemplateSubst(Subst_close_rm,cnf["Dir::Templates"]+"/rm.bug-close")
             utils.send_mail(mail_message)
 
     # close associated bug reports
-    # FIXME: We should also close possible WNPP bugs for that package, but
-    # currently there's no sane way to determine them
     if Options["Do-Close"]:
         Subst_close_other = Subst_common
         bcc = []
+        wnpp = utils.parse_wnpp_bug_file()
+        versions = list(set([re_bin_only_nmu.sub('', v) for v in versions]))
         if len(versions) == 1:
             Subst_close_other["__VERSION__"] = versions[0]
         else:
@@ -639,18 +676,48 @@ def main ():
         else:
             Subst_close_other["__BCC__"] = "X-Filler: 42"
         # at this point, I just assume, that the first closed bug gives
-        # some usefull information on why the package got removed
+        # some useful information on why the package got removed
         Subst_close_other["__BUG_NUMBER__"] = utils.split_args(Options["Done"])[0]
-        if len(sources) > 1:
+        if len(sources) == 1:
+            source_pkg = source.split("_", 1)[0]
+        else:
             utils.fubar("Closing bugs for multiple source packages is not supported. Do it yourself.")
         Subst_close_other["__BUG_NUMBER_ALSO__"] = ""
-        Subst_close_other["__SOURCE__"] = source.split("_", 1)[0]
-        for bug in bts.get_bugs('src', source.split("_", 1)[0], 'status', 'open'):
-            Subst_close_other["__BUG_NUMBER_ALSO__"] += str(bug) + "-done@" + cnf["Dinstall::BugServer"] + ","
+        Subst_close_other["__SOURCE__"] = source_pkg
+        other_bugs = bts.get_bugs('src', source_pkg, 'status', 'open')
+        if other_bugs:
+            logfile.write("Also closing bug(s):")
+            logfile822.write("Also-Bugs:")
+            for bug in other_bugs:
+                Subst_close_other["__BUG_NUMBER_ALSO__"] += str(bug) + "-done@" + cnf["Dinstall::BugServer"] + ","
+                logfile.write(" " + str(bug))
+                logfile822.write(" " + str(bug))
+            logfile.write("\n")
+            logfile822.write("\n")
+        if source_pkg in wnpp.keys():
+            logfile.write("Also closing WNPP bug(s):")
+            logfile822.write("Also-WNPP:")
+            for bug in wnpp[source_pkg]:
+                # the wnpp-rm file we parse also contains our removal
+                # bugs; filter those out
+                if bug != Subst_close_other["__BUG_NUMBER__"]:
+                    Subst_close_other["__BUG_NUMBER_ALSO__"] += str(bug) + "-done@" + cnf["Dinstall::BugServer"] + ","
+                    logfile.write(" " + str(bug))
+                    logfile822.write(" " + str(bug))
+            logfile.write("\n")
+            logfile822.write("\n")
+
         mail_message = utils.TemplateSubst(Subst_close_other,cnf["Dir::Templates"]+"/rm.bug-close-related")
         if Subst_close_other["__BUG_NUMBER_ALSO__"]:
             utils.send_mail(mail_message)
+
+    logfile.write("=========================================================================\n")
+    logfile.close()
+
+    logfile822.write("\n")
+    logfile822.close()
+
 
 #######################################################################################
 
 if __name__ == '__main__':
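
For reference, both the binary reverse-dependency check and the Build-Depends check in this patch treat an OR-group of a dependency field as broken only when every alternative in the group is scheduled for removal; if at least one alternative survives, the group still counts as satisfiable. The snippet below is a minimal standalone sketch of that counting rule, not dak code: dak parses the fields with apt_pkg.ParseDepends and formats groups with utils.pp_deps, while the hand-rolled splitting and the package names here are purely illustrative.

def broken_groups(depends, removals):
    # Split a Depends/Build-Depends value into comma-separated OR-groups
    # and yield every group whose alternatives are all being removed.
    for group in depends.split(","):
        alternatives = [alt.split()[0] for alt in group.split("|") if alt.strip()]
        if not alternatives:
            continue
        unsat = sum(1 for pkg in alternatives if pkg in removals)
        # Partial breakage (some alternatives survive) is tolerated;
        # only a fully unsatisfiable group is reported as a problem.
        if unsat == len(alternatives):
            yield alternatives

# Example: the first group is fully broken by the removal, the second is not.
for group in broken_groups("libfoo1 | libfoo2, libbar (>= 1.0)", set(["libfoo1", "libfoo2"])):
    print(" | ".join(group))    # -> libfoo1 | libfoo2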