1 """General purpose package removal code for ftpmaster
3 @contact: Debian FTP Master <ftpmaster@debian.org>
4 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
5 @copyright: 2010 Alexander Reichle-Schmehl <tolimar@debian.org>
6 @copyright: 2015 Niels Thykier <niels@thykier.net>
7 @license: GNU General Public License version 2 or later
9 # Copyright (C) 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
10 # Copyright (C) 2010 Alexander Reichle-Schmehl <tolimar@debian.org>
12 # This program is free software; you can redistribute it and/or modify
13 # it under the terms of the GNU General Public License as published by
14 # the Free Software Foundation; either version 2 of the License, or
15 # (at your option) any later version.
17 # This program is distributed in the hope that it will be useful,
18 # but WITHOUT ANY WARRANTY; without even the implied warranty of
19 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20 # GNU General Public License for more details.
22 # You should have received a copy of the GNU General Public License
23 # along with this program; if not, write to the Free Software
24 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
26 ################################################################################
28 # TODO: Insert "random dak quote" here
30 ################################################################################
35 from collections import defaultdict
36 from regexes import re_build_dep_arch
38 from daklib.dbconn import *
39 from daklib import utils
40 from daklib.regexes import re_bin_only_nmu
41 import debianbts as bts
43 ################################################################################
class ReverseDependencyChecker(object):
    """A bulk tester for reverse dependency checks

    This class is similar to the check_reverse_depends method from "utils". However,
    it is primarily focused on facilitating bulk testing of reverse dependencies.
    It caches the state of the suite and then uses that as basis for answering queries.
    This saves a significant amount of time if multiple reverse dependency checks are
    performed against the same suite.
    """

    def __init__(self, session, suite):
        """Creates a new ReverseDependencyChecker instance

        This will spend a significant amount of time caching data.

        @type session: SQLA Session
        @param session: The database session in use

        @type suite: str
        @param suite: The name of the suite that is used as basis for removal tests.
        """
        self._session = session
        dbsuite = get_suite(suite, session)
        # Map of architecture name -> architecture database id for this suite
        suite_archs2id = dict((x.arch_string, x.arch_id) for x in get_suite_architectures(suite))
        package_dependencies, arch_providors_of, arch_provided_by = self._load_package_information(session,
        self._package_dependencies = package_dependencies
        self._arch_providors_of = arch_providors_of
        self._arch_provided_by = arch_provided_by
        # All architecture names in the suite (including "all" and "source")
        self._archs_in_suite = set(suite_archs2id)

    # NOTE(review): defined without "self" and invoked via self._load_package_information(...)
    # above — presumably decorated as a @staticmethod (decorator not visible in this excerpt);
    # confirm against the full file.
    def _load_package_information(session, suite_id, suite_archs2id):
        # Build, per architecture:
        #   package_dependencies[arch][pkg] -> set of dependency clauses
        #     (each clause is a frozenset of alternative package names)
        #   arch_providors_of[arch][pkg]    -> set of virtual packages pkg Provides
        #   arch_provided_by[arch][virt]    -> set of packages providing virt
        package_dependencies = defaultdict(lambda: defaultdict(set))
        arch_providors_of = defaultdict(lambda: defaultdict(set))
        arch_provided_by = defaultdict(lambda: defaultdict(set))
        source_deps = defaultdict(set)
        metakey_d = get_or_set_metadatakey("Depends", session)
        metakey_p = get_or_set_metadatakey("Provides", session)
            'arch_all_id': suite_archs2id['all'],
            'metakey_d_id': metakey_d.key_id,
            'metakey_p_id': metakey_p.key_id,
        all_arches = set(suite_archs2id)
        # "source" is not a binary architecture; build-dependencies are handled below
        all_arches.discard('source')

        # Build-dependency information is stored under the pseudo-architecture "source"
        package_dependencies['source'] = source_deps

        for architecture in all_arches:
            deps = defaultdict(set)
            providers_of = defaultdict(set)
            provided_by = defaultdict(set)
            arch_providors_of[architecture] = providers_of
            arch_provided_by[architecture] = provided_by
            package_dependencies[architecture] = deps

            params['arch_id'] = suite_archs2id[architecture]

                (SELECT bmd.value FROM binaries_metadata bmd WHERE bmd.bin_id = b.id AND bmd.key_id = :metakey_d_id) AS depends,
                (SELECT bmp.value FROM binaries_metadata bmp WHERE bmp.bin_id = b.id AND bmp.key_id = :metakey_p_id) AS provides
                JOIN bin_associations ba ON b.id = ba.bin AND ba.suite = :suite_id
                WHERE b.architecture = :arch_id OR b.architecture = :arch_all_id'''
            query = session.query('package', 'depends', 'provides'). \
                from_statement(statement).params(params)
            for package, depends, provides in query:
                if depends is not None:
                        # Each dep is a list of alternatives; keep only the package names
                        for dep in apt_pkg.parse_depends(depends):
                            parsed_dep.append(frozenset(d[0] for d in dep))
                        deps[package].update(parsed_dep)
                    except ValueError as e:
                        # Unparsable Depends field: report and carry on with the rest
                        print "Error for package %s: %s" % (package, e)
                # Maintain a counter for each virtual package. If a
                # Provides: exists, set the counter to 0 and count all
                # provides by a package not in the list for removal.
                # If the counter stays 0 at the end, we know that only
                # the to-be-removed packages provided this virtual
                # package.
                if provides is not None:
                    for virtual_pkg in provides.split(","):
                        virtual_pkg = virtual_pkg.strip()
                        # A package "providing" itself adds no information
                        if virtual_pkg == package:
                        provided_by[virtual_pkg].add(package)
                        providers_of[package].add(virtual_pkg)

        # Check source dependencies (Build-Depends and Build-Depends-Indep)
        metakey_bd = get_or_set_metadatakey("Build-Depends", session)
        metakey_bdi = get_or_set_metadatakey("Build-Depends-Indep", session)
            'suite_id': suite_id,
            'metakey_ids': (metakey_bd.key_id, metakey_bdi.key_id),
            SELECT s.source, string_agg(sm.value, ', ') as build_dep
                JOIN source_metadata sm ON s.id = sm.src_id
                (SELECT source FROM src_associations
                    WHERE suite = :suite_id)
                AND sm.key_id in :metakey_ids
                GROUP BY s.id, s.source'''
        query = session.query('source', 'build_dep').from_statement(statement). \
        for source, build_dep in query:
            if build_dep is not None:
                # Remove [arch] information since we want to see breakage on all arches
                build_dep = re_build_dep_arch.sub("", build_dep)
                    for dep in apt_pkg.parse_src_depends(build_dep):
                        parsed_dep.append(frozenset(d[0] for d in dep))
                    source_deps[source].update(parsed_dep)
                except ValueError as e:
                    # Unparsable Build-Depends field: report and carry on
                    print "Error for package %s: %s" % (source, e)

        return package_dependencies, arch_providors_of, arch_provided_by

    def check_reverse_depends(self, removal_requests):
        """Bulk check reverse dependencies

        Example:
          removal_request = {
            "eclipse-rcp": None,  # means ALL architectures (incl. source)
            "eclipse": None,  # means ALL architectures (incl. source)
            "lintian": ["source", "all"],  # Only these two "architectures".
          }
          obj.check_reverse_depends(removal_request)

        @type removal_requests: dict (or a list of tuples)
        @param removal_requests: A dictionary mapping a package name to a list of architectures. The list of
        architectures decides from which the package will be removed - if the list is empty the package will
        be removed on ALL architectures in the suite (including "source").

        @rtype: dict
        @return: A mapping of "removed package" (as a "(pkg, arch)"-tuple) to a set of broken
        packages (also as "(pkg, arch)"-tuple). Note that the architecture values
        in these tuples /can/ be "source" to reflect a breakage in build-dependencies.
        """
        archs_in_suite = self._archs_in_suite
        removals_by_arch = defaultdict(set)
        affected_virtual_by_arch = defaultdict(set)
        package_dependencies = self._package_dependencies
        arch_providors_of = self._arch_providors_of
        arch_provided_by = self._arch_provided_by
        # Per arch: virtual package -> the removed package blamed for its disappearance
        arch_provides2removal = defaultdict(lambda: defaultdict(set))
        dep_problems = defaultdict(set)
        src_deps = package_dependencies['source']
        arch_all_removals = set()
        # NOTE(review): src_removals is used below but its initialisation is not
        # visible in this excerpt — presumably "src_removals = set()"; confirm.

        # Accept either a dict or an iterable of (pkg, arch_list) tuples
        if isinstance(removal_requests, dict):
            removal_requests = removal_requests.iteritems()

        # Sort the removal requests into per-architecture buckets
        for pkg, arch_list in removal_requests:
                arch_list = archs_in_suite
            for arch in arch_list:
                    src_removals.add(pkg)
                    arch_all_removals.add(pkg)
                    removals_by_arch[arch].add(pkg)
                    if pkg in arch_providors_of[arch]:
                        affected_virtual_by_arch[arch].add(pkg)

        # An "all" removal affects every real (non-source) architecture
        if arch_all_removals:
            for arch in archs_in_suite:
                if arch in ('all', 'source'):
                removals_by_arch[arch].update(arch_all_removals)
                for pkg in arch_all_removals:
                    if pkg in arch_providors_of[arch]:
                        affected_virtual_by_arch[arch].add(pkg)

        if not removals_by_arch:
            # Nothing to remove => no problems

        # Propagate removals to virtual packages whose providers all disappear
        for arch, removed_providers in affected_virtual_by_arch.iteritems():
            provides2removal = arch_provides2removal[arch]
            removals = removals_by_arch[arch]
            for virtual_pkg, virtual_providers in arch_provided_by[arch].iteritems():
                v = virtual_providers & removed_providers
                if len(v) == len(virtual_providers):
                    # We removed all the providers of virtual_pkg
                    removals.add(virtual_pkg)
                    # Pick one to take the blame for the removal
                    # - we sort for determinism, optimally we would prefer to blame the same package
                    #   to minimise the number of blamed packages.
                    provides2removal[virtual_pkg] = sorted(v)[0]

        for arch, removals in removals_by_arch.iteritems():
            deps = package_dependencies[arch]
            provides2removal = arch_provides2removal[arch]

            # Check binary dependencies (Depends)
            for package, dependencies in deps.iteritems():
                # Removed packages cannot themselves be "broken"
                if package in removals:
                for clause in dependencies:
                    # A clause is broken only if ALL its alternatives were removed
                    if not (clause <= removals):
                        # Something probably still satisfies this relation
                    # whoops, we seemed to have removed all packages that could possibly satisfy
                    # this relation. Lets blame something for it
                    for dep_package in clause:
                        removal = dep_package
                        # Blame the provider chosen above rather than the virtual name
                        if dep_package in provides2removal:
                            removal = provides2removal[dep_package]
                        dep_problems[(removal, arch)].add((package, arch))

            # Check source dependencies (Build-Depends/-Indep) against this arch's removals
            for source, build_dependencies in src_deps.iteritems():
                if source in src_removals:
                for clause in build_dependencies:
                    if not (clause <= removals):
                        # Something probably still satisfies this relation
                    # whoops, we seemed to have removed all packages that could possibly satisfy
                    # this relation. Lets blame something for it
                    for dep_package in clause:
                        removal = dep_package
                        if dep_package in provides2removal:
                            removal = provides2removal[dep_package]
                        dep_problems[(removal, arch)].add((source, 'source'))
def remove(session, reason, suites, removals,
           whoami=None, partial=False, components=None, done_bugs=None, date=None,
           carbon_copy=None, close_related_bugs=False):
    """Batch remove a number of packages

    Removes the given packages from the given suites, records the removal in
    the removal log files and (if configured) sends bug-closing mails.

    @type session: SQLA Session
    @param session: The database session in use

    @type reason: string
    @param reason: The reason for the removal (e.g. "[auto-cruft] NBS (no longer built by <source>)")

    @type suites: list
    @param suites: A list of the suite names in which the removal should occur

    @type removals: list
    @param removals: A list of the removals. Each element should be a tuple (or list) of at least the following
        for 4 items from the database (in order): package, version, architecture, (database) id.
        For source packages, the "architecture" should be set to "source".

    @type partial: bool
    @param partial: Whether the removal is "partial" (e.g. architecture specific).

    @type components: list
    @param components: List of components involved in a partial removal. Can be an empty list to not restrict the
        removal to any components.

    @type whoami: string
    @param whoami: The person (or entity) doing the removal. Defaults to utils.whoami()

    @type date: string
    @param date: The date of the removal. Defaults to commands.getoutput("date -R")

    @type done_bugs: list
    @param done_bugs: A list of bugs to be closed when doing this removal.

    @type close_related_bugs: bool
    @param close_related_bugs: Whether bugs related to the package being removed should be closed as well. NB: Not implemented
        for more than one suite.

    @type carbon_copy: list
    @param carbon_copy: A list of mail addresses to CC when doing removals. NB: all items are taken "as-is" unlike
        "dak rm".
    """
    # Generate the summary of what's to be removed
    suites_list = utils.join_with_commas_and(suites)
    cnf = utils.get_conf()
    # NOTE(review): con_components starts as None and is later interpolated
    # directly into SQL via "%s" — verify it is always a string by the time
    # the override DELETE below runs.
    con_components = None

    ####################################################################################################

        # Guard: a removal must always carry a reason
        raise ValueError("Empty removal reason not permitted")

        raise ValueError("Nothing to remove!?")

        raise ValueError("Removals without a suite!?")

        whoami = utils.whoami()

        date = commands.getoutput("date -R")

        # Resolve component names to database ids for a partial removal
        component_ids_list = []
        for componentname in components:
            component = get_component(componentname, session=session)
            if component is None:
                raise ValueError("component '%s' not recognised." % componentname)
                component_ids_list.append(component.component_id)
        con_components = "AND component IN (%s)" % ", ".join([str(i) for i in component_ids_list])

            # d maps package -> version -> list of architectures
            if version not in d[package]:
                d[package][version] = []
            if architecture not in d[package][version]:
                d[package][version].append(architecture)

    # Build the human-readable summary table: package | version | arch, arch, ...
    for package in sorted(removals):
        versions = sorted(d[package], cmp=apt_pkg.version_compare)
        for version in versions:
            d[package][version].sort(utils.arch_compare_sw)
            summary += "%10s | %10s | %s\n" % (package, version, ", ".join(d[package][version]))

    # Split the summary back into per-row "sources" and "binaries" lists
    for package in summary.split("\n"):
        for row in package.split("\n"):
            element = row.split("|")
            if len(element) == 3:
                if element[2].find("source") > 0:
                    sources.append("%s_%s" % tuple(elem.strip(" ") for elem in element[:2]))
                    # Strip "source" out of the architecture column for the binary list
                    element[2] = sub("source\s?,?", "", element[2]).strip(" ")
                    binaries.append("%s_%s [%s]" % tuple(elem.strip(" ") for elem in element))

    dsc_type_id = get_override_type('dsc', session).overridetype_id
    deb_type_id = get_override_type('deb', session).overridetype_id

        s = get_suite(suite, session=session)
            suite_ids_list.append(s.suite_id)
            whitelists.append(s.mail_whitelist)

    ####################################################################################################
    # Write both the free-form log and the RFC822-style log
    log_filename = cnf["Rm::LogFile"]
    log822_filename = cnf["Rm::LogFile822"]
    with utils.open_file(log_filename, "a") as logfile, utils.open_file(log822_filename, "a") as logfile822:
        logfile.write("=========================================================================\n")
        logfile.write("[Date: %s] [ftpmaster: %s]\n" % (date, whoami))
        logfile.write("Removed the following packages from %s:\n\n%s" % (suites_list, summary))
            logfile.write("Closed bugs: %s\n" % (", ".join(done_bugs)))
        logfile.write("\n------------------- Reason -------------------\n%s\n" % reason)
        logfile.write("----------------------------------------------\n")

        logfile822.write("Date: %s\n" % date)
        logfile822.write("Ftpmaster: %s\n" % whoami)
        logfile822.write("Suite: %s\n" % suites_list)

            logfile822.write("Sources:\n")
            for source in sources:
                logfile822.write(" %s\n" % source)

            logfile822.write("Binaries:\n")
            for binary in binaries:
                logfile822.write(" %s\n" % binary)

        # Continuation lines in 822 format are indented by one space
        logfile822.write("Reason: %s\n" % reason.replace('\n', '\n '))
            logfile822.write("Bug: %s\n" % (", ".join(done_bugs)))

            # Remove the package from every requested suite
            for suite_id in suite_ids_list:
                if architecture == "source":
                    session.execute("DELETE FROM src_associations WHERE source = :packageid AND suite = :suiteid",
                                    {'packageid': package_id, 'suiteid': suite_id})
                    session.execute("DELETE FROM bin_associations WHERE bin = :packageid AND suite = :suiteid",
                                    {'packageid': package_id, 'suiteid': suite_id})
                # Delete from the override file
                    if architecture == "source":
                        type_id = dsc_type_id
                        type_id = deb_type_id
                    # TODO: Fix this properly to remove the remaining non-bind argument
                    session.execute("DELETE FROM override WHERE package = :package AND type = :typeid AND suite = :suiteid %s" % (con_components), {'package': package, 'typeid': type_id, 'suiteid': suite_id})

        # ### REMOVAL COMPLETE - send mail time ### #

        # If we don't have a Bug server configured, we're done
        if "Dinstall::BugServer" not in cnf:
            if done_bugs or close_related_bugs:
                utils.warn("Cannot send mail to BugServer as Dinstall::BugServer is not configured")

            logfile.write("=========================================================================\n")
            logfile822.write("\n")

        # read common subst variables for all bug closure mails
        Subst_common["__RM_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
        Subst_common["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
        Subst_common["__CC__"] = "X-DAK: dak rm"
            Subst_common["__CC__"] += "\nCc: " + ", ".join(carbon_copy)
        Subst_common["__SUITE_LIST__"] = suites_list
        Subst_common["__SUBJECT__"] = "Removed package(s) from %s" % (suites_list)
        Subst_common["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
        Subst_common["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
        Subst_common["__WHOAMI__"] = whoami

        # Send the bug closing messages
            # NOTE(review): this aliases Subst_common rather than copying it —
            # later key assignments also mutate Subst_common; confirm intended.
            Subst_close_rm = Subst_common
            if cnf.find("Dinstall::Bcc") != "":
                bcc.append(cnf["Dinstall::Bcc"])
            if cnf.find("Rm::Bcc") != "":
                bcc.append(cnf["Rm::Bcc"])
                Subst_close_rm["__BCC__"] = "Bcc: " + ", ".join(bcc)
                Subst_close_rm["__BCC__"] = "X-Filler: 42"
            summarymail = "%s\n------------------- Reason -------------------\n%s\n" % (summary, reason)
            summarymail += "----------------------------------------------\n"
            Subst_close_rm["__SUMMARY__"] = summarymail

            # One "removal done" mail per bug listed in done_bugs
            for bug in done_bugs:
                Subst_close_rm["__BUG_NUMBER__"] = bug
                if close_related_bugs:
                    mail_message = utils.TemplateSubst(Subst_close_rm,cnf["Dir::Templates"]+"/rm.bug-close-with-related")
                    mail_message = utils.TemplateSubst(Subst_close_rm,cnf["Dir::Templates"]+"/rm.bug-close")
                utils.send_mail(mail_message, whitelists=whitelists)

        # close associated bug reports
        if close_related_bugs:
            Subst_close_other = Subst_common
            wnpp = utils.parse_wnpp_bug_file()
            # Strip binNMU suffixes so binNMU versions collapse to one version
            versions = list(set([re_bin_only_nmu.sub('', v) for v in versions]))
            if len(versions) == 1:
                Subst_close_other["__VERSION__"] = versions[0]
                logfile.write("=========================================================================\n")
                logfile822.write("\n")
                raise ValueError("Closing bugs with multiple package versions is not supported. Do it yourself.")
                Subst_close_other["__BCC__"] = "Bcc: " + ", ".join(bcc)
                Subst_close_other["__BCC__"] = "X-Filler: 42"
            # at this point, I just assume, that the first closed bug gives
            # some useful information on why the package got removed
            Subst_close_other["__BUG_NUMBER__"] = done_bugs[0]
            if len(sources) == 1:
                # NOTE(review): "source" here relies on the variable still being
                # bound from the earlier "for source in sources:" log loop —
                # fragile; confirm against the full file.
                source_pkg = source.split("_", 1)[0]
                logfile.write("=========================================================================\n")
                logfile822.write("\n")
                raise ValueError("Closing bugs for multiple source packages is not supported. Please do it yourself.")
            Subst_close_other["__BUG_NUMBER_ALSO__"] = ""
            Subst_close_other["__SOURCE__"] = source_pkg

            # Find all open/forwarded bugs against the source package
            other_bugs = bts.get_bugs('src', source_pkg, 'status', 'open', 'status', 'forwarded')
                # Drop bugs that are merged with one already in the list so each
                # bug is only mailed once
                # NOTE(review): other_bugs is mutated (remove) while being
                # iterated — verify this cannot skip entries.
                for bugno in other_bugs:
                    if bugno not in merged_bugs:
                        for bug in bts.get_status(bugno):
                            for merged in bug.mergedwith:
                                other_bugs.remove(merged)
                                merged_bugs.add(merged)
                logfile.write("Also closing bug(s):")
                logfile822.write("Also-Bugs:")
                for bug in other_bugs:
                    Subst_close_other["__BUG_NUMBER_ALSO__"] += str(bug) + "-done@" + cnf["Dinstall::BugServer"] + ","
                    logfile.write(" " + str(bug))
                    logfile822.write(" " + str(bug))
                logfile822.write("\n")
            if source_pkg in wnpp:
                logfile.write("Also closing WNPP bug(s):")
                logfile822.write("Also-WNPP:")
                for bug in wnpp[source_pkg]:
                    # the wnpp-rm file we parse also contains our removal
                    # bugs, filtering that out
                    if bug != Subst_close_other["__BUG_NUMBER__"]:
                        Subst_close_other["__BUG_NUMBER_ALSO__"] += str(bug) + "-done@" + cnf["Dinstall::BugServer"] + ","
                        logfile.write(" " + str(bug))
                        logfile822.write(" " + str(bug))
                logfile822.write("\n")

            mail_message = utils.TemplateSubst(Subst_close_other, cnf["Dir::Templates"]+"/rm.bug-close-related")
            # Only send the related-bugs mail if there is actually something to close
            if Subst_close_other["__BUG_NUMBER_ALSO__"]:
                utils.send_mail(mail_message)

        logfile.write("=========================================================================\n")
        logfile822.write("\n")