3 """ General purpose package removal tool for ftpmaster """
4 # Copyright (C) 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
5 # Copyright (C) 2010 Alexander Reichle-Schmehl <tolimar@debian.org>
7 # This program is free software; you can redistribute it and/or modify
8 # it under the terms of the GNU General Public License as published by
9 # the Free Software Foundation; either version 2 of the License, or
10 # (at your option) any later version.
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU General Public License for more details.
17 # You should have received a copy of the GNU General Public License
18 # along with this program; if not, write to the Free Software
19 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 ################################################################################
23 # o OpenBSD team wants to get changes incorporated into IPF. Darren no
25 # o Ask again -> No respond. Darren coder supreme.
26 # o OpenBSD decide to make changes, but only in OpenBSD source
27 # tree. Darren hears, gets angry! Decides: "LICENSE NO ALLOW!"
29 # o OpenBSD team decide to switch to different packet filter under BSD
30 # license. Because Project Goal: Every user should be able to make
31 # changes to source tree. IPF license bad!!
32 # o Darren try get back: says, NetBSD, FreeBSD allowed! MUAHAHAHAH!!!
33 # o Theo say: no care, pf much better than ipf!
34 # o Darren changes mind: changes license. But OpenBSD will not change
35 # back to ipf. Darren even much more bitter.
36 # o Darren so bitterbitter. Decides: I'LL GET BACK BY FORKING OPENBSD AND
37 # RELEASING MY OWN VERSION. HEHEHEHEHE.
39 # http://slashdot.org/comments.pl?sid=26697&cid=2883271
41 ################################################################################
50 from daklib.config import Config
51 from daklib.dbconn import *
52 from daklib import utils
53 from daklib.dak_exceptions import *
54 from daklib.regexes import re_strip_source_version, re_build_dep_arch
55 import debianbts as bts
57 ################################################################################
61 ################################################################################
# Print the command-line help for 'dak rm' to stdout.
# NOTE(review): this view of the file elides some lines; the customary
# trailing sys.exit(exit_code) is not visible here -- confirm against
# the complete file.
63 def usage (exit_code=0):
# Python 2 print statement emitting a single triple-quoted help string
# listing every supported option.
64 print """Usage: dak rm [OPTIONS] PACKAGE[...]
65 Remove PACKAGE(s) from suite(s).
67 -a, --architecture=ARCH only act on this architecture
68 -b, --binary remove binaries only
69 -c, --component=COMPONENT act on this component
70 -C, --carbon-copy=EMAIL send a CC of removal message to EMAIL
71 -d, --done=BUG# send removal message as closure to bug#
72 -D, --do-close also close all bugs associated to that package
73 -h, --help show this help and exit
74 -m, --reason=MSG reason for removal
75 -n, --no-action don't do anything
76 -p, --partial don't affect override files
77 -R, --rdep-check check reverse dependencies
78 -s, --suite=SUITE act on this suite
79 -S, --source-only remove source only
81 ARCH, BUG#, COMPONENT and SUITE can be comma (or space) separated lists, e.g.
82 --architecture=amd64,i386"""
86 ################################################################################
88 # "Hudson: What that's great, that's just fucking great man, now what
89 # the fuck are we supposed to do? We're in some real pretty shit now
90 # man...That's it man, game over man, game over, man! Game over! What
91 # the fuck are we gonna do now? What are we gonna do?"
94 answer = utils.our_raw_input("Continue (y/N)? ").lower()
99 ################################################################################
101 def reverse_depends_check(removals, suite, arches=None, session=None):
# Report reverse dependencies in 'suite' that would break if the
# packages in 'removals' were removed.  Two passes are visible below:
# binary Depends (checked per architecture, counting virtual packages
# via Provides) and source Build-Depends / Build-Depends-Indep.
# Side effect: 'removals' is extended in place with virtual packages
# that are provided only by to-be-removed packages.
# NOTE(review): this view of the file elides several lines (the 'try:'
# statements, the 'params = {' and "statement = '''" openers, the
# session.execute calls, some conditionals) -- the comments below
# describe only what is visible; confirm details against the full file.
102 dbsuite = get_suite(suite, session)
105 print "Checking reverse dependencies..."
110 all_arches = set(arches)
# No explicit arch list: default to every architecture of the suite,
# minus the pseudo-architectures "source" and "all".
112 all_arches = set([x.arch_string for x in get_suite_architectures(suite)])
113 all_arches -= set(["source", "all"])
# Metadata keys used to pull Depends/Provides values out of
# binaries_metadata in the SQL below.
114 metakey_d = get_or_set_metadatakey("Depends", session)
115 metakey_p = get_or_set_metadatakey("Provides", session)
117 'suite_id': dbsuite.suite_id,
118 'metakey_d_id': metakey_d.key_id,
119 'metakey_p_id': metakey_p.key_id,
120 'arch_all_id' : get_architecture('all', session).arch_id,
# One pass per real architecture ("source" and "all" were excluded
# above; arch-all binaries are folded in via :arch_all_id).
122 for architecture in all_arches:
125 virtual_packages = {}
126 params['arch_id'] = get_architecture(architecture, session).arch_id
129 create temp table suite_binaries (
130 id integer primary key,
134 insert into suite_binaries
135 select b.id, b.package, b.source, b.file
136 from binaries b WHERE b.id in
137 (SELECT bin FROM bin_associations WHERE suite = :suite_id)
138 AND b.architecture in (:arch_id, :arch_all_id);
139 SELECT b.id, b.package, s.source, c.name as component,
140 bmd.value as depends, bmp.value as provides
141 FROM suite_binaries b
142 LEFT OUTER JOIN binaries_metadata bmd
143 ON b.id = bmd.bin_id AND bmd.key_id = :metakey_d_id
144 LEFT OUTER JOIN binaries_metadata bmp
145 ON b.id = bmp.bin_id AND bmp.key_id = :metakey_p_id
146 JOIN source s ON b.source = s.id
147 JOIN files f ON b.file = f.id
148 JOIN location l ON f.location = l.id
149 JOIN component c ON l.component = c.id'''
151 query = session.query('id', 'package', 'source', 'component', 'depends', 'provides'). \
152 from_statement(statement).params(params)
153 for binary_id, package, source, component, depends, provides in query:
154 sources[package] = source
155 p2c[package] = component
156 if depends is not None:
157 deps[package] = depends
158 # Maintain a counter for each virtual package. If a
159 # Provides: exists, set the counter to 0 and count all
160 # provides by a package not in the list for removal.
161 # If the counter stays 0 at the end, we know that only
162 # the to-be-removed packages provided this virtual
# package.
164 if provides is not None:
165 for virtual_pkg in provides.split(","):
166 virtual_pkg = virtual_pkg.strip()
167 if virtual_pkg == package: continue
168 if not virtual_packages.has_key(virtual_pkg):
169 virtual_packages[virtual_pkg] = 0
170 if package not in removals:
171 virtual_packages[virtual_pkg] += 1
173 # If a virtual package is only provided by the to-be-removed
174 # packages, treat the virtual package as to-be-removed too.
175 for virtual_pkg in virtual_packages.keys():
176 if virtual_packages[virtual_pkg] == 0:
177 removals.append(virtual_pkg)
179 # Check binary dependencies (Depends)
180 for package in deps.keys():
181 if package in removals: continue
# NOTE(review): the matching 'try:' for the 'except' below sits on an
# elided line; ParseDepends failures are reported but not fatal.
184 parsed_dep += apt_pkg.ParseDepends(deps[package])
185 except ValueError, e:
186 print "Error for package %s: %s" % (package, e)
187 for dep in parsed_dep:
188 # Check for partial breakage. If a package has a ORed
189 # dependency, there is only a dependency problem if all
190 # packages in the ORed depends will be removed.
192 for dep_package, _, _ in dep:
193 if dep_package in removals:
# Dependency is broken only if every OR-alternative is being removed.
195 if unsat == len(dep):
196 component = p2c[package]
197 source = sources[package]
# Non-main packages are reported as "source/component".
198 if component != "main":
199 source = "%s/%s" % (source, component)
200 all_broken.setdefault(source, {}).setdefault(package, set()).add(architecture)
# Report broken binary Depends, grouped by source, one line per binary;
# architectures are listed only when not broken everywhere.
204 print "# Broken Depends:"
205 for source, bindict in sorted(all_broken.items()):
207 for binary, arches in sorted(bindict.items()):
208 if arches == all_arches:
211 lines.append('%s [%s]' % (binary, ' '.join(sorted(arches))))
212 print '%s: %s' % (source, lines[0])
213 for line in lines[1:]:
214 print ' ' * (len(source) + 2) + line
217 # Check source dependencies (Build-Depends and Build-Depends-Indep)
219 metakey_bd = get_or_set_metadatakey("Build-Depends", session)
220 metakey_bdi = get_or_set_metadatakey("Build-Depends-Indep", session)
222 'suite_id': dbsuite.suite_id,
223 'metakey_ids': (metakey_bd.key_id, metakey_bdi.key_id),
226 SELECT s.id, s.source, string_agg(sm.value, ', ') as build_dep
228 JOIN source_metadata sm ON s.id = sm.src_id
230 (SELECT source FROM src_associations
231 WHERE suite = :suite_id)
232 AND sm.key_id in :metakey_ids
233 GROUP BY s.id, s.source'''
234 query = session.query('id', 'source', 'build_dep').from_statement(statement). \
236 for source_id, source, build_dep in query:
237 if source in removals: continue
239 if build_dep is not None:
240 # Remove [arch] information since we want to see breakage on all arches
241 build_dep = re_build_dep_arch.sub("", build_dep)
243 parsed_dep += apt_pkg.ParseDepends(build_dep)
244 except ValueError, e:
245 print "Error for source %s: %s" % (source, e)
246 for dep in parsed_dep:
248 for dep_package, _, _ in dep:
249 if dep_package in removals:
# Same OR-group rule as above: broken only if all alternatives go away.
251 if unsat == len(dep):
252 component = DBSource.get(source_id, session).get_component_name()
253 if component != "main":
254 source = "%s/%s" % (source, component)
255 all_broken.setdefault(source, set()).add(utils.pp_deps(dep))
# Report broken Build-Depends, grouped by source package.
259 print "# Broken Build-Depends:"
260 for source, bdeps in sorted(all_broken.items()):
261 bdeps = sorted(bdeps)
262 print '%s: %s' % (source, bdeps[0])
263 for bdep in bdeps[1:]:
264 print ' ' * (len(source) + 2) + bdep
268 print "Dependency problem found."
269 if not Options["No-Action"]:
272 print "No dependency problem found."
275 ################################################################################
282 Arguments = [('h',"help","Rm::Options::Help"),
283 ('a',"architecture","Rm::Options::Architecture", "HasArg"),
284 ('b',"binary", "Rm::Options::Binary-Only"),
285 ('c',"component", "Rm::Options::Component", "HasArg"),
286 ('C',"carbon-copy", "Rm::Options::Carbon-Copy", "HasArg"), # Bugs to Cc
287 ('d',"done","Rm::Options::Done", "HasArg"), # Bugs fixed
288 ('D',"do-close","Rm::Options::Do-Close"),
289 ('R',"rdep-check", "Rm::Options::Rdep-Check"),
290 ('m',"reason", "Rm::Options::Reason", "HasArg"), # Hysterical raisins; -m is old-dinstall option for rejection reason
291 ('n',"no-action","Rm::Options::No-Action"),
292 ('p',"partial", "Rm::Options::Partial"),
293 ('s',"suite","Rm::Options::Suite", "HasArg"),
294 ('S',"source-only", "Rm::Options::Source-Only"),
297 for i in [ "architecture", "binary-only", "carbon-copy", "component",
298 "done", "help", "no-action", "partial", "rdep-check", "reason",
299 "source-only", "Do-Close" ]:
300 if not cnf.has_key("Rm::Options::%s" % (i)):
301 cnf["Rm::Options::%s" % (i)] = ""
302 if not cnf.has_key("Rm::Options::Suite"):
303 cnf["Rm::Options::Suite"] = "unstable"
305 arguments = apt_pkg.ParseCommandLine(cnf.Cnf, Arguments, sys.argv)
306 Options = cnf.SubTree("Rm::Options")
311 session = DBConn().session()
313 # Sanity check options
315 utils.fubar("need at least one package name as an argument.")
316 if Options["Architecture"] and Options["Source-Only"]:
317 utils.fubar("can't use -a/--architecture and -S/--source-only options simultaneously.")
318 if Options["Binary-Only"] and Options["Source-Only"]:
319 utils.fubar("can't use -b/--binary-only and -S/--source-only options simultaneously.")
320 if Options.has_key("Carbon-Copy") and not Options.has_key("Done"):
321 utils.fubar("can't use -C/--carbon-copy without also using -d/--done option.")
322 if Options["Architecture"] and not Options["Partial"]:
323 utils.warn("-a/--architecture implies -p/--partial.")
324 Options["Partial"] = "true"
325 if Options["Do-Close"] and not Options["Done"]:
327 if Options["Do-Close"] and Options["Binary-Only"]:
329 if Options["Do-Close"] and Options["Source-Only"]:
331 if Options["Do-Close"] and Options["Suite"] != 'unstable':
334 # Force the admin to tell someone if we're not doing a 'dak
335 # cruft-report' inspired removal (or closing a bug, which counts
336 # as telling someone).
337 if not Options["No-Action"] and not Options["Carbon-Copy"] \
338 and not Options["Done"] and Options["Reason"].find("[auto-cruft]") == -1:
339 utils.fubar("Need a -C/--carbon-copy if not closing a bug and not doing a cruft removal.")
341 # Process -C/--carbon-copy
343 # Accept 3 types of arguments (space separated):
344 # 1) a number - assumed to be a bug number, i.e. nnnnn@bugs.debian.org
345 # 2) the keyword 'package' - cc's $package@packages.debian.org for every argument
346 # 3) contains a '@' - assumed to be an email address, used unmodified
349 for copy_to in utils.split_args(Options.get("Carbon-Copy")):
350 if copy_to.isdigit():
351 if cnf.has_key("Dinstall::BugServer"):
352 carbon_copy.append(copy_to + "@" + cnf["Dinstall::BugServer"])
354 utils.fubar("Asked to send mail to #%s in BTS but Dinstall::BugServer is not configured" % copy_to)
355 elif copy_to == 'package':
356 for package in arguments:
357 if cnf.has_key("Dinstall::PackagesServer"):
358 carbon_copy.append(package + "@" + cnf["Dinstall::PackagesServer"])
359 if cnf.has_key("Dinstall::TrackingServer"):
360 carbon_copy.append(package + "@" + cnf["Dinstall::TrackingServer"])
362 carbon_copy.append(copy_to)
364 utils.fubar("Invalid -C/--carbon-copy argument '%s'; not a bug number, 'package' or email address." % (copy_to))
366 if Options["Binary-Only"]:
370 con_packages = "AND %s IN (%s)" % (field, ", ".join([ repr(i) for i in arguments ]))
372 (con_suites, con_architectures, con_components, check_source) = \
373 utils.parse_args(Options)
375 # Additional suite checks
377 suites = utils.split_args(Options["Suite"])
378 suites_list = utils.join_with_commas_and(suites)
379 if not Options["No-Action"]:
381 s = get_suite(suite, session=session)
383 suite_ids_list.append(s.suite_id)
384 if suite in ("oldstable", "stable"):
385 print "**WARNING** About to remove from the (old)stable suite!"
386 print "This should only be done just prior to a (point) release and not at"
387 print "any other time."
389 elif suite == "testing":
390 print "**WARNING About to remove from the testing suite!"
391 print "There's no need to do this normally as removals from unstable will"
392 print "propogate to testing automagically."
395 # Additional architecture checks
396 if Options["Architecture"] and check_source:
397 utils.warn("'source' in -a/--argument makes no sense and is ignored.")
399 # Additional component processing
400 over_con_components = con_components.replace("c.id", "component")
402 # Don't do dependency checks on multiple suites
403 if Options["Rdep-Check"] and len(suites) > 1:
404 utils.fubar("Reverse dependency check on multiple suites is not implemented.")
411 # We have 3 modes of package selection: binary-only, source-only
412 # and source+binary. The first two are trivial and obvious; the
413 # latter is a nasty mess, but very nice from a UI perspective so
414 # we try to support it.
416 # XXX: TODO: This all needs converting to use placeholders or the object
417 # API. It's an SQL injection dream at the moment
419 if Options["Binary-Only"]:
421 q = session.execute("SELECT b.package, b.version, a.arch_string, b.id, b.maintainer FROM binaries b, bin_associations ba, architecture a, suite su, files f, location l, component c WHERE ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id AND b.file = f.id AND f.location = l.id AND l.component = c.id %s %s %s %s" % (con_packages, con_suites, con_components, con_architectures))
422 for i in q.fetchall():
427 q = session.execute("SELECT l.path, f.filename, s.source, s.version, 'source', s.id, s.maintainer FROM source s, src_associations sa, suite su, files f, location l, component c WHERE sa.source = s.id AND sa.suite = su.id AND s.file = f.id AND f.location = l.id AND l.component = c.id %s %s %s" % (con_packages, con_suites, con_components))
428 for i in q.fetchall():
429 source_packages[i[2]] = i[:2]
430 to_remove.append(i[2:])
431 if not Options["Source-Only"]:
434 # First get a list of binary package names we suspect are linked to the source
435 q = session.execute("SELECT DISTINCT b.package FROM binaries b, source s, src_associations sa, suite su, files f, location l, component c WHERE b.source = s.id AND sa.source = s.id AND sa.suite = su.id AND s.file = f.id AND f.location = l.id AND l.component = c.id %s %s %s" % (con_packages, con_suites, con_components))
436 for i in q.fetchall():
437 binary_packages[i[0]] = ""
438 # Then parse each .dsc that we found earlier to see what binary packages it thinks it produces
439 for i in source_packages.keys():
440 filename = "/".join(source_packages[i])
442 dsc = utils.parse_changes(filename, dsc_file=1)
443 except CantOpenError:
444 utils.warn("couldn't open '%s'." % (filename))
446 for package in dsc.get("binary").split(','):
447 package = package.strip()
448 binary_packages[package] = ""
449 # Then for each binary package: find any version in
450 # unstable, check the Source: field in the deb matches our
451 # source package and if so add it to the list of packages
453 for package in binary_packages.keys():
454 q = session.execute("SELECT l.path, f.filename, b.package, b.version, a.arch_string, b.id, b.maintainer FROM binaries b, bin_associations ba, architecture a, suite su, files f, location l, component c WHERE ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id AND b.file = f.id AND f.location = l.id AND l.component = c.id %s %s %s AND b.package = '%s'" % (con_suites, con_components, con_architectures, package))
455 for i in q.fetchall():
456 filename = "/".join(i[:2])
457 control = apt_pkg.ParseSection(apt_inst.debExtractControl(utils.open_file(filename)))
458 source = control.Find("Source", control.Find("Package"))
459 source = re_strip_source_version.sub('', source)
460 if source_packages.has_key(source):
461 to_remove.append(i[2:])
465 print "Nothing to do."
468 # If we don't have a reason; spawn an editor so the user can add one
469 # Write the rejection email out as the <foo>.reason file
470 if not Options["Reason"] and not Options["No-Action"]:
471 (fd, temp_filename) = utils.temp_filename()
472 editor = os.environ.get("EDITOR","vi")
473 result = os.system("%s %s" % (editor, temp_filename))
475 utils.fubar ("vi invocation failed for `%s'!" % (temp_filename), result)
476 temp_file = utils.open_file(temp_filename)
477 for line in temp_file.readlines():
478 Options["Reason"] += line
480 os.unlink(temp_filename)
482 # Generate the summary of what's to be removed
489 maintainers[maintainer] = ""
490 if not d.has_key(package):
492 if not d[package].has_key(version):
493 d[package][version] = []
494 if architecture not in d[package][version]:
495 d[package][version].append(architecture)
498 for maintainer_id in maintainers.keys():
499 maintainer_list.append(get_maintainer(maintainer_id).name)
504 for package in removals:
505 versions = d[package].keys()
506 versions.sort(apt_pkg.VersionCompare)
507 for version in versions:
508 d[package][version].sort(utils.arch_compare_sw)
509 summary += "%10s | %10s | %s\n" % (package, version, ", ".join(d[package][version]))
510 print "Will remove the following packages from %s:" % (suites_list)
513 print "Maintainer: %s" % ", ".join(maintainer_list)
515 print "Will also close bugs: "+Options["Done"]
517 print "Will also send CCs to: " + ", ".join(carbon_copy)
518 if Options["Do-Close"]:
519 print "Will also close associated bug reports."
521 print "------------------- Reason -------------------"
522 print Options["Reason"]
523 print "----------------------------------------------"
526 if Options["Rdep-Check"]:
527 arches = utils.split_args(Options["Architecture"])
528 reverse_depends_check(removals, suites[0], arches, session)
530 # If -n/--no-action, drop out here
531 if Options["No-Action"]:
534 print "Going to remove the packages now."
537 whoami = utils.whoami()
538 date = commands.getoutput('date -R')
540 # Log first; if it all falls apart I want a record that we at least tried.
541 logfile = utils.open_file(cnf["Rm::LogFile"], 'a')
542 logfile.write("=========================================================================\n")
543 logfile.write("[Date: %s] [ftpmaster: %s]\n" % (date, whoami))
544 logfile.write("Removed the following packages from %s:\n\n%s" % (suites_list, summary))
546 logfile.write("Closed bugs: %s\n" % (Options["Done"]))
547 logfile.write("\n------------------- Reason -------------------\n%s\n" % (Options["Reason"]))
548 logfile.write("----------------------------------------------\n")
550 # Do the same in rfc822 format
551 logfile822 = utils.open_file(cnf["Rm::LogFile822"], 'a')
552 logfile822.write("Date: %s\n" % date)
553 logfile822.write("Ftpmaster: %s\n" % whoami)
554 logfile822.write("Suite: %s\n" % suites_list)
557 for package in summary.split("\n"):
558 for row in package.split("\n"):
559 element = row.split("|")
560 if len(element) == 3:
561 if element[2].find("source") > 0:
562 sources.append("%s_%s" % tuple(elem.strip(" ") for elem in element[:2]))
563 element[2] = sub("source\s?,?", "", element[2]).strip(" ")
565 binaries.append("%s_%s [%s]" % tuple(elem.strip(" ") for elem in element))
567 logfile822.write("Sources:\n")
568 for source in sources:
569 logfile822.write(" %s\n" % source)
571 logfile822.write("Binaries:\n")
572 for binary in binaries:
573 logfile822.write(" %s\n" % binary)
574 logfile822.write("Reason: %s\n" % Options["Reason"].replace('\n', '\n '))
576 logfile822.write("Bug: %s\n" % Options["Done"])
578 dsc_type_id = get_override_type('dsc', session).overridetype_id
579 deb_type_id = get_override_type('deb', session).overridetype_id
581 # Do the actual deletion
589 for suite_id in suite_ids_list:
590 if architecture == "source":
591 session.execute("DELETE FROM src_associations WHERE source = :packageid AND suite = :suiteid",
592 {'packageid': package_id, 'suiteid': suite_id})
593 #print "DELETE FROM src_associations WHERE source = %s AND suite = %s" % (package_id, suite_id)
595 session.execute("DELETE FROM bin_associations WHERE bin = :packageid AND suite = :suiteid",
596 {'packageid': package_id, 'suiteid': suite_id})
597 #print "DELETE FROM bin_associations WHERE bin = %s AND suite = %s" % (package_id, suite_id)
598 # Delete from the override file
599 if not Options["Partial"]:
600 if architecture == "source":
601 type_id = dsc_type_id
603 type_id = deb_type_id
604 # TODO: Again, fix this properly to remove the remaining non-bind argument
605 session.execute("DELETE FROM override WHERE package = :package AND type = :typeid AND suite = :suiteid %s" % (over_con_components), {'package': package, 'typeid': type_id, 'suiteid': suite_id})
609 # If we don't have a Bug server configured, we're done
610 if not cnf.has_key("Dinstall::BugServer"):
611 if Options["Done"] or Options["Do-Close"]:
612 print "Cannot send mail to BugServer as Dinstall::BugServer is not configured"
614 logfile.write("=========================================================================\n")
617 logfile822.write("\n")
622 # read common subst variables for all bug closure mails
624 Subst_common["__RM_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
625 Subst_common["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
626 Subst_common["__CC__"] = "X-DAK: dak rm"
628 Subst_common["__CC__"] += "\nCc: " + ", ".join(carbon_copy)
629 Subst_common["__SUITE_LIST__"] = suites_list
630 Subst_common["__SUBJECT__"] = "Removed package(s) from %s" % (suites_list)
631 Subst_common["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
632 Subst_common["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
633 Subst_common["__WHOAMI__"] = whoami
635 # Send the bug closing messages
637 Subst_close_rm = Subst_common
639 if cnf.Find("Dinstall::Bcc") != "":
640 bcc.append(cnf["Dinstall::Bcc"])
641 if cnf.Find("Rm::Bcc") != "":
642 bcc.append(cnf["Rm::Bcc"])
644 Subst_close_rm["__BCC__"] = "Bcc: " + ", ".join(bcc)
646 Subst_close_rm["__BCC__"] = "X-Filler: 42"
647 summarymail = "%s\n------------------- Reason -------------------\n%s\n" % (summary, Options["Reason"])
648 summarymail += "----------------------------------------------\n"
649 Subst_close_rm["__SUMMARY__"] = summarymail
651 whereami = utils.where_am_i()
652 Archive = get_archive(whereami, session)
654 utils.warn("Cannot find archive %s. Setting blank values for origin" % whereami)
655 Subst_close_rm["__MASTER_ARCHIVE__"] = ""
656 Subst_close_rm["__PRIMARY_MIRROR__"] = ""
658 Subst_close_rm["__MASTER_ARCHIVE__"] = Archive.origin_server
659 Subst_close_rm["__PRIMARY_MIRROR__"] = Archive.primary_mirror
661 for bug in utils.split_args(Options["Done"]):
662 Subst_close_rm["__BUG_NUMBER__"] = bug
663 if Options["Do-Close"]:
664 mail_message = utils.TemplateSubst(Subst_close_rm,cnf["Dir::Templates"]+"/rm.bug-close-with-related")
666 mail_message = utils.TemplateSubst(Subst_close_rm,cnf["Dir::Templates"]+"/rm.bug-close")
667 utils.send_mail(mail_message)
669 # close associated bug reports
670 if Options["Do-Close"]:
671 Subst_close_other = Subst_common
673 wnpp = utils.parse_wnpp_bug_file()
674 if len(versions) == 1:
675 Subst_close_other["__VERSION__"] = versions[0]
677 utils.fubar("Closing bugs with multiple package versions is not supported. Do it yourself.")
679 Subst_close_other["__BCC__"] = "Bcc: " + ", ".join(bcc)
681 Subst_close_other["__BCC__"] = "X-Filler: 42"
682 # at this point, I just assume, that the first closed bug gives
683 # some useful information on why the package got removed
684 Subst_close_other["__BUG_NUMBER__"] = utils.split_args(Options["Done"])[0]
685 if len(sources) == 1:
686 source_pkg = source.split("_", 1)[0]
688 utils.fubar("Closing bugs for multiple source pakcages is not supported. Do it yourself.")
689 Subst_close_other["__BUG_NUMBER_ALSO__"] = ""
690 Subst_close_other["__SOURCE__"] = source_pkg
691 other_bugs = bts.get_bugs('src', source_pkg, 'status', 'open')
693 logfile.write("Also closing bug(s):")
694 logfile822.write("Also-Bugs:")
695 for bug in other_bugs:
696 Subst_close_other["__BUG_NUMBER_ALSO__"] += str(bug) + "-done@" + cnf["Dinstall::BugServer"] + ","
697 logfile.write(" " + str(bug))
698 logfile822.write(" " + str(bug))
700 logfile822.write("\n")
701 if source_pkg in wnpp.keys():
702 logfile.write("Also closing WNPP bug(s):")
703 logfile822.write("Also-WNPP:")
704 for bug in wnpp[source_pkg]:
705 # the wnpp-rm file we parse also contains our removal
706 # bugs, filtering that out
707 if bug != Subst_close_other["__BUG_NUMBER__"]:
708 Subst_close_other["__BUG_NUMBER_ALSO__"] += str(bug) + "-done@" + cnf["Dinstall::BugServer"] + ","
709 logfile.write(" " + str(bug))
710 logfile822.write(" " + str(bug))
712 logfile822.write("\n")
714 mail_message = utils.TemplateSubst(Subst_close_other,cnf["Dir::Templates"]+"/rm.bug-close-related")
715 if Subst_close_other["__BUG_NUMBER_ALSO__"]:
716 utils.send_mail(mail_message)
719 logfile.write("=========================================================================\n")
722 logfile822.write("\n")
725 #######################################################################################
727 if __name__ == '__main__':