5 Queue utility functions for dak
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009 Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ###############################################################################
41 from dak_exceptions import *
43 from regexes import re_default_answer, re_fdnic, re_bin_only_nmu, re_strip_srcver, re_valid_pkg_name, re_isanum, re_no_epoch, re_no_revision
44 from config import Config
46 from summarystats import SummaryStats
47 from utils import parse_changes
48 from textutils import fix_maintainer
50 ###############################################################################
def get_type(f, session=None):
    """
    Get the file type of C{f}.

    @type f: dict
    @param f: file entry from Changes object

    @type session: SQLA Session
    @param session: SQL Alchemy session object; if None, a new one is opened

    @rtype: string
    @return: filetype
    """
    if session is None:
        session = DBConn().session()

    # Prefer the explicit database type if the entry carries one.
    if f.has_key("dbtype"):
        # was: file["dbtype"] -- 'file' is the Python builtin, not this entry
        file_type = f["dbtype"]
    elif f["type"] in [ "orig.tar.gz", "orig.tar.bz2", "tar.gz", "tar.bz2", "diff.gz", "diff.bz2", "dsc" ]:
        # All source-side artefacts are treated as "dsc" for override purposes.
        file_type = "dsc"
    else:
        # was: %-formatted with file_type, which is unbound on this path;
        # report the raw type from the entry instead.
        utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (f["type"]))

    # Validate the override type against the database.
    type_id = get_override_type(file_type, session)
    if type_id is None:
        utils.fubar("invalid type (%s) for new. Say wha?" % (file_type))

    return file_type
81 ################################################################################
83 # Determine what parts in a .changes are NEW
85 def determine_new(changes, files, warn=1):
87 Determine what parts in a C{changes} file are NEW.
89 @type changes: Upload.Pkg.changes dict
90 @param changes: Changes dictionary
92 @type files: Upload.Pkg.files dict
93 @param files: Files dictionary
96 @param warn: Warn if overrides are added for (old)stable
99 @return: dictionary of NEW components.
104 session = DBConn().session()
106 # Build up a list of potentially new things
107 for name, f in files.items():
108 # Skip byhand elements
109 if f["type"] == "byhand":
112 priority = f["priority"]
113 section = f["section"]
114 file_type = get_type(f)
115 component = f["component"]
117 if file_type == "dsc":
120 if not new.has_key(pkg):
122 new[pkg]["priority"] = priority
123 new[pkg]["section"] = section
124 new[pkg]["type"] = file_type
125 new[pkg]["component"] = component
126 new[pkg]["files"] = []
128 old_type = new[pkg]["type"]
129 if old_type != file_type:
130 # source gets trumped by deb or udeb
131 if old_type == "dsc":
132 new[pkg]["priority"] = priority
133 new[pkg]["section"] = section
134 new[pkg]["type"] = file_type
135 new[pkg]["component"] = component
137 new[pkg]["files"].append(name)
139 if f.has_key("othercomponents"):
140 new[pkg]["othercomponents"] = f["othercomponents"]
142 for suite in changes["suite"].keys():
143 for pkg in new.keys():
144 ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
146 for file_entry in new[pkg]["files"]:
147 if files[file_entry].has_key("new"):
148 del files[file_entry]["new"]
152 for s in ['stable', 'oldstable']:
153 if changes["suite"].has_key(s):
154 print "WARNING: overrides will be added for %s!" % s
155 for pkg in new.keys():
156 if new[pkg].has_key("othercomponents"):
157 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
161 ################################################################################
def check_valid(new):
    """
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages can not be udeb
      - source priority can only be assigned to dsc file types

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.
        Modified in place: "section id" / "priority id" are set (-1 when
        invalid).
    """
    for pkg in new.keys():
        section_name = new[pkg]["section"]
        priority_name = new[pkg]["priority"]
        file_type = new[pkg]["type"]

        # Map section/priority names to database ids; -1 flags "unknown".
        section = get_section(section_name)
        if section is None:
            new[pkg]["section id"] = -1
        else:
            new[pkg]["section id"] = section.section_id

        priority = get_priority(priority_name)
        if priority is None:
            new[pkg]["priority id"] = -1
        else:
            new[pkg]["priority id"] = priority.priority_id

        # Sanity checks
        di = section_name.find("debian-installer") != -1

        # If d-i, we must be udeb and vice-versa
        if (di and file_type not in ("udeb", "dsc")) or \
           (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1

        # If dsc we need to be source and vice-versa.
        # was: compared the Priority DB object (or None) against the string
        # "source", which can never match -- compare the name instead.
        if (priority_name == "source" and file_type != "dsc") or \
           (priority_name != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1
205 ###############################################################################
207 class Upload(object):
209 Everything that has to do with an upload processed.
216 ###########################################################################
219 """ Reset a number of internal variables."""
221 # Initialize the substitution template map
224 self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
225 self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
226 self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
227 self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
235 def package_info(self):
238 if len(self.rejects) > 0:
239 msg += "Reject Reasons:\n"
240 msg += "\n".join(self.rejects)
242 if len(self.warnings) > 0:
244 msg += "\n".join(self.warnings)
246 if len(self.notes) > 0:
248 msg += "\n".join(self.notes)
252 ###########################################################################
253 def update_subst(self):
254 """ Set up the per-package template substitution mappings """
258 # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
259 if not self.pkg.changes.has_key("architecture") or not \
260 isinstance(changes["architecture"], DictType):
261 self.pkg.changes["architecture"] = { "Unknown" : "" }
263 # and maintainer2047 may not exist.
264 if not self.pkg.changes.has_key("maintainer2047"):
265 self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]
267 self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
268 self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
269 self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")
271 # For source uploads the Changed-By field wins; otherwise Maintainer wins.
272 if self.pkg.changes["architecture"].has_key("source") and \
273 self.pkg.changes["changedby822"] != "" and \
274 (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
276 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
277 self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], changes["maintainer2047"])
278 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
280 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
281 self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
282 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")
284 if "sponsoremail" in self.pkg.changes:
285 self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
287 if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
288 self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
290 # Apply any global override of the Maintainer field
291 if cnf.get("Dinstall::OverrideMaintainer"):
292 self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
293 self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]
295 self.Subst["__REJECT_MESSAGE__"] = self.package_info()
296 self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
297 self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
299 ###########################################################################
300 def load_changes(self, filename):
303 @rvalue: whether the changes file was valid or not. We may want to
304 reject even if this is True (see what gets put in self.rejects).
305 This is simply to prevent us even trying things later which will
306 fail because we couldn't properly parse the file.
308 self.pkg.changes_file = filename
310 # Parse the .changes field into a dictionary
312 self.pkg.changes.update(parse_changes(filename))
313 except CantOpenError:
314 self.rejects.append("%s: can't read file." % (filename))
316 except ParseChangesError, line:
317 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
319 except ChangesUnicodeError:
320 self.rejects.append("%s: changes file not proper utf-8" % (filename))
323 # Parse the Files field from the .changes into another dictionary
325 self.pkg.files.update(build_file_list(self.pkg.changes))
326 except ParseChangesError, line:
327 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
329 except UnknownFormatError, format:
330 self.rejects.append("%s: unknown format '%s'." % (filename, format))
333 # Check for mandatory fields
334 for i in ("distribution", "source", "binary", "architecture",
335 "version", "maintainer", "files", "changes", "description"):
336 if not self.pkg.changes.has_key(i):
337 # Avoid undefined errors later
338 self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
341 # Strip a source version in brackets from the source field
342 if re_strip_srcver.search(self.pkg.changes["source"]):
343 self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])
345 # Ensure the source field is a valid package name.
346 if not re_valid_pkg_name.match(self.pkg.changes["source"]):
347 self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))
349 # Split multi-value fields into a lower-level dictionary
350 for i in ("architecture", "distribution", "binary", "closes"):
351 o = self.pkg.changes.get(i, "")
353 del self.pkg.changes[i]
355 self.pkg.changes[i] = {}
358 self.pkg.changes[i][j] = 1
360 # Fix the Maintainer: field to be RFC822/2047 compatible
362 (self.pkg.changes["maintainer822"],
363 self.pkg.changes["maintainer2047"],
364 self.pkg.changes["maintainername"],
365 self.pkg.changes["maintaineremail"]) = \
366 fix_maintainer (self.pkg.changes["maintainer"])
367 except ParseMaintError, msg:
368 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
369 % (filename, changes["maintainer"], msg))
371 # ...likewise for the Changed-By: field if it exists.
373 (self.pkg.changes["changedby822"],
374 self.pkg.changes["changedby2047"],
375 self.pkg.changes["changedbyname"],
376 self.pkg.changes["changedbyemail"]) = \
377 fix_maintainer (self.pkg.changes.get("changed-by", ""))
378 except ParseMaintError, msg:
379 self.pkg.changes["changedby822"] = ""
380 self.pkg.changes["changedby2047"] = ""
381 self.pkg.changes["changedbyname"] = ""
382 self.pkg.changes["changedbyemail"] = ""
384 self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
385 % (filename, changes["changed-by"], msg))
387 # Ensure all the values in Closes: are numbers
388 if self.pkg.changes.has_key("closes"):
389 for i in self.pkg.changes["closes"].keys():
390 if re_isanum.match (i) == None:
391 self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))
393 # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
394 self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
395 self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])
397 # Check there isn't already a changes file of the same name in one
398 # of the queue directories.
399 base_filename = os.path.basename(filename)
400 for d in [ "Accepted", "Byhand", "Done", "New", "ProposedUpdates", "OldProposedUpdates" ]:
401 if os.path.exists(os.path.join(Cnf["Dir::Queue::%s" % (d) ], base_filename):
402 self.rejects.append("%s: a file with this name already exists in the %s directory." % (base_filename, d))
404 # Check the .changes is non-empty
405 if not self.pkg.files:
406 self.rejects.append("%s: nothing to do (Files field is empty)." % (base_filename))
409 # Changes was syntactically valid even if we'll reject
412 ###########################################################################
414 def check_distributions(self):
415 "Check and map the Distribution field"
419 # Handle suite mappings
420 for m in Cnf.ValueList("SuiteMappings"):
423 if mtype == "map" or mtype == "silent-map":
424 (source, dest) = args[1:3]
425 if self.pkg.changes["distribution"].has_key(source):
426 del self.pkg.changes["distribution"][source]
427 self.pkg.changes["distribution"][dest] = 1
428 if mtype != "silent-map":
429 self.notes.append("Mapping %s to %s." % (source, dest))
430 if self.pkg.changes.has_key("distribution-version"):
431 if self.pkg.changes["distribution-version"].has_key(source):
432 self.pkg.changes["distribution-version"][source]=dest
433 elif mtype == "map-unreleased":
434 (source, dest) = args[1:3]
435 if self.pkg.changes["distribution"].has_key(source):
436 for arch in self.pkg.changes["architecture"].keys():
437 if arch not in [ arch_string for a in get_suite_architectures(source) ]:
438 self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
439 del self.pkg.changes["distribution"][source]
440 self.pkg.changes["distribution"][dest] = 1
442 elif mtype == "ignore":
444 if self.pkg.changes["distribution"].has_key(suite):
445 del self.pkg.changes["distribution"][suite]
446 self.warnings.append("Ignoring %s as a target suite." % (suite))
447 elif mtype == "reject":
449 if self.pkg.changes["distribution"].has_key(suite):
450 self.rejects.append("Uploads to %s are not accepted." % (suite))
451 elif mtype == "propup-version":
452 # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
454 # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
455 if self.pkg.changes["distribution"].has_key(args[1]):
456 self.pkg.changes.setdefault("distribution-version", {})
457 for suite in args[2:]:
458 self.pkg.changes["distribution-version"][suite] = suite
460 # Ensure there is (still) a target distribution
461 if len(self.pkg.changes["distribution"].keys()) < 1:
462 self.rejects.append("No valid distribution remaining.")
464 # Ensure target distributions exist
465 for suite in self.pkg.changes["distribution"].keys():
466 if not Cnf.has_key("Suite::%s" % (suite)):
467 self.rejects.append("Unknown distribution `%s'." % (suite))
469 ###########################################################################
471 def build_summaries(self):
472 """ Build a summary of changes the upload introduces. """
474 (byhand, new, summary, override_summary) = self.pkg.file_summary()
476 short_summary = summary
478 # This is for direport's benefit...
479 f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
482 summary += "Changes: " + f
484 summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
486 summary += self.announce(short_summary, 0)
488 return (summary, short_summary)
490 ###########################################################################
492 def close_bugs(self, summary, action):
494 Send mail to close bugs as instructed by the closes field in the changes file.
495 Also add a line to summary if any work was done.
497 @type summary: string
498 @param summary: summary text, as given by L{build_summaries}
501 @param action: Set to false no real action will be done.
504 @return: summary. If action was taken, extended by the list of closed bugs.
508 template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
510 bugs = self.pkg.changes["closes"].keys()
516 summary += "Closing bugs: "
518 summary += "%s " % (bug)
520 self.Subst["__BUG_NUMBER__"] = bug
521 if self.pkg.changes["distribution"].has_key("stable"):
522 self.Subst["__STABLE_WARNING__"] = """
523 Note that this package is not part of the released stable Debian
524 distribution. It may have dependencies on other unreleased software,
525 or other instabilities. Please take care if you wish to install it.
526 The update will eventually make its way into the next released Debian
529 self.Subst["__STABLE_WARNING__"] = ""
530 mail_message = utils.TemplateSubst(self.Subst, template)
531 utils.send_mail(mail_message)
533 # Clear up after ourselves
534 del self.Subst["__BUG_NUMBER__"]
535 del self.Subst["__STABLE_WARNING__"]
538 self.Logger.log(["closing bugs"] + bugs)
544 ###########################################################################
546 def announce(self, short_summary, action):
548 Send an announce mail about a new upload.
550 @type short_summary: string
551 @param short_summary: Short summary text to include in the mail
554 @param action: Set to false no real action will be done.
557 @return: Textstring about action taken.
562 announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
564 # Only do announcements for source uploads with a recent dpkg-dev installed
565 if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
566 self.pkg.changes["architecture"].has_key("source"):
572 self.Subst["__SHORT_SUMMARY__"] = short_summary
574 for dist in self.pkg.changes["distribution"].keys():
575 announce_list = Cnf.Find("Suite::%s::Announce" % (dist))
576 if announce_list == "" or lists_done.has_key(announce_list):
579 lists_done[announce_list] = 1
580 summary += "Announcing to %s\n" % (announce_list)
583 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
584 if cnf.get("Dinstall::TrackingServer") and \
585 self.pkg.changes["architecture"].has_key("source"):
586 trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
587 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
589 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
590 utils.send_mail(mail_message)
592 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
594 if cnf.FindB("Dinstall::CloseBugs"):
595 summary = self.close_bugs(summary, action)
597 del self.Subst["__SHORT_SUMMARY__"]
601 ###########################################################################
603 def accept (self, summary, short_summary, targetdir=None):
607 This moves all files referenced from the .changes into the I{accepted}
608 queue, sends the accepted mail, announces to lists, closes bugs and
609 also checks for override disparities. If enabled it will write out
610 the version history for the BTS Version Tracking and will finally call
613 @type summary: string
614 @param summary: Summary text
616 @type short_summary: string
617 @param short_summary: Short summary
622 stats = SummaryStats()
624 accepttemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted')
626 if targetdir is None:
627 targetdir = cnf["Dir::Queue::Accepted"]
630 self.Logger.log(["Accepting changes", self.pkg.changes_file])
632 self.write_dot_dak(targetdir)
634 # Move all the files into the accepted directory
635 utils.move(self.pkg.changes_file, targetdir)
637 for name, entry in sorted(self.pkg.files.items()):
638 utils.move(name, targetdir)
639 stats.accept_bytes += float(entry["size"])
641 stats.accept_count += 1
643 # Send accept mail, announce to lists, close bugs and check for
644 # override disparities
645 if not cnf["Dinstall::Options::No-Mail"]:
646 self.Subst["__SUITE__"] = ""
647 self.Subst["__SUMMARY__"] = summary
648 mail_message = utils.TemplateSubst(self.Subst, accepttemplate)
649 utils.send_mail(mail_message)
650 self.announce(short_summary, 1)
652 ## Helper stuff for DebBugs Version Tracking
653 if cnf.Find("Dir::Queue::BTSVersionTrack"):
654 # ??? once queue/* is cleared on *.d.o and/or reprocessed
655 # the conditionalization on dsc["bts changelog"] should be
658 # Write out the version history from the changelog
659 if self.pkg.changes["architecture"].has_key("source") and \
660 self.pkg.dsc.has_key("bts changelog"):
662 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
663 version_history = os.fdopen(fd, 'w')
664 version_history.write(self.pkg.dsc["bts changelog"])
665 version_history.close()
666 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
667 self.pkg.changes_file[:-8]+".versions")
668 os.rename(temp_filename, filename)
669 os.chmod(filename, 0644)
671 # Write out the binary -> source mapping.
672 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
673 debinfo = os.fdopen(fd, 'w')
674 for name, entry in sorted(self.pkg.files.items()):
675 if entry["type"] == "deb":
676 line = " ".join([entry["package"], entry["version"],
677 entry["architecture"], entry["source package"],
678 entry["source version"]])
679 debinfo.write(line+"\n")
681 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
682 self.pkg.changes_file[:-8]+".debinfo")
683 os.rename(temp_filename, filename)
684 os.chmod(filename, 0644)
686 # Its is Cnf["Dir::Queue::Accepted"] here, not targetdir!
687 # <Ganneff> we do call queue_build too
688 # <mhy> well yes, we'd have had to if we were inserting into accepted
689 # <Ganneff> now. thats database only.
690 # <mhy> urgh, that's going to get messy
691 # <Ganneff> so i make the p-n call to it *also* using accepted/
692 # <mhy> but then the packages will be in the queue_build table without the files being there
693 # <Ganneff> as the buildd queue is only regenerated whenever unchecked runs
694 # <mhy> ah, good point
695 # <Ganneff> so it will work out, as unchecked move it over
696 # <mhy> that's all completely sick
699 # This routine returns None on success or an error on failure
700 res = get_queue('accepted').autobuild_upload(self.pkg, cnf["Dir::Queue::Accepted"])
705 def check_override (self):
707 Checks override entries for validity. Mails "Override disparity" warnings,
708 if that feature is enabled.
710 Abandons the check if
711 - override disparity checks are disabled
712 - mail sending is disabled
717 # Abandon the check if:
718 # a) override disparity checks have been disabled
719 # b) we're not sending mail
720 if not cnf.FindB("Dinstall::OverrideDisparityCheck") or \
721 cnf["Dinstall::Options::No-Mail"]:
724 summary = self.pkg.check_override()
729 overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
731 self.Subst["__SUMMARY__"] = summary
732 mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
733 utils.send_mail(mail_message)
734 del self.Subst["__SUMMARY__"]
736 ###########################################################################
737 def force_reject(self, reject_files):
739 Forcefully move files from the current directory to the
740 reject directory. If any file already exists in the reject
741 directory it will be moved to the morgue to make way for
745 @param files: file dictionary
751 for file_entry in reject_files:
752 # Skip any files which don't exist or which we don't have permission to copy.
753 if os.access(file_entry, os.R_OK) == 0:
756 dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
759 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
761 # File exists? Let's try and move it to the morgue
762 if e.errno == errno.EEXIST:
763 morgue_file = os.path.join(cnf["Dir::Morgue"], cnf["Dir::MorgueReject"], file_entry)
765 morgue_file = utils.find_next_free(morgue_file)
766 except NoFreeFilenameError:
767 # Something's either gone badly Pete Tong, or
768 # someone is trying to exploit us.
769 utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file_entry))
771 utils.move(dest_file, morgue_file, perms=0660)
773 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
776 utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
780 # If we got here, we own the destination file, so we can
781 # safely overwrite it.
782 utils.move(file_entry, dest_file, 1, perms=0660)
785 ###########################################################################
786 def do_reject (self, manual=0, reject_message="", note=""):
788 Reject an upload. If called without a reject message or C{manual} is
789 true, spawn an editor so the user can write one.
792 @param manual: manual or automated rejection
794 @type reject_message: string
795 @param reject_message: A reject message
800 # If we weren't given a manual rejection message, spawn an
801 # editor so the user can add one in...
802 if manual and not reject_message:
803 (fd, temp_filename) = utils.temp_filename()
804 temp_file = os.fdopen(fd, 'w')
807 temp_file.write(line)
809 editor = os.environ.get("EDITOR","vi")
812 os.system("%s %s" % (editor, temp_filename))
813 temp_fh = utils.open_file(temp_filename)
814 reject_message = "".join(temp_fh.readlines())
816 print "Reject message:"
817 print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
818 prompt = "[R]eject, Edit, Abandon, Quit ?"
820 while prompt.find(answer) == -1:
821 answer = utils.our_raw_input(prompt)
822 m = re_default_answer.search(prompt)
825 answer = answer[:1].upper()
826 os.unlink(temp_filename)
836 reason_filename = self.pkg.changes_file[:-8] + ".reason"
837 reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
839 # Move all the files into the reject directory
840 reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
841 self.force_reject(reject_files)
843 # If we fail here someone is probably trying to exploit the race
844 # so let's just raise an exception ...
845 if os.path.exists(reason_filename):
846 os.unlink(reason_filename)
847 reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
849 rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
852 self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
853 self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
854 self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
855 os.write(reason_fd, reject_message)
856 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
858 # Build up the rejection email
859 user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
860 self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
861 self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
862 self.Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
863 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
864 # Write the rejection email out as the <foo>.reason file
865 os.write(reason_fd, reject_mail_message)
867 del self.Subst["__REJECTOR_ADDRESS__"]
868 del self.Subst["__MANUAL_REJECT_MESSAGE__"]
869 del self.Subst["__CC__"]
873 # Send the rejection mail if appropriate
874 if not cnf["Dinstall::Options::No-Mail"]:
875 utils.send_mail(reject_mail_message)
877 self.Logger.log(["rejected", pkg.changes_file])
881 ################################################################################
882 def in_override_p(self, package, component, suite, binary_type, file, session=None):
884 Check if a package already has override entries in the DB
886 @type package: string
887 @param package: package name
889 @type component: string
890 @param component: database id of the component
893 @param suite: database id of the suite
895 @type binary_type: string
896 @param binary_type: type of the package
899 @param file: filename we check
901 @return: the database result. But noone cares anyway.
908 session = DBConn().session()
910 if binary_type == "": # must be source
913 file_type = binary_type
915 # Override suite name; used for example with proposed-updates
916 if cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
917 suite = cnf["Suite::%s::OverrideSuite" % (suite)]
919 result = get_override(package, suite, component, file_type, session)
921 # If checking for a source package fall back on the binary override type
922 if file_type == "dsc" and len(result) < 1:
923 result = get_override(package, suite, component, ['deb', 'udeb'], session)
925 # Remember the section and priority so we can check them later if appropriate
928 self.pkg.files[file]["override section"] = result.section.section
929 self.pkg.files[file]["override priority"] = result.priority.priority
934 ################################################################################
935 def get_anyversion(self, sv_list, suite):
938 @param sv_list: list of (suite, version) tuples to check
941 @param suite: suite name
946 anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
947 for (s, v) in sv_list:
948 if s in [ x.lower() for x in anysuite ]:
949 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
954 ################################################################################
956 def cross_suite_version_check(self, sv_list, file, new_version, sourceful=False):
959 @param sv_list: list of (suite, version) tuples to check
964 @type new_version: string
965 @param new_version: XXX
967 Ensure versions are newer than existing packages in target
968 suites and that cross-suite version checking rules as
969 set out in the conf file are satisfied.
974 # Check versions for each target suite
975 for target_suite in self.pkg.changes["distribution"].keys():
976 must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
977 must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
979 # Enforce "must be newer than target suite" even if conffile omits it
980 if target_suite not in must_be_newer_than:
981 must_be_newer_than.append(target_suite)
983 for (suite, existent_version) in sv_list:
984 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
986 if suite in must_be_newer_than and sourceful and vercmp < 1:
987 self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
989 if suite in must_be_older_than and vercmp > -1:
992 if self.pkg.changes.get('distribution-version', {}).has_key(suite):
993 # we really use the other suite, ignoring the conflicting one ...
994 addsuite = self.pkg.changes["distribution-version"][suite]
996 add_version = self.get_anyversion(sv_list, addsuite)
997 target_version = self.get_anyversion(sv_list, target_suite)
1000 # not add_version can only happen if we map to a suite
1001 # that doesn't enhance the suite we're propup'ing from.
1002 # so "propup-ver x a b c; map a d" is a problem only if
1003 # d doesn't enhance a.
1005 # i think we could always propagate in this case, rather
1006 # than complaining. either way, this isn't a REJECT issue
1008 # And - we really should complain to the dorks who configured dak
1009 self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite))
1010 self.pkg.changes.setdefault("propdistribution", {})
1011 self.pkg.changes["propdistribution"][addsuite] = 1
1013 elif not target_version:
1014 # not targets_version is true when the package is NEW
1015 # we could just stick with the "...old version..." REJECT
1016 # for this, I think.
1017 self.rejects.append("Won't propogate NEW packages.")
1018 elif apt_pkg.VersionCompare(new_version, add_version) < 0:
1019 # propogation would be redundant. no need to reject though.
1020 self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
1022 elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
1023 apt_pkg.VersionCompare(add_version, target_version) >= 0:
1025 self.warnings.append("Propogating upload to %s" % (addsuite))
1026 self.pkg.changes.setdefault("propdistribution", {})
1027 self.pkg.changes["propdistribution"][addsuite] = 1
1031 self.reject.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
1033 ################################################################################
def check_binary_against_db(self, file, session=None):
    """
    Check an uploaded binary package against the database.

    Runs the cross-suite version checks for the package on its own
    architecture (plus 'all'), and rejects the upload if an identical
    package/version/architecture is already present in the archive.

    @type file: string
    @param file: filename (key into C{self.pkg.files}) of the binary to check

    @type session: SQLAlchemy session
    @param session: optional existing database session; a new one is opened
                    only when none is supplied
    """
    # Don't clobber a session handed to us by the caller.
    if session is None:
        session = DBConn().session()

    # Ensure version is sane: gather every (suite, version) pair this
    # package already has for our architecture (or 'all') and cross-check.
    q = session.query(BinAssociation)
    q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[file]["package"])
    q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[file]["architecture"], 'all']))

    self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
                                   file, self.pkg.files[file]["version"], sourceful=False)

    # Check for any existing copies of the file.
    # (was: filter_by(files[file]["package"]) -- 'files' is undefined in this
    # scope, and SQLAlchemy's filter_by() only accepts keyword arguments, so
    # the package name must be passed explicitly as package=...)
    q = session.query(DBBinary).filter_by(package=self.pkg.files[file]["package"])
    q = q.filter_by(version=self.pkg.files[file]["version"])
    q = q.join(Architecture).filter_by(arch_string=self.pkg.files[file]["architecture"])

    # Only reject when an existing copy is actually found.
    if q.count() > 0:
        self.rejects.append("%s: can not overwrite existing copy already in the archive." % (file))
1059 ################################################################################
def check_source_against_db(self, file, session=None):
    """
    Check the source package named in the .dsc against the database.

    Collects every (suite, version) pair the source already has in the
    archive and runs the cross-suite version checks on them.

    @type file: string
    @param file: filename of the upload being checked (used in reject messages)

    @type session: SQLAlchemy session
    @param session: optional existing database session; a new one is opened
                    only when none is supplied
    """
    # Only open a fresh session when the caller did not provide one;
    # unconditionally creating one would ignore/clobber the parameter.
    if session is None:
        session = DBConn().session()

    source = self.pkg.dsc.get("source")
    version = self.pkg.dsc.get("version")

    # Ensure version is sane
    q = session.query(SrcAssociation)
    q = q.join(DBSource).filter(DBSource.source==source)

    self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
                                   file, version, sourceful=True)
1077 ################################################################################
def check_dsc_against_db(self, file):
    """
    Check the files listed in the .dsc against the upload and the archive,
    locating the .orig.tar.gz in incoming, the pool, or the queue dirs.

    @warning: NB: this function can remove entries from the 'files' index [if
    the .orig.tar.gz is a duplicate of the one in the archive]; if
    you're iterating over 'files' and call this function as part of
    the loop, be sure to add a check to the top of the loop to
    ensure you haven't just tried to dereference the deleted entry.
    """
    # Nothing located yet: stays None, set to a path when an orig tarball is
    # found, or to the sentinel -1 when one is referenced but missing.
    self.pkg.orig_tar_gz = None

    # Try and find all files mentioned in the .dsc. This has
    # to work harder to cope with the multiple possible
    # locations of an .orig.tar.gz.
    # The ordering on the select is needed to pick the newest orig
    # when it exists in multiple places.
    for dsc_name, dsc_entry in self.pkg.dsc_files.items():
        # Case 1: the .dsc entry is satisfied by a file in this upload.
        if self.pkg.files.has_key(dsc_name):
            actual_md5 = self.pkg.files[dsc_name]["md5sum"]
            actual_size = int(self.pkg.files[dsc_name]["size"])
            found = "%s in incoming" % (dsc_name)

            # Check the file does not already exist in the archive
            ql = get_poolfile_like_name(dsc_name)

            # Strip out anything that isn't '%s' or '/%s$'
            # NOTE(review): the `for i in ql:` header and the removal of
            # non-matching entries appear truncated in this excerpt -- `i`
            # is otherwise unbound here; confirm against the full source.
            if not i.filename.endswith(dsc_name):

            # "[dak] has not broken them. [dak] has fixed a
            # brokenness. Your crappy hack exploited a bug in
            # the old dinstall.
            #
            # "(Come on! I thought it was always obvious that
            # one just doesn't release different files with
            # the same name and version.)"
            # -- ajk@ on d-devel@l.d.o

            # Ignore exact matches for .orig.tar.gz
            if dsc_name.endswith(".orig.tar.gz"):
                # An orig identical (size and md5) to the pool copy is not
                # an error: warn and point orig_tar_gz at the pool copy.
                if self.pkg.files.has_key(dsc_name) and \
                   int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
                   self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
                    self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
                    # TODO: Don't delete the entry, just mark it as not needed
                    # This would fix the stupidity of changing something we often iterate over
                    # whilst we're doing it
                    self.pkg.orig_tar_gz = os.path.join(i.location.path, i.filename)

            # NOTE(review): this reject presumably fires only when no exact
            # match was found above; its guard seems truncated in this excerpt.
            self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))

        # Case 2: not in the upload, but it's an orig tarball -- look for it
        # in the pool.
        elif dsc_name.endswith(".orig.tar.gz"):
            ql = get_poolfile_like_name(dsc_name)

            # Strip out anything that isn't '%s' or '/%s$'
            # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
            # NOTE(review): as above, the loop header over ql appears
            # truncated here; `i` is otherwise unbound.
            if not i.filename.endswith(dsc_name):

            # Unfortunately, we may get more than one match here if,
            # for example, the package was in potato but had an -sa
            # upload in woody. So we need to choose the right one.

            # default to something sane in case we don't match any or have only one
            # Checksum each candidate so the one matching the .dsc wins.
            old_file = os.path.join(i.location.path, i.filename)
            old_file_fh = utils.open_file(old_file)
            actual_md5 = apt_pkg.md5sum(old_file_fh)
            actual_size = os.stat(old_file)[stat.ST_SIZE]
            if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):

            # Re-read the selected pool file and record its details.
            old_file = os.path.join(i.location.path, i.filename)
            old_file_fh = utils.open_file(old_file)
            actual_md5 = apt_pkg.md5sum(old_file_fh)
            actual_size = os.stat(old_file)[stat.ST_SIZE]
            # NOTE(review): `f` is not bound anywhere visible in this excerpt
            # (presumably the chosen pool-file record); confirm against the
            # full source.
            suite_type = f.location.archive_type
            # need this for updating dsc_files in install()
            dsc_entry["files id"] = f.file_id
            # See install() in process-accepted...
            self.pkg.orig_tar_id = f.file_id
            self.pkg.orig_tar_gz = old_file
            self.pkg.orig_tar_location = f.location.location_id

            # TODO: Record the queues and info in the DB so we don't hardcode all this crap
            # Not there? Check the queue directories...
            for directory in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (directory)], dsc_name)
                if os.path.exists(in_otherdir):
                    in_otherdir_fh = utils.open_file(in_otherdir)
                    actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
                    in_otherdir_fh.close()
                    actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
                    self.pkg.orig_tar_gz = in_otherdir

            # Referenced orig tarball found nowhere: reject and mark
            # orig_tar_gz with the sentinel -1.
            # NOTE(review): presumably guarded by a not-found check that is
            # truncated in this excerpt.
            self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_name))
            self.pkg.orig_tar_gz = -1

        # Case 3: not in the upload and not an orig tarball -- it should have
        # been shipped with the upload, so it's simply missing.
        # NOTE(review): presumably an `else:` branch in the full source.
        self.rejects.append("%s refers to %s, but I can't find it in the queue." % (file, dsc_name))

        # Finally, verify the checksum and size recorded in the .dsc against
        # whatever copy of the file we actually found.
        if actual_md5 != dsc_entry["md5sum"]:
            self.rejects.append("md5sum for %s doesn't match %s." % (found, file))
        if actual_size != int(dsc_entry["size"]):
            self.rejects.append("size for %s doesn't match %s." % (found, file))