5 Queue utility functions for dak
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009 Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ###############################################################################
41 from dak_exceptions import *
43 from regexes import re_default_answer, re_fdnic, re_bin_only_nmu
44 from config import Config
45 from summarystats import SummaryStats
49 ###############################################################################
51 # Determine what parts in a .changes are NEW
def determine_new(changes, files, projectB, warn=1):
    """
    Determine what parts in a C{changes} file are NEW.

    @type changes: Upload.Pkg.changes dict
    @param changes: Changes dictionary

    @type files: Upload.Pkg.files dict
    @param files: Files dictionary

    @type projectB: pgobject
    @param projectB: DB handle

    @param warn: Warn if overrides are added for (old)stable

    @return: dictionary of NEW components.
    """
    # NOTE(review): this dump is elided; lines such as the initialisation of
    # 'new' and the per-file bindings (f = files[file_entry], pkg = ...) are
    # not visible here -- confirm against the full file.

    # Build up a list of potentially new things
    for file_entry in files.keys():
        # Skip byhand elements
        if f["type"] == "byhand":
            # (elided: continue)
        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f)
        component = f["component"]

        # .dsc entries are keyed by source package name (binding elided)
        if file_type == "dsc":
            # (elided: pkg = f["source"])

        # First sighting of this package: seed its override metadata.
        if not new.has_key(pkg):
            # (elided: new[pkg] = {})
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        # (elided: else:)
            old_type = new[pkg]["type"]
            if old_type != file_type:
                # source gets trumped by deb or udeb
                if old_type == "dsc":
                    new[pkg]["priority"] = priority
                    new[pkg]["section"] = section
                    new[pkg]["type"] = file_type
                    new[pkg]["component"] = component
        new[pkg]["files"].append(file_entry)
        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]

    # Anything that already has an override entry in a target suite is not
    # NEW there: drop the "new" marker from its files.
    for suite in changes["suite"].keys():
        suite_id = database.get_suite_id(suite)
        for pkg in new.keys():
            component_id = database.get_component_id(new[pkg]["component"])
            type_id = database.get_override_type_id(new[pkg]["type"])
            q = projectB.query("SELECT package FROM override WHERE package = '%s' AND suite = %s AND component = %s AND type = %s" % (pkg, suite_id, component_id, type_id))
            # (elided: guard -- presumably only executed when the override
            #  query returned a row; TODO confirm)
            for file_entry in new[pkg]["files"]:
                if files[file_entry].has_key("new"):
                    del files[file_entry]["new"]

    # (elided: if warn:)
        # Adding overrides to (old)stable is almost always a mistake.
        if changes["suite"].has_key("stable"):
            print "WARNING: overrides will be added for stable!"
        if changes["suite"].has_key("oldstable"):
            print "WARNING: overrides will be added for OLDstable!"
        for pkg in new.keys():
            if new[pkg].has_key("othercomponents"):
                print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
133 ################################################################################
    Get the file type of C{file}

    @param file: file entry
    # NOTE(review): the 'def get_type(...)' line and several branch lines
    # (the "dsc" assignment, the 'else:') are elided from this dump.

    # Prefer the explicit database type when the entry already carries one.
    if file.has_key("dbtype"):
        file_type = file["dbtype"]
    # All source-related artefacts map to the "dsc" override type.
    elif file["type"] in [ "orig.tar.gz", "orig.tar.bz2", "tar.gz", "tar.bz2", "diff.gz", "diff.bz2", "dsc" ]:
        # (elided: file_type = "dsc"; else: fubar on unknown type)
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (file_type))

    # Validate the override type
    type_id = database.get_override_type_id(file_type)
    # (elided: if type_id == -1:)
        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))
161 ################################################################################
def check_valid(new):
    """
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages can not be udeb
      - source priority can only be assigned to dsc file types

    Stores the looked-up ids in "section id" / "priority id" of each entry,
    using -1 to mark an invalid combination.

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.
    """
    for pkg in new.keys():
        entry = new[pkg]
        section = entry["section"]
        priority = entry["priority"]
        file_type = entry["type"]

        entry["section id"] = database.get_section_id(section)
        entry["priority id"] = database.get_priority_id(priority)

        # d-i sections may only hold udebs (or their source); udebs may only
        # live in d-i sections.
        is_di = section.find("debian-installer") != -1
        if is_di:
            if file_type not in ("udeb", "dsc"):
                entry["section id"] = -1
        elif file_type == "udeb":
            entry["section id"] = -1

        # "source" priority and the dsc file type imply each other.
        if (priority == "source") != (file_type == "dsc"):
            entry["priority id"] = -1
192 ###############################################################################
class Upload(object):
    """
    Everything that has to do with an upload processed.
    """
    # (elided: def __init__(self, ...):)
        """
        Initialize various variables and the global substitution template mappings.
        Also connect to the DB and initialize the Database module.
        """
    # (elided: __init__ body, separator, def reset(self):)
        """ Reset a number of internal variables."""

        # Initialize the substitution template map
        # NOTE(review): 'cnf' is presumably a Config() instance bound in an
        # elided line -- confirm against the full file.
        self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
        self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
        self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
        self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]

        # Accumulated rejection text; extended by self.reject().
        self.reject_message = ""
225 ###########################################################################
226 def update_subst(self, reject_message = ""):
227 """ Set up the per-package template substitution mappings """
231 # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
232 if not self.pkg.changes.has_key("architecture") or not \
233 isinstance(changes["architecture"], DictType):
234 self.pkg.changes["architecture"] = { "Unknown" : "" }
236 # and maintainer2047 may not exist.
237 if not self.pkg.changes.has_key("maintainer2047"):
238 self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]
240 self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
241 self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
242 self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")
244 # For source uploads the Changed-By field wins; otherwise Maintainer wins.
245 if self.pkg.changes["architecture"].has_key("source") and \
246 self.pkg.changes["changedby822"] != "" and \
247 (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
249 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
250 self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], changes["maintainer2047"])
251 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
253 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
254 self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
255 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")
257 if "sponsoremail" in self.pkg.changes:
258 self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
260 if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
261 self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
263 # Apply any global override of the Maintainer field
264 if cnf.get("Dinstall::OverrideMaintainer"):
265 self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
266 self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]
268 self.Subst["__REJECT_MESSAGE__"] = self.reject_message
269 self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
270 self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
272 ###########################################################################
    def build_summaries(self):
        """ Build a summary of changes the upload introduces. """

        (byhand, new, summary, override_summary) = self.pkg.file_summary()

        short_summary = summary

        # This is for direport's benefit...
        f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))

        # NOTE(review): in the full file these appends are guarded (elided
        # lines between them) -- confirm before relying on this exact flow.
        summary += "Changes: " + f

        summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"

        # announce() with action=0 only reports what it *would* do.
        summary += self.announce(short_summary, 0)

        return (summary, short_summary)
293 ###########################################################################
    def close_bugs(self, summary, action):
        """
        Send mail to close bugs as instructed by the closes field in the changes file.
        Also add a line to summary if any work was done.

        @type summary: string
        @param summary: summary text, as given by L{build_summaries}

        @type action: bool
        @param action: Set to false no real action will be done.

        @return: summary. If action was taken, extended by the list of closed bugs.
        """
        # NOTE(review): elided dump -- the per-bug loop header and the
        # 'if action:' guards around mail sending are not visible here.
        template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')

        bugs = self.pkg.changes["closes"].keys()

        # (elided: early return when there is nothing to close; bugs.sort())
        summary += "Closing bugs: "
        # (elided: for bug in bugs:)
            summary += "%s " % (bug)
            # (elided: if action:)
            self.Subst["__BUG_NUMBER__"] = bug
            if self.pkg.changes["distribution"].has_key("stable"):
                self.Subst["__STABLE_WARNING__"] = """
Note that this package is not part of the released stable Debian
distribution. It may have dependencies on other unreleased software,
or other instabilities. Please take care if you wish to install it.
The update will eventually make its way into the next released Debian
                self.Subst["__STABLE_WARNING__"] = ""
            mail_message = utils.TemplateSubst(self.Subst, template)
            utils.send_mail(mail_message)

            # Clear up after ourselves
            del self.Subst["__BUG_NUMBER__"]
            del self.Subst["__STABLE_WARNING__"]

        # NOTE(review): the closing '"""' of the stable-warning string, its
        # last text line, and the 'else:' branch header are elided above.
        # (elided: if action:)
        self.Logger.log(["closing bugs"] + bugs)
        # (elided: summary += "\n"; return summary)
347 ###########################################################################
    def announce(self, short_summary, action):
        """
        Send an announce mail about a new upload.

        @type short_summary: string
        @param short_summary: Short summary text to include in the mail

        @type action: bool
        @param action: Set to false no real action will be done.

        @return: Textstring about action taken.
        """
        # NOTE(review): elided dump -- 'cnf', 'summary' and 'lists_done' are
        # bound in lines not visible here; so are the 'if action:' guards.
        announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')

        # Only do announcements for source uploads with a recent dpkg-dev installed
        if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
           self.pkg.changes["architecture"].has_key("source"):
            # (elided: return "")

        self.Subst["__SHORT_SUMMARY__"] = short_summary

        for dist in self.pkg.changes["distribution"].keys():
            # NOTE(review): 'Cnf' (capital C) is inconsistent with 'cnf' used
            # elsewhere in this method -- looks like a latent NameError;
            # confirm against the full file.
            announce_list = Cnf.Find("Suite::%s::Announce" % (dist))
            # Announce only once per configured list address.
            if announce_list == "" or lists_done.has_key(announce_list):
                # (elided: continue)
            lists_done[announce_list] = 1
            summary += "Announcing to %s\n" % (announce_list)

            # (elided: if action:)
            self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
            if cnf.get("Dinstall::TrackingServer") and \
               self.pkg.changes["architecture"].has_key("source"):
                trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
                self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto

            mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
            utils.send_mail(mail_message)

            del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]

        # Closing bugs is delegated when that feature is enabled.
        if cnf.FindB("Dinstall::CloseBugs"):
            summary = self.close_bugs(summary, action)

        del self.Subst["__SHORT_SUMMARY__"]
        # (elided: return summary)
404 ###########################################################################
    def accept (self, summary, short_summary, targetdir=None):
        """
        Accept an upload.

        This moves all files referenced from the .changes into the I{accepted}
        queue, sends the accepted mail, announces to lists, closes bugs and
        also checks for override disparities. If enabled it will write out
        the version history for the BTS Version Tracking and will finally call
        the autobuild queue.

        @type summary: string
        @param summary: Summary text

        @type short_summary: string
        @param short_summary: Short summary
        """
        # NOTE(review): elided dump -- 'cnf = Config()' and several guard
        # lines are not visible here.
        stats = SummaryStats()

        accepttemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted')

        if targetdir is None:
            targetdir = cnf["Dir::Queue::Accepted"]

        self.Logger.log(["Accepting changes", self.pkg.changes_file])

        self.write_dot_dak(targetdir)

        # Move all the files into the accepted directory
        utils.move(self.pkg.changes_file, targetdir)

        for name, entry in sorted(self.pkg.files.items()):
            utils.move(name, targetdir)
            stats.accept_bytes += float(entry["size"])

        stats.accept_count += 1

        # Send accept mail, announce to lists, close bugs and check for
        # override disparities
        if not cnf["Dinstall::Options::No-Mail"]:
            self.Subst["__SUITE__"] = ""
            self.Subst["__SUMMARY__"] = summary
            mail_message = utils.TemplateSubst(self.Subst, accepttemplate)
            utils.send_mail(mail_message)
            self.announce(short_summary, 1)

        ## Helper stuff for DebBugs Version Tracking
        if cnf.Find("Dir::Queue::BTSVersionTrack"):
            # ??? once queue/* is cleared on *.d.o and/or reprocessed
            # the conditionalization on dsc["bts changelog"] should be
            # (elided: removed.)

            # Write out the version history from the changelog
            if self.pkg.changes["architecture"].has_key("source") and \
               self.pkg.dsc.has_key("bts changelog"):

                (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
                version_history = os.fdopen(fd, 'w')
                version_history.write(self.pkg.dsc["bts changelog"])
                version_history.close()
                filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
                                      self.pkg.changes_file[:-8]+".versions")
                os.rename(temp_filename, filename)
                os.chmod(filename, 0644)

            # Write out the binary -> source mapping.
            (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
            debinfo = os.fdopen(fd, 'w')
            for name, entry in sorted(self.pkg.files.items()):
                if entry["type"] == "deb":
                    line = " ".join([entry["package"], entry["version"],
                                     entry["architecture"], entry["source package"],
                                     entry["source version"]])
                    debinfo.write(line+"\n")
            # (elided: debinfo.close())
            filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
                                  self.pkg.changes_file[:-8]+".debinfo")
            os.rename(temp_filename, filename)
            os.chmod(filename, 0644)

        # Its is Cnf["Dir::Queue::Accepted"] here, not targetdir!
        # <Ganneff> we do call queue_build too
        # <mhy> well yes, we'd have had to if we were inserting into accepted
        # <Ganneff> now. thats database only.
        # <mhy> urgh, that's going to get messy
        # <Ganneff> so i make the p-n call to it *also* using accepted/
        # <mhy> but then the packages will be in the queue_build table without the files being there
        # <Ganneff> as the buildd queue is only regenerated whenever unchecked runs
        # <mhy> ah, good point
        # <Ganneff> so it will work out, as unchecked move it over
        # <mhy> that's all completely sick

        # This routine returns None on success or an error on failure
        res = get_queue('accepted').autobuild_upload(self.pkg, cnf["Dir::Queue::Accepted"])
        # (elided: if res: utils.fubar(res))
    def check_override (self):
        """
        Checks override entries for validity. Mails "Override disparity" warnings,
        if that feature is enabled.

        Abandons the check if
          - override disparity checks are disabled
          - mail sending is disabled
        """
        # NOTE(review): the binding of 'cnf' is elided from this dump.

        # Abandon the check if:
        # a) override disparity checks have been disabled
        # b) we're not sending mail
        if not cnf.FindB("Dinstall::OverrideDisparityCheck") or \
           cnf["Dinstall::Options::No-Mail"]:
            # (elided: return)

        summary = self.pkg.check_override()
        # (elided: early return when the summary is empty)

        overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')

        self.Subst["__SUMMARY__"] = summary
        mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
        utils.send_mail(mail_message)
        del self.Subst["__SUMMARY__"]
539 ###########################################################################
    def force_reject(self, reject_files):
        """
        Forcefully move files from the current directory to the
        reject directory. If any file already exists in the reject
        directory it will be moved to the morgue to make way for
        the incoming file.

        @type reject_files: list
        @param files: file dictionary
        """
        # NOTE(review): elided dump -- the try/except wrappers around os.open
        # and several 'continue'/'return' lines are not visible here.
        # (elided: cnf = Config())

        for file_entry in reject_files:
            # Skip any files which don't exist or which we don't have permission to copy.
            if os.access(file_entry, os.R_OK) == 0:
                # (elided: continue)
            dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
            # (elided: try:)
                # Exclusive create so an existing reject file is never
                # silently clobbered.
                dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
            # (elided: except OSError, e:)
                # File exists? Let's try and move it to the morgue
                if e.errno == errno.EEXIST:
                    morgue_file = os.path.join(cnf["Dir::Morgue"], cnf["Dir::MorgueReject"], file_entry)
                    # (elided: try:)
                    morgue_file = utils.find_next_free(morgue_file)
                    except NoFreeFilenameError:
                        # Something's either gone badly Pete Tong, or
                        # someone is trying to exploit us.
                        utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file_entry))
                        # (elided: return)
                    utils.move(dest_file, morgue_file, perms=0660)
                    # (elided: try: retry the exclusive create)
                    dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
                    # (elided: except OSError:)
                        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
                        # (elided: return)

            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file_entry, dest_file, 1, perms=0660)
            # (elided: os.close(dest_fd))
588 ###########################################################################
589 def do_reject (self, manual=0, reject_message="", note=""):
591 Reject an upload. If called without a reject message or C{manual} is
592 true, spawn an editor so the user can write one.
595 @param manual: manual or automated rejection
597 @type reject_message: string
598 @param reject_message: A reject message
603 # If we weren't given a manual rejection message, spawn an
604 # editor so the user can add one in...
605 if manual and not reject_message:
606 (fd, temp_filename) = utils.temp_filename()
607 temp_file = os.fdopen(fd, 'w')
610 temp_file.write(line)
612 editor = os.environ.get("EDITOR","vi")
615 os.system("%s %s" % (editor, temp_filename))
616 temp_fh = utils.open_file(temp_filename)
617 reject_message = "".join(temp_fh.readlines())
619 print "Reject message:"
620 print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
621 prompt = "[R]eject, Edit, Abandon, Quit ?"
623 while prompt.find(answer) == -1:
624 answer = utils.our_raw_input(prompt)
625 m = re_default_answer.search(prompt)
628 answer = answer[:1].upper()
629 os.unlink(temp_filename)
639 reason_filename = self.pkg.changes_file[:-8] + ".reason"
640 reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
642 # Move all the files into the reject directory
643 reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
644 self.force_reject(reject_files)
646 # If we fail here someone is probably trying to exploit the race
647 # so let's just raise an exception ...
648 if os.path.exists(reason_filename):
649 os.unlink(reason_filename)
650 reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
652 rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
655 self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
656 self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
657 self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
658 os.write(reason_fd, reject_message)
659 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
661 # Build up the rejection email
662 user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
663 self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
664 self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
665 self.Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
666 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
667 # Write the rejection email out as the <foo>.reason file
668 os.write(reason_fd, reject_mail_message)
670 del self.Subst["__REJECTOR_ADDRESS__"]
671 del self.Subst["__MANUAL_REJECT_MESSAGE__"]
672 del self.Subst["__CC__"]
676 # Send the rejection mail if appropriate
677 if not cnf["Dinstall::Options::No-Mail"]:
678 utils.send_mail(reject_mail_message)
680 self.Logger.log(["rejected", pkg.changes_file])
684 ################################################################################
    def in_override_p(self, package, component, suite, binary_type, file, session=None):
        """
        Check if a package already has override entries in the DB

        @type package: string
        @param package: package name

        @type component: string
        @param component: database id of the component, as returned by L{database.get_component_id}

        @param suite: database id of the suite, as returned by L{database.get_suite_id}

        @type binary_type: string
        @param binary_type: type of the package

        @param file: filename we check

        @return: the database result. But noone cares anyway.
        """
        # NOTE(review): 'cnf' binding and the 'if session is None:' guard
        # are elided from this dump.
            session = DBConn().session()

        if binary_type == "": # must be source
            # (elided: file_type = "dsc"; else:)
            file_type = binary_type

        # Override suite name; used for example with proposed-updates
        if cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
            suite = cnf["Suite::%s::OverrideSuite" % (suite)]

        result = get_override(package, suite, component, file_type, session)

        # If checking for a source package fall back on the binary override type
        if file_type == "dsc" and len(result) < 1:
            result = get_override(package, suite, component, ['deb', 'udeb'], session)

        # Remember the section and priority so we can check them later if appropriate
        # (elided: guard selecting the single result row)
            self.pkg.files[file]["override section"] = result.section.section
            self.pkg.files[file]["override priority"] = result.priority.priority

        # (elided: return result)
737 ################################################################################
738 def reject (self, str, prefix="Rejected: "):
740 Add C{str} to reject_message. Adds C{prefix}, by default "Rejected: "
743 @param str: Reject text
746 @param prefix: Prefix text, default Rejected:
750 # Unlike other rejects we add new lines first to avoid trailing
751 # new lines when this message is passed back up to a caller.
752 if self.reject_message:
753 self.reject_message += "\n"
754 self.reject_message += prefix + str
756 ################################################################################
    def get_anyversion(self, sv_list, suite):
        """
        @param sv_list: list of (suite, version) tuples to check

        @param suite: suite name

        @return: highest version found in C{suite} or any suite it enhances
        """
        # NOTE(review): the accumulator initialisation and the final return
        # are elided from this dump.
        # A suite's "Enhances" list pulls in versions from related suites.
        anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
        for (s, v) in sv_list:
            if s in [ x.lower() for x in anysuite ]:
                # Keep the highest version seen so far.
                if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
                    # (elided: anyversion = v)
        # (elided: return anyversion)
776 ################################################################################
    def cross_suite_version_check(self, sv_list, file, new_version, sourceful=False):
        """
        Ensure versions are newer than existing packages in target
        suites and that cross-suite version checking rules as
        set out in the conf file are satisfied.

        @param sv_list: list of (suite, version) tuples to check

        @type new_version: string
        @param new_version: version of the upload being checked
        """
        # NOTE(review): the binding of 'cnf' and the 'cansave' bookkeeping
        # lines are elided from this dump.

        # Check versions for each target suite
        for target_suite in self.pkg.changes["distribution"].keys():
            must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
            must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]

            # Enforce "must be newer than target suite" even if conffile omits it
            if target_suite not in must_be_newer_than:
                must_be_newer_than.append(target_suite)

            for (suite, existent_version) in sv_list:
                vercmp = apt_pkg.VersionCompare(new_version, existent_version)

                # Source uploads must strictly increase the version.
                if suite in must_be_newer_than and sourceful and vercmp < 1:
                    self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))

                if suite in must_be_older_than and vercmp > -1:
                    # (elided: cansave = 0)

                    if self.pkg.changes.get('distribution-version', {}).has_key(suite):
                        # we really use the other suite, ignoring the conflicting one ...
                        addsuite = self.pkg.changes["distribution-version"][suite]

                        add_version = self.get_anyversion(sv_list, addsuite)
                        target_version = self.get_anyversion(sv_list, target_suite)

                        # (elided: if not add_version:)
                            # not add_version can only happen if we map to a suite
                            # that doesn't enhance the suite we're propup'ing from.
                            # so "propup-ver x a b c; map a d" is a problem only if
                            # d doesn't enhance a.
                            #
                            # i think we could always propagate in this case, rather
                            # than complaining. either way, this isn't a REJECT issue
                            #
                            # And - we really should complain to the dorks who configured dak
                            self.reject("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite), "Warning: ")
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            # (elided: cansave = 1)
                        elif not target_version:
                            # not targets_version is true when the package is NEW
                            # we could just stick with the "...old version..." REJECT
                            self.reject("Won't propogate NEW packages.")
                        elif apt_pkg.VersionCompare(new_version, add_version) < 0:
                            # propogation would be redundant. no need to reject though.
                            self.reject("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite), "Warning: ")
                            # (elided: cansave = 1)
                        elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
                             apt_pkg.VersionCompare(add_version, target_version) >= 0:
                            # Propagate the upload to the mapped suite too.
                            self.reject("Propogating upload to %s" % (addsuite), "Warning: ")
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            # (elided: cansave = 1)

                    # (elided: if not cansave:)
                        self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
855 ################################################################################
857 def check_binary_against_db(self, file, session=None):
863 session = DBConn().session()
865 self.reject_message = ""
867 # Ensure version is sane
868 q = session.query(BinAssociation)
869 q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[file]["package"])
870 q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[file]["architecture"], 'all']))
872 self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
873 file, files[file]["version"], sourceful=False)
875 # Check for any existing copies of the file
876 q = session.query(DBBinary).filter_by(files[file]["package"])
877 q = q.filter_by(version=files[file]["version"])
878 q = q.join(Architecture).filter_by(arch_string=files[file]["architecture"])
881 self.reject("%s: can not overwrite existing copy already in the archive." % (file))
883 return self.reject_message
885 ################################################################################
    def check_source_against_db(self, file, session=None):
        """
        Run the cross-suite version checks for a source upload against the
        versions already associated in the database.

        @return: the accumulated reject message ("" when no problem found)
        """
        # (elided: if session is None:)
            session = DBConn().session()

        self.reject_message = ""
        source = self.pkg.dsc.get("source")
        version = self.pkg.dsc.get("version")

        # Ensure version is sane
        q = session.query(SrcAssociation)
        q = q.join(DBSource).filter(DBSource.source==source)

        self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
                                       file, version, sourceful=True)

        return self.reject_message
906 ################################################################################
    def check_dsc_against_db(self, file):
        """
        Cross-check every file listed in the .dsc against incoming, the
        archive pool and the queue directories, validating md5sum/size and
        locating the .orig.tar.gz.

        @warning: NB: this function can remove entries from the 'files' index [if
        the .orig.tar.gz is a duplicate of the one in the archive]; if
        you're iterating over 'files' and call this function as part of
        the loop, be sure to add a check to the top of the loop to
        ensure you haven't just tried to dereference the deleted entry.
        """
        # NOTE(review): elided dump -- loop headers ('for i in ql:'),
        # 'found'/'match' bookkeeping and several 'else:'/'continue' lines
        # are not visible here.
        self.reject_message = ""
        self.pkg.orig_tar_gz = None

        # Try and find all files mentioned in the .dsc. This has
        # to work harder to cope with the multiple possible
        # locations of an .orig.tar.gz.
        # The ordering on the select is needed to pick the newest orig
        # when it exists in multiple places.
        for dsc_name, dsc_entry in self.pkg.dsc_files.items():
            # Case 1: the file ships with this very upload.
            if self.pkg.files.has_key(dsc_name):
                actual_md5 = self.pkg.files[dsc_name]["md5sum"]
                actual_size = int(self.pkg.files[dsc_name]["size"])
                found = "%s in incoming" % (dsc_name)

                # Check the file does not already exist in the archive
                ql = get_poolfile_like_name(dsc_name)

                # Strip out anything that isn't '%s' or '/%s$'
                # (elided: for i in ql:)
                    if not i.filename.endswith(dsc_name):
                        # (elided: ql.remove(i))

                # "[dak] has not broken them. [dak] has fixed a
                # brokenness. Your crappy hack exploited a bug in
                # (elided: the old dinstall.)
                #
                # "(Come on!  I thought it was always obvious that
                # one just doesn't release different files with
                # the same name and version.)"
                #                        -- ajk@ on d-devel@l.d.o

                # (elided: if len(ql) > 0:)
                    # Ignore exact matches for .orig.tar.gz
                    if dsc_name.endswith(".orig.tar.gz"):
                        # (elided: for i in ql:)
                        if self.pkg.files.has_key(dsc_name) and \
                           int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
                           self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
                            self.reject("ignoring %s, since it's already in the archive." % (dsc_name), "Warning: ")
                            # TODO: Don't delete the entry, just mark it as not needed
                            # This would fix the stupidity of changing something we often iterate over
                            # whilst we're doing it
                            # (elided: del self.pkg.files[dsc_name])
                            self.pkg.orig_tar_gz = os.path.join(i.location.path, i.filename)

                    # (elided: guard -- reached when no exact match was found)
                        self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))

            # Case 2: an .orig.tar.gz not shipped with the upload -- look in
            # the pool, then in the queue directories.
            elif dsc_name.endswith(".orig.tar.gz"):
                ql = get_poolfile_like_name(dsc_name)

                # Strip out anything that isn't '%s' or '/%s$'
                # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
                # (elided: for i in ql:)
                    if not i.filename.endswith(dsc_name):
                        # (elided: ql.remove(i))

                # (elided: if len(ql) > 0:)
                    # Unfortunately, we may get more than one match here if,
                    # for example, the package was in potato but had an -sa
                    # upload in woody. So we need to choose the right one.

                    # default to something sane in case we don't match any or have only one
                    # (elided: pick the candidate whose md5/size match the .dsc)
                        old_file = os.path.join(i.location.path, i.filename)
                        old_file_fh = utils.open_file(old_file)
                        actual_md5 = apt_pkg.md5sum(old_file_fh)
                        # (elided: old_file_fh.close())
                        actual_size = os.stat(old_file)[stat.ST_SIZE]
                        if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
                            # (elided: remember this candidate)

                    old_file = os.path.join(i.location.path, i.filename)
                    old_file_fh = utils.open_file(old_file)
                    actual_md5 = apt_pkg.md5sum(old_file_fh)
                    # (elided: old_file_fh.close())
                    actual_size = os.stat(old_file)[stat.ST_SIZE]
                    # NOTE(review): 'f' below is presumably the chosen pool
                    # file from an elided binding -- confirm.
                    suite_type = f.location.archive_type
                    # need this for updating dsc_files in install()
                    dsc_entry["files id"] = f.file_id
                    # See install() in process-accepted...
                    self.pkg.orig_tar_id = f.file_id
                    self.pkg.orig_tar_gz = old_file
                    self.pkg.orig_tar_location = f.location.location_id
                # (elided: else:)
                    # TODO: Record the queues and info in the DB so we don't hardcode all this crap
                    # Not there? Check the queue directories...
                    for directory in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                        in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (directory)], dsc_name)
                        if os.path.exists(in_otherdir):
                            in_otherdir_fh = utils.open_file(in_otherdir)
                            actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
                            in_otherdir_fh.close()
                            actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
                            # (elided: found = in_otherdir)
                            self.pkg.orig_tar_gz = in_otherdir

                    # (elided: if not found:)
                        self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_name))
                        self.pkg.orig_tar_gz = -1
                        # (elided: continue)
            # (elided: else: -- any other missing file is a plain error)
                self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_name))
                # (elided: continue)

            # Whatever copy we settled on must match the .dsc's checksums.
            if actual_md5 != dsc_entry["md5sum"]:
                self.reject("md5sum for %s doesn't match %s." % (found, file))
            if actual_size != int(dsc_entry["size"]):
                self.reject("size for %s doesn't match %s." % (found, file))

        return (self.reject_message, None)