5 Queue utility functions for dak
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009 Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ###############################################################################
40 from dak_exceptions import *
41 from regexes import re_default_answer, re_fdnic, re_bin_only_nmu
45 ###############################################################################
47 # Determine what parts in a .changes are NEW
49 def determine_new(changes, files, projectB, warn=1):
51 Determine what parts in a C{changes} file are NEW.
53 @type changes: Upload.Pkg.changes dict
54 @param changes: Changes dictionary
56 @type files: Upload.Pkg.files dict
57 @param files: Files dictionary
59 @type projectB: pgobject
60 @param projectB: DB handle
63 @param warn: Warn if overrides are added for (old)stable
66 @return: dictionary of NEW components.
71 # Build up a list of potentially new things
72 for file_entry in files.keys():
74 # Skip byhand elements
75 if f["type"] == "byhand":
78 priority = f["priority"]
79 section = f["section"]
80 file_type = get_type(f)
81 component = f["component"]
83 if file_type == "dsc":
85 if not new.has_key(pkg):
87 new[pkg]["priority"] = priority
88 new[pkg]["section"] = section
89 new[pkg]["type"] = file_type
90 new[pkg]["component"] = component
91 new[pkg]["files"] = []
93 old_type = new[pkg]["type"]
94 if old_type != file_type:
95 # source gets trumped by deb or udeb
97 new[pkg]["priority"] = priority
98 new[pkg]["section"] = section
99 new[pkg]["type"] = file_type
100 new[pkg]["component"] = component
101 new[pkg]["files"].append(file_entry)
102 if f.has_key("othercomponents"):
103 new[pkg]["othercomponents"] = f["othercomponents"]
105 for suite in changes["suite"].keys():
106 suite_id = database.get_suite_id(suite)
107 for pkg in new.keys():
108 component_id = database.get_component_id(new[pkg]["component"])
109 type_id = database.get_override_type_id(new[pkg]["type"])
110 q = projectB.query("SELECT package FROM override WHERE package = '%s' AND suite = %s AND component = %s AND type = %s" % (pkg, suite_id, component_id, type_id))
113 for file_entry in new[pkg]["files"]:
114 if files[file_entry].has_key("new"):
115 del files[file_entry]["new"]
119 if changes["suite"].has_key("stable"):
120 print "WARNING: overrides will be added for stable!"
121 if changes["suite"].has_key("oldstable"):
122 print "WARNING: overrides will be added for OLDstable!"
123 for pkg in new.keys():
124 if new[pkg].has_key("othercomponents"):
125 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
129 ################################################################################
133 Get the file type of C{file}
136 @param file: file entry
143 if file.has_key("dbtype"):
144 file_type = file["dbtype"]
145 elif file["type"] in [ "orig.tar.gz", "orig.tar.bz2", "tar.gz", "tar.bz2", "diff.gz", "diff.bz2", "dsc" ]:
148 utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (file_type))
150 # Validate the override type
151 type_id = database.get_override_type_id(file_type)
153 utils.fubar("invalid type (%s) for new. Say wha?" % (file_type))
157 ################################################################################
def check_valid(new):
    """
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages can not be udeb
      - source priority can only be assigned to dsc file types

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.

    @note: Modifies C{new} in place, setting "section id" / "priority id";
    -1 marks an invalid section/priority for that package.
    """
    for pkg in new.keys():
        entry = new[pkg]
        section = entry["section"]
        priority = entry["priority"]
        file_type = entry["type"]
        entry["section id"] = database.get_section_id(section)
        # Consistently use the already-bound local (the original re-indexed
        # the dict here while using the local everywhere else).
        entry["priority id"] = database.get_priority_id(priority)
        # Sanity checks
        di = section.find("debian-installer") != -1
        if (di and file_type not in ("udeb", "dsc")) or (not di and file_type == "udeb"):
            entry["section id"] = -1
        if (priority == "source" and file_type != "dsc") or \
           (priority != "source" and file_type == "dsc"):
            entry["priority id"] = -1
188 ###############################################################################
191 """ Convenience wrapper to carry around all the package information """
192 def __init__(self, **kwds):
193 self.__dict__.update(kwds)
195 def update(self, **kwds):
196 self.__dict__.update(kwds)
198 ###############################################################################
202 Everything that has to do with an upload processed.
205 def __init__(self, Cnf):
207 Initialize various variables and the global substitution template mappings.
208 Also connect to the DB and initialize the Database module.
212 self.accept_count = 0
213 self.accept_bytes = 0L
214 self.reject_message = ""
215 self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {})
217 # Initialize the substitution template mapping global
218 Subst = self.Subst = {}
219 Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"]
220 Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"]
221 Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"]
222 Subst["__DAK_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
224 self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
225 database.init(Cnf, self.projectB)
227 ###########################################################################
229 def init_vars (self):
230 """ Reset a number of entries from our Pkg object. """
231 self.pkg.changes.clear()
233 self.pkg.files.clear()
234 self.pkg.dsc_files.clear()
235 self.pkg.orig_tar_id = None
236 self.pkg.orig_tar_location = ""
237 self.pkg.orig_tar_gz = None
239 ###########################################################################
241 def update_vars (self):
243 Update our Pkg object by reading a previously created cPickle .dak dumpfile.
245 dump_filename = self.pkg.changes_file[:-8]+".dak"
246 dump_file = utils.open_file(dump_filename)
247 p = cPickle.Unpickler(dump_file)
249 self.pkg.changes.update(p.load())
250 self.pkg.dsc.update(p.load())
251 self.pkg.files.update(p.load())
252 self.pkg.dsc_files.update(p.load())
254 self.pkg.orig_tar_id = p.load()
255 self.pkg.orig_tar_location = p.load()
259 ###########################################################################
262 def dump_vars(self, dest_dir):
264 Dump our Pkg object into a cPickle file.
266 @type dest_dir: string
267 @param dest_dir: Path where the dumpfile should be stored
269 @note: This could just dump the dictionaries as is, but I'd like to avoid this so
270 there's some idea of what process-accepted & process-new use from
271 process-unchecked. (JT)
275 changes = self.pkg.changes
277 files = self.pkg.files
278 dsc_files = self.pkg.dsc_files
279 orig_tar_id = self.pkg.orig_tar_id
280 orig_tar_location = self.pkg.orig_tar_location
282 dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".dak")
283 dump_file = utils.open_file(dump_filename, 'w')
285 os.chmod(dump_filename, 0664)
287 # chmod may fail when the dumpfile is not owned by the user
288 # invoking dak (like e.g. when NEW is processed by a member
290 if errno.errorcode[e.errno] == 'EPERM':
291 perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE])
292 # security precaution, should never happen unless a weird
293 # umask is set anywhere
294 if perms & stat.S_IWOTH:
295 utils.fubar("%s is world writable and chmod failed." % \
297 # ignore the failed chmod otherwise as the file should
298 # already have the right privileges and is just, at worst,
299 # unreadable for world
303 p = cPickle.Pickler(dump_file, 1)
310 for file_entry in files.keys():
311 d_files[file_entry] = {}
312 for i in [ "package", "version", "architecture", "type", "size",
313 "md5sum", "sha1sum", "sha256sum", "component",
314 "location id", "source package", "source version",
315 "maintainer", "dbtype", "files id", "new",
316 "section", "priority", "othercomponents",
317 "pool name", "original component" ]:
318 if files[file_entry].has_key(i):
319 d_files[file_entry][i] = files[file_entry][i]
321 # Mandatory changes fields
322 for i in [ "distribution", "source", "architecture", "version",
323 "maintainer", "urgency", "fingerprint", "changedby822",
324 "changedby2047", "changedbyname", "maintainer822",
325 "maintainer2047", "maintainername", "maintaineremail",
326 "closes", "changes" ]:
327 d_changes[i] = changes[i]
328 # Optional changes fields
329 for i in [ "changed-by", "filecontents", "format", "process-new note", "adv id", "distribution-version",
331 if changes.has_key(i):
332 d_changes[i] = changes[i]
334 for i in [ "source", "version", "maintainer", "fingerprint",
335 "uploaders", "bts changelog", "dm-upload-allowed" ]:
339 for file_entry in dsc_files.keys():
340 d_dsc_files[file_entry] = {}
341 # Mandatory dsc_files fields
342 for i in [ "size", "md5sum" ]:
343 d_dsc_files[file_entry][i] = dsc_files[file_entry][i]
344 # Optional dsc_files fields
345 for i in [ "files id" ]:
346 if dsc_files[file_entry].has_key(i):
347 d_dsc_files[file_entry][i] = dsc_files[file_entry][i]
349 for i in [ d_changes, d_dsc, d_files, d_dsc_files,
350 orig_tar_id, orig_tar_location ]:
354 ###########################################################################
356 # Set up the per-package template substitution mappings
358 def update_subst (self, reject_message = ""):
359 """ Set up the per-package template substitution mappings """
362 changes = self.pkg.changes
363 # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
364 if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
365 changes["architecture"] = { "Unknown" : "" }
366 # and maintainer2047 may not exist.
367 if not changes.has_key("maintainer2047"):
368 changes["maintainer2047"] = self.Cnf["Dinstall::MyEmailAddress"]
370 Subst["__ARCHITECTURE__"] = " ".join(changes["architecture"].keys())
371 Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
372 Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "")
374 # For source uploads the Changed-By field wins; otherwise Maintainer wins.
375 if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
376 Subst["__MAINTAINER_FROM__"] = changes["changedby2047"]
377 Subst["__MAINTAINER_TO__"] = "%s, %s" % (changes["changedby2047"],
378 changes["maintainer2047"])
379 Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown")
381 Subst["__MAINTAINER_FROM__"] = changes["maintainer2047"]
382 Subst["__MAINTAINER_TO__"] = changes["maintainer2047"]
383 Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown")
385 if "sponsoremail" in changes:
386 Subst["__MAINTAINER_TO__"] += ", %s"%changes["sponsoremail"]
388 if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
389 Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])
391 # Apply any global override of the Maintainer field
392 if self.Cnf.get("Dinstall::OverrideMaintainer"):
393 Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"]
394 Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"]
396 Subst["__REJECT_MESSAGE__"] = reject_message
397 Subst["__SOURCE__"] = changes.get("source", "Unknown")
398 Subst["__VERSION__"] = changes.get("version", "Unknown")
400 ###########################################################################
402 def build_summaries(self):
403 """ Build a summary of changes the upload introduces. """
404 changes = self.pkg.changes
405 files = self.pkg.files
407 byhand = summary = new = ""
409 # changes["distribution"] may not exist in corner cases
410 # (e.g. unreadable changes files)
411 if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
412 changes["distribution"] = {}
415 file_keys = files.keys()
417 for file_entry in file_keys:
418 if files[file_entry].has_key("byhand"):
420 summary += file_entry + " byhand\n"
421 elif files[file_entry].has_key("new"):
423 summary += "(new) %s %s %s\n" % (file_entry, files[file_entry]["priority"], files[file_entry]["section"])
424 if files[file_entry].has_key("othercomponents"):
425 summary += "WARNING: Already present in %s distribution.\n" % (files[file_entry]["othercomponents"])
426 if files[file_entry]["type"] == "deb":
427 deb_fh = utils.open_file(file_entry)
428 summary += apt_pkg.ParseSection(apt_inst.debExtractControl(deb_fh))["Description"] + '\n'
431 files[file_entry]["pool name"] = utils.poolify (changes.get("source",""), files[file_entry]["component"])
432 destination = self.Cnf["Dir::PoolRoot"] + files[file_entry]["pool name"] + file_entry
433 summary += file_entry + "\n to " + destination + "\n"
434 if not files[file_entry].has_key("type"):
435 files[file_entry]["type"] = "unknown"
436 if files[file_entry]["type"] in ["deb", "udeb", "dsc"]:
437 # (queue/unchecked), there we have override entries already, use them
438 # (process-new), there we dont have override entries, use the newly generated ones.
439 override_prio = files[file_entry].get("override priority", files[file_entry]["priority"])
440 override_sect = files[file_entry].get("override section", files[file_entry]["section"])
441 override_summary += "%s - %s %s\n" % (file_entry, override_prio, override_sect)
443 short_summary = summary
445 # This is for direport's benefit...
446 f = re_fdnic.sub("\n .\n", changes.get("changes",""))
449 summary += "Changes: " + f
451 summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
453 summary += self.announce(short_summary, 0)
455 return (summary, short_summary)
457 ###########################################################################
459 def close_bugs (self, summary, action):
461 Send mail to close bugs as instructed by the closes field in the changes file.
462 Also add a line to summary if any work was done.
464 @type summary: string
465 @param summary: summary text, as given by L{build_summaries}
468 @param action: Set to false no real action will be done.
471 @return: summary. If action was taken, extended by the list of closed bugs.
474 changes = self.pkg.changes
478 bugs = changes["closes"].keys()
484 summary += "Closing bugs: "
486 summary += "%s " % (bug)
488 Subst["__BUG_NUMBER__"] = bug
489 if changes["distribution"].has_key("stable"):
490 Subst["__STABLE_WARNING__"] = """
491 Note that this package is not part of the released stable Debian
492 distribution. It may have dependencies on other unreleased software,
493 or other instabilities. Please take care if you wish to install it.
494 The update will eventually make its way into the next released Debian
497 Subst["__STABLE_WARNING__"] = ""
498 mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-close")
499 utils.send_mail (mail_message)
501 self.Logger.log(["closing bugs"]+bugs)
506 ###########################################################################
508 def announce (self, short_summary, action):
510 Send an announce mail about a new upload.
512 @type short_summary: string
513 @param short_summary: Short summary text to include in the mail
516 @param action: Set to false no real action will be done.
519 @return: Textstring about action taken.
524 changes = self.pkg.changes
526 # Only do announcements for source uploads with a recent dpkg-dev installed
527 if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):
532 Subst["__SHORT_SUMMARY__"] = short_summary
534 for dist in changes["distribution"].keys():
535 announce_list = Cnf.Find("Suite::%s::Announce" % (dist))
536 if announce_list == "" or lists_done.has_key(announce_list):
538 lists_done[announce_list] = 1
539 summary += "Announcing to %s\n" % (announce_list)
542 Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
543 if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
544 Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"])
545 mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.announce")
546 utils.send_mail (mail_message)
548 if Cnf.FindB("Dinstall::CloseBugs"):
549 summary = self.close_bugs(summary, action)
553 ###########################################################################
555 def accept (self, summary, short_summary, targetdir=None):
559 This moves all files referenced from the .changes into the I{accepted}
560 queue, sends the accepted mail, announces to lists, closes bugs and
561 also checks for override disparities. If enabled it will write out
562 the version history for the BTS Version Tracking and will finally call
565 @type summary: string
566 @param summary: Summary text
568 @type short_summary: string
569 @param short_summary: Short summary
575 files = self.pkg.files
576 changes = self.pkg.changes
577 changes_file = self.pkg.changes_file
580 if targetdir is None:
581 targetdir = Cnf["Dir::Queue::Accepted"]
584 self.Logger.log(["Accepting changes",changes_file])
586 self.dump_vars(targetdir)
588 # Move all the files into the accepted directory
589 utils.move(changes_file, targetdir)
590 file_keys = files.keys()
591 for file_entry in file_keys:
592 utils.move(file_entry, targetdir)
593 self.accept_bytes += float(files[file_entry]["size"])
594 self.accept_count += 1
596 # Send accept mail, announce to lists, close bugs and check for
597 # override disparities
598 if not Cnf["Dinstall::Options::No-Mail"]:
599 Subst["__SUITE__"] = ""
600 Subst["__SUMMARY__"] = summary
601 mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
602 utils.send_mail(mail_message)
603 self.announce(short_summary, 1)
606 ## Helper stuff for DebBugs Version Tracking
607 if Cnf.Find("Dir::Queue::BTSVersionTrack"):
608 # ??? once queue/* is cleared on *.d.o and/or reprocessed
609 # the conditionalization on dsc["bts changelog"] should be
612 # Write out the version history from the changelog
613 if changes["architecture"].has_key("source") and \
614 dsc.has_key("bts changelog"):
616 (fd, temp_filename) = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
617 version_history = os.fdopen(fd, 'w')
618 version_history.write(dsc["bts changelog"])
619 version_history.close()
620 filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
621 changes_file[:-8]+".versions")
622 os.rename(temp_filename, filename)
623 os.chmod(filename, 0644)
625 # Write out the binary -> source mapping.
626 (fd, temp_filename) = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
627 debinfo = os.fdopen(fd, 'w')
628 for file_entry in file_keys:
629 f = files[file_entry]
630 if f["type"] == "deb":
631 line = " ".join([f["package"], f["version"],
632 f["architecture"], f["source package"],
633 f["source version"]])
634 debinfo.write(line+"\n")
636 filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
637 changes_file[:-8]+".debinfo")
638 os.rename(temp_filename, filename)
639 os.chmod(filename, 0644)
641 # Its is Cnf["Dir::Queue::Accepted"] here, not targetdir!
642 # <Ganneff> we do call queue_build too
643 # <mhy> well yes, we'd have had to if we were inserting into accepted
644 # <Ganneff> now. thats database only.
645 # <mhy> urgh, that's going to get messy
646 # <Ganneff> so i make the p-n call to it *also* using accepted/
647 # <mhy> but then the packages will be in the queue_build table without the files being there
648 # <Ganneff> as the buildd queue is only regenerated whenever unchecked runs
649 # <mhy> ah, good point
650 # <Ganneff> so it will work out, as unchecked move it over
651 # <mhy> that's all completely sick
653 self.queue_build("accepted", Cnf["Dir::Queue::Accepted"])
655 ###########################################################################
657 def queue_build (self, queue, path):
659 Prepare queue_build database table used for incoming autobuild support.
662 @param queue: queue name
665 @param path: path for the queue file entries/link destinations
670 files = self.pkg.files
671 changes = self.pkg.changes
672 changes_file = self.pkg.changes_file
674 file_keys = files.keys()
676 ## Special support to enable clean auto-building of queued packages
677 queue_id = database.get_or_set_queue_id(queue)
679 self.projectB.query("BEGIN WORK")
680 for suite in changes["distribution"].keys():
681 if suite not in Cnf.ValueList("Dinstall::QueueBuildSuites"):
683 suite_id = database.get_suite_id(suite)
684 dest_dir = Cnf["Dir::QueueBuild"]
685 if Cnf.FindB("Dinstall::SecurityQueueBuild"):
686 dest_dir = os.path.join(dest_dir, suite)
687 for file_entry in file_keys:
688 src = os.path.join(path, file_entry)
689 dest = os.path.join(dest_dir, file_entry)
690 if Cnf.FindB("Dinstall::SecurityQueueBuild"):
691 # Copy it since the original won't be readable by www-data
692 utils.copy(src, dest)
694 # Create a symlink to it
695 os.symlink(src, dest)
696 # Add it to the list of packages for later processing by apt-ftparchive
697 self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
698 # If the .orig.tar.gz is in the pool, create a symlink to
699 # it (if one doesn't already exist)
700 if self.pkg.orig_tar_id:
701 # Determine the .orig.tar.gz file name
702 for dsc_file in self.pkg.dsc_files.keys():
703 if dsc_file.endswith(".orig.tar.gz"):
705 dest = os.path.join(dest_dir, filename)
706 # If it doesn't exist, create a symlink
707 if not os.path.exists(dest):
708 # Find the .orig.tar.gz in the pool
709 q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id))
712 utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id))
713 src = os.path.join(ql[0][0], ql[0][1])
714 os.symlink(src, dest)
715 # Add it to the list of packages for later processing by apt-ftparchive
716 self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
717 # if it does, update things to ensure it's not removed prematurely
719 self.projectB.query("UPDATE queue_build SET in_queue = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id))
721 self.projectB.query("COMMIT WORK")
723 ###########################################################################
725 def check_override (self):
727 Checks override entries for validity. Mails "Override disparity" warnings,
728 if that feature is enabled.
730 Abandons the check if
731 - this is a non-sourceful upload
732 - override disparity checks are disabled
733 - mail sending is disabled
737 changes = self.pkg.changes
738 files = self.pkg.files
741 # Abandon the check if:
742 # a) it's a non-sourceful upload
743 # b) override disparity checks have been disabled
744 # c) we're not sending mail
745 if not changes["architecture"].has_key("source") or \
746 not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
747 Cnf["Dinstall::Options::No-Mail"]:
751 file_keys = files.keys()
753 for file_entry in file_keys:
754 if not files[file_entry].has_key("new") and files[file_entry]["type"] == "deb":
755 section = files[file_entry]["section"]
756 override_section = files[file_entry]["override section"]
757 if section.lower() != override_section.lower() and section != "-":
758 summary += "%s: package says section is %s, override says %s.\n" % (file_entry, section, override_section)
759 priority = files[file_entry]["priority"]
760 override_priority = files[file_entry]["override priority"]
761 if priority != override_priority and priority != "-":
762 summary += "%s: package says priority is %s, override says %s.\n" % (file_entry, priority, override_priority)
767 Subst["__SUMMARY__"] = summary
768 mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/process-unchecked.override-disparity")
769 utils.send_mail(mail_message)
771 ###########################################################################
773 def force_reject (self, files):
775 Forcefully move files from the current directory to the
776 reject directory. If any file already exists in the reject
777 directory it will be moved to the morgue to make way for
781 @param files: file dictionary
787 for file_entry in files:
788 # Skip any files which don't exist or which we don't have permission to copy.
789 if os.access(file_entry,os.R_OK) == 0:
791 dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file_entry)
793 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
795 # File exists? Let's try and move it to the morgue
796 if errno.errorcode[e.errno] == 'EEXIST':
797 morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file_entry)
799 morgue_file = utils.find_next_free(morgue_file)
800 except NoFreeFilenameError:
801 # Something's either gone badly Pete Tong, or
802 # someone is trying to exploit us.
803 utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file_entry))
805 utils.move(dest_file, morgue_file, perms=0660)
807 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
810 utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
814 # If we got here, we own the destination file, so we can
815 # safely overwrite it.
816 utils.move(file_entry, dest_file, 1, perms=0660)
819 ###########################################################################
821 def do_reject (self, manual = 0, reject_message = "", note = ""):
823 Reject an upload. If called without a reject message or C{manual} is
824 true, spawn an editor so the user can write one.
827 @param manual: manual or automated rejection
829 @type reject_message: string
830 @param reject_message: A reject message
835 # If we weren't given a manual rejection message, spawn an
836 # editor so the user can add one in...
837 if manual and not reject_message:
838 (fd, temp_filename) = utils.temp_filename()
839 temp_file = os.fdopen(fd, 'w')
842 temp_file.write(line)
844 editor = os.environ.get("EDITOR","vi")
847 os.system("%s %s" % (editor, temp_filename))
848 temp_fh = utils.open_file(temp_filename)
849 reject_message = "".join(temp_fh.readlines())
851 print "Reject message:"
852 print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
853 prompt = "[R]eject, Edit, Abandon, Quit ?"
855 while prompt.find(answer) == -1:
856 answer = utils.our_raw_input(prompt)
857 m = re_default_answer.search(prompt)
860 answer = answer[:1].upper()
861 os.unlink(temp_filename)
873 reason_filename = pkg.changes_file[:-8] + ".reason"
874 reason_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename
876 # Move all the files into the reject directory
877 reject_files = pkg.files.keys() + [pkg.changes_file]
878 self.force_reject(reject_files)
880 # If we fail here someone is probably trying to exploit the race
881 # so let's just raise an exception ...
882 if os.path.exists(reason_filename):
883 os.unlink(reason_filename)
884 reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
887 Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
888 Subst["__MANUAL_REJECT_MESSAGE__"] = ""
889 Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
890 os.write(reason_fd, reject_message)
891 reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
893 # Build up the rejection email
894 user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"])
896 Subst["__REJECTOR_ADDRESS__"] = user_email_address
897 Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
898 Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
899 reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
900 # Write the rejection email out as the <foo>.reason file
901 os.write(reason_fd, reject_mail_message)
905 # Send the rejection mail if appropriate
906 if not Cnf["Dinstall::Options::No-Mail"]:
907 utils.send_mail(reject_mail_message)
909 self.Logger.log(["rejected", pkg.changes_file])
912 ################################################################################
914 def source_exists (self, package, source_version, suites = ["any"]):
916 Ensure that source exists somewhere in the archive for the binary
917 upload being processed.
918 1. exact match => 1.0-3
919 2. bin-only NMU => 1.0-3+b1 , 1.0-3.1+b1
921 @type package: string
922 @param package: package source name
924 @type source_version: string
925 @param source_version: expected source version
928 @param suites: list of suites to check in, default I{any}
931 @return: returns 1 if a source with expected version is found, otherwise 0
937 que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
940 # source must exist in suite X, or in some other suite that's
941 # mapped to X, recursively... silent-maps are counted too,
942 # unreleased-maps aren't.
943 maps = self.Cnf.ValueList("SuiteMappings")[:]
945 maps = [ m.split() for m in maps ]
946 maps = [ (x[1], x[2]) for x in maps
947 if x[0] == "map" or x[0] == "silent-map" ]
950 if x[1] in s and x[0] not in s:
953 que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, " OR ".join(["su.suite_name = '%s'" % a for a in s]))
954 q = self.projectB.query(que)
956 # Reduce the query results to a list of version numbers
957 ql = [ i[0] for i in q.getresult() ]
960 if source_version in ql:
964 orig_source_version = re_bin_only_nmu.sub('', source_version)
965 if orig_source_version in ql:
973 ################################################################################
975 def in_override_p (self, package, component, suite, binary_type, file):
977 Check if a package already has override entries in the DB
979 @type package: string
980 @param package: package name
982 @type component: string
983 @param component: database id of the component, as returned by L{database.get_component_id}
986 @param suite: database id of the suite, as returned by L{database.get_suite_id}
988 @type binary_type: string
989 @param binary_type: type of the package
992 @param file: filename we check
994 @return: the database result. But noone cares anyway.
997 files = self.pkg.files
# Map the upload's binary type onto an override type: an empty
# binary_type marks a source upload; otherwise the binary type is used
# as the override type directly.
# NOTE(review): the assignment for the source branch sits on lines
# elided from this excerpt — presumably file_type = "dsc"; confirm.
999 if binary_type == "": # must be source
1002 file_type = binary_type
1004 # Override suite name; used for example with proposed-updates
1005 if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
1006 suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)]
1008 # Avoid <undef> on unknown distributions
1009 suite_id = database.get_suite_id(suite)
# Resolve the component name and the override type to their database ids.
1012 component_id = database.get_component_id(component)
1013 type_id = database.get_override_type_id(file_type)
# NOTE(review): the package name is interpolated straight into the SQL
# string; this relies on it having been validated upstream — a
# parameterized query would be safer.
1015 q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
1016 % (package, suite_id, component_id, type_id))
1017 result = q.getresult()
1018 # If checking for a source package fall back on the binary override type
1019 if file_type == "dsc" and not result:
1020 deb_type_id = database.get_override_type_id("deb")
1021 udeb_type_id = database.get_override_type_id("udeb")
1022 q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
1023 % (package, suite_id, component_id, deb_type_id, udeb_type_id))
1024 result = q.getresult()
1026 # Remember the section and priority so we can check them later if appropriate
# NOTE(review): a guard that `result` is non-empty must exist on an
# elided line, since result[0] would raise IndexError on a miss.
1028 files[file]["override section"] = result[0][0]
1029 files[file]["override priority"] = result[0][1]
1033 ################################################################################
def reject (self, str, prefix="Rejected: "):
    """
    Append C{str}, prefixed with C{prefix}, to reject_message.

    @type str: string
    @param str: Reject text

    @type prefix: string
    @param prefix: Prefix text, default "Rejected: "
    """
    # The newline separator is prepended (only when a message already
    # exists) instead of appended, so the accumulated text never ends
    # with a trailing newline when handed back to a caller.
    separator = ""
    if self.reject_message:
        separator = "\n"
    self.reject_message += separator + prefix + str
1053 ################################################################################
1055 def get_anyversion(self, query_result, suite):
# Scan query_result rows of (version, suite_name) and track the highest
# version (per dpkg ordering) found in `suite` or any suite the
# conffile declares as enhancing it.  Suite matching is
# case-insensitive on the configured names.
# NOTE(review): the initialisation of `anyversion`, the winning
# assignment and the return statement fall on elided lines —
# presumably `anyversion = None` / `anyversion = v` / `return anyversion`.
1058 anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
1059 for (v, s) in query_result:
1060 if s in [ x.lower() for x in anysuite ]:
# apt_pkg.VersionCompare <= 0 means the candidate v sorts at or above
# the current best, so it replaces it.
1061 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
1065 ################################################################################
1067 def cross_suite_version_check(self, query_result, file, new_version,
1070 Ensure versions are newer than existing packages in target
1071 suites and that cross-suite version checking rules as
1072 set out in the conf file are satisfied.
# NOTE(review): the signature continues on an elided line — a
# `sourceful` flag is clearly part of it (used below and passed by the
# check_*_against_db callers).  Rejections are accumulated via
# self.reject() into self.reject_message.
1076 # Check versions for each target suite
1077 for target_suite in self.pkg.changes["distribution"].keys():
1078 must_be_newer_than = [ i.lower() for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
1079 must_be_older_than = [ i.lower() for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
1080 # Enforce "must be newer than target suite" even if conffile omits it
1081 if target_suite not in must_be_newer_than:
1082 must_be_newer_than.append(target_suite)
1083 for entry in query_result:
1084 existent_version = entry[0]
# NOTE(review): `suite` is bound from the row on an elided line —
# presumably suite = entry[1].lower(); confirm.
# VersionCompare < 1 means new_version <= existent_version, i.e. the
# upload would not be newer than what the suite already has.
1086 if suite in must_be_newer_than and sourceful and \
1087 apt_pkg.VersionCompare(new_version, existent_version) < 1:
1088 self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
# The must-be-older-than direction: a conflict here may still be
# resolvable by propagating the upload to a mapped suite
# ("distribution-version" in the changes), handled below.
1089 if suite in must_be_older_than and \
1090 apt_pkg.VersionCompare(new_version, existent_version) > -1:
1091 ch = self.pkg.changes
1093 if ch.get('distribution-version', {}).has_key(suite):
1094 # we really use the other suite, ignoring the conflicting one ...
1095 addsuite = ch["distribution-version"][suite]
1097 add_version = self.get_anyversion(query_result, addsuite)
1098 target_version = self.get_anyversion(query_result, target_suite)
# NOTE(review): the branch condition for this arm (checking
# `not add_version`) sits on an elided line.
1101 # not add_version can only happen if we map to a suite
1102 # that doesn't enhance the suite we're propup'ing from.
1103 # so "propup-ver x a b c; map a d" is a problem only if
1104 # d doesn't enhance a.
1106 # i think we could always propagate in this case, rather
1107 # than complaining. either way, this isn't a REJECT issue
1109 # And - we really should complain to the dorks who configured dak
1110 self.reject("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite), "Warning: ")
1111 self.pkg.changes.setdefault("propdistribution", {})
1112 self.pkg.changes["propdistribution"][addsuite] = 1
1114 elif not target_version:
1115 # not targets_version is true when the package is NEW
1116 # we could just stick with the "...old version..." REJECT
1117 # for this, I think.
1118 self.reject("Won't propogate NEW packages.")
1119 elif apt_pkg.VersionCompare(new_version, add_version) < 0:
1120 # propogation would be redundant. no need to reject though.
1121 self.reject("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite), "Warning: ")
# Upload is newer than the mapped suite's version and that version is
# at least the target's: propagation resolves the conflict.
1123 elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
1124 apt_pkg.VersionCompare(add_version, target_version) >= 0:
1126 self.reject("Propogating upload to %s" % (addsuite), "Warning: ")
1127 self.pkg.changes.setdefault("propdistribution", {})
1128 self.pkg.changes["propdistribution"][addsuite] = 1
# Fallthrough (no distribution-version mapping, or no arm above
# applied): hard reject on the version conflict.
1132 self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
1134 ################################################################################
1136 def check_binary_against_db(self, file):
# Run the cross-suite version checks for one binary file of the upload
# and refuse to overwrite an identical (package, version, arch) binary
# already in the archive.  Returns the accumulated reject message
# (empty string when everything is fine).
1140 self.reject_message = ""
1141 files = self.pkg.files
1143 # Ensure version is sane
# NOTE(review): the FROM list is truncated in this excerpt — the
# `architecture a` table referenced in the WHERE clause is declared on
# an elided line.
1144 q = self.projectB.query("""
1145 SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
1147 WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
1148 AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
1149 % (files[file]["package"],
1150 files[file]["architecture"]))
# sourceful=False: binaries only need to be newer when the upload
# carries source (checked by the sourceful arm inside the helper).
1151 self.cross_suite_version_check(q.getresult(), file,
1152 files[file]["version"], sourceful=False)
1154 # Check for any existing copies of the file
1155 q = self.projectB.query("""
1156 SELECT b.id FROM binaries b, architecture a
1157 WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
1158 AND a.id = b.architecture"""
1159 % (files[file]["package"],
1160 files[file]["version"],
1161 files[file]["architecture"]))
# NOTE(review): the non-empty-result guard for this reject sits on an
# elided line (presumably `if q.getresult():`).
1163 self.reject("%s: can not overwrite existing copy already in the archive." % (file))
1165 return self.reject_message
1167 ################################################################################
1169 def check_source_against_db(self, file):
# Run the cross-suite version checks for the upload's source package
# and return the accumulated reject message (empty when fine).
1172 self.reject_message = ""
# NOTE(review): `dsc` is bound on an elided line — presumably
# dsc = self.pkg.dsc; confirm against the full source.
1175 # Ensure version is sane
1176 q = self.projectB.query("""
1177 SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
1178 WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")))
# NOTE(review): the call continues on an elided line; a sourceful flag
# (True for source uploads) is presumably the final argument.
1179 self.cross_suite_version_check(q.getresult(), file, dsc.get("version"),
1182 return self.reject_message
1184 ################################################################################
1187 def check_dsc_against_db(self, file):
# Verify every file listed in the .dsc: files shipped in the upload
# must not clash with archive copies, and a referenced-but-absent
# .orig.tar.gz is hunted down in the pool and the queue directories.
# Returns (reject_message, orig_tar_gz_path_or_None); also records
# orig_tar_gz / orig_tar_id / orig_tar_location on self.pkg.
1190 @warning: NB: this function can remove entries from the 'files' index [if
1191 the .orig.tar.gz is a duplicate of the one in the archive]; if
1192 you're iterating over 'files' and call this function as part of
1193 the loop, be sure to add a check to the top of the loop to
1194 ensure you haven't just tried to dereference the deleted entry.
1197 self.reject_message = ""
1198 files = self.pkg.files
1199 dsc_files = self.pkg.dsc_files
1200 self.pkg.orig_tar_gz = None
1202 # Try and find all files mentioned in the .dsc. This has
1203 # to work harder to cope with the multiple possible
1204 # locations of an .orig.tar.gz.
1205 # The ordering on the select is needed to pick the newest orig
1206 # when it exists in multiple places.
1207 for dsc_file in dsc_files.keys():
# Case 1: the .dsc-listed file is part of this upload.
1209 if files.has_key(dsc_file):
1210 actual_md5 = files[dsc_file]["md5sum"]
1211 actual_size = int(files[dsc_file]["size"])
1212 found = "%s in incoming" % (dsc_file)
1213 # Check the file does not already exist in the archive
# LIKE '%name%' over-matches on purpose; precise filtering happens in
# the loop below.  The result-iteration loop header is on an elided line.
1214 q = self.projectB.query("SELECT f.size, f.md5sum, l.path, f.filename FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location ORDER BY f.id DESC" % (dsc_file))
1216 # Strip out anything that isn't '%s' or '/%s$'
1218 if i[3] != dsc_file and i[3][-(len(dsc_file)+1):] != '/'+dsc_file:
1221 # "[dak] has not broken them. [dak] has fixed a
1222 # brokenness. Your crappy hack exploited a bug in
1225 # "(Come on! I thought it was always obvious that
1226 # one just doesn't release different files with
1227 # the same name and version.)"
1228 # -- ajk@ on d-devel@l.d.o
1231 # Ignore exact matches for .orig.tar.gz
# A byte-identical .orig.tar.gz already in the archive is tolerated:
# warn, remember the pool copy, and (per the @warning above) the
# incoming duplicate gets dropped from 'files' on an elided line.
1233 if dsc_file.endswith(".orig.tar.gz"):
1235 if files.has_key(dsc_file) and \
1236 int(files[dsc_file]["size"]) == int(i[0]) and \
1237 files[dsc_file]["md5sum"] == i[1]:
1238 self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ")
1240 self.pkg.orig_tar_gz = i[2] + i[3]
# Any other archive hit with the same name is a hard reject.
1244 self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file))
# Case 2: referenced .orig.tar.gz not in the upload — look in the pool.
1245 elif dsc_file.endswith(".orig.tar.gz"):
1247 q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
1249 # Strip out anything that isn't '%s' or '/%s$'
1251 if i[1] != dsc_file and i[1][-(len(dsc_file)+1):] != '/'+dsc_file:
1255 # Unfortunately, we may get more than one match here if,
1256 # for example, the package was in potato but had an -sa
1257 # upload in woody. So we need to choose the right one.
1259 # default to something sane in case we don't match any or have only one
# First pass: checksum each candidate against the .dsc entry.
# NOTE(review): old_file_fh is never closed in this pass (it is in the
# queue-directory pass below) — a file-handle leak worth fixing.
1264 old_file = i[0] + i[1]
1265 old_file_fh = utils.open_file(old_file)
1266 actual_md5 = apt_pkg.md5sum(old_file_fh)
1268 actual_size = os.stat(old_file)[stat.ST_SIZE]
1269 if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
# Second pass over the remembered match `x` (bound on elided lines):
# recompute the values and record the pool location.
1272 old_file = x[0] + x[1]
1273 old_file_fh = utils.open_file(old_file)
1274 actual_md5 = apt_pkg.md5sum(old_file_fh)
1276 actual_size = os.stat(old_file)[stat.ST_SIZE]
1279 # need this for updating dsc_files in install()
1280 dsc_files[dsc_file]["files id"] = x[3]
1281 # See install() in process-accepted...
1282 self.pkg.orig_tar_id = x[3]
1283 self.pkg.orig_tar_gz = old_file
1284 self.pkg.orig_tar_location = x[4]
1286 # Not there? Check the queue directories...
1288 in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file)
1289 # See process_it() in 'dak process-unchecked' for explanation of this
1290 # in_unchecked check dropped by ajt 2007-08-28, how did that
# `and False` deliberately disables this branch (see comment above);
# kept as dead code rather than removed.
1292 if os.path.exists(in_unchecked) and False:
1293 return (self.reject_message, in_unchecked)
1295 for directory in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
1296 in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (directory)],dsc_file)
1297 if os.path.exists(in_otherdir):
1298 in_otherdir_fh = utils.open_file(in_otherdir)
1299 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
1300 in_otherdir_fh.close()
1301 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
1303 self.pkg.orig_tar_gz = in_otherdir
# Nowhere to be found: reject.  orig_tar_gz = -1 flags "referenced but
# missing" to later stages (distinct from None = "no orig needed").
1306 self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file))
1307 self.pkg.orig_tar_gz = -1
# Case 3: a non-orig .dsc file missing from the upload.
1310 self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file))
# Final consistency check of whatever copy was located against the
# checksum and size recorded in the .dsc.
1312 if actual_md5 != dsc_files[dsc_file]["md5sum"]:
1313 self.reject("md5sum for %s doesn't match %s." % (found, file))
1314 if actual_size != int(dsc_files[dsc_file]["size"]):
1315 self.reject("size for %s doesn't match %s." % (found, file))
1317 return (self.reject_message, None)
1319 def do_query(self, query):
1321 Executes a database query. Writes statistics / timing to stderr.
1324 @param query: database query string, passed unmodified
1328 @warning: The query is passed B{unmodified}, so be careful what you use this for.
1330 sys.stderr.write("query: \"%s\" ... " % (query))
1331 before = time.time()
1332 r = self.projectB.query(query)
1333 time_diff = time.time()-before
1334 sys.stderr.write("took %.3f seconds.\n" % (time_diff))