5 Queue utility functions for dak
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009 Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ###############################################################################
40 from dak_exceptions import *
41 from regexes import re_default_answer, re_fdnic, re_bin_only_nmu
45 ###############################################################################
47 # Determine what parts in a .changes are NEW
# NOTE(review): this excerpt is a sampled dump with original line numbers
# embedded and several lines missing (e.g. the assignments of `f` and `pkg`
# and the initialisation of `new` are not visible) -- do not assume it runs
# as-is.
49 def determine_new(changes, files, projectB, warn=1):
51 Determine what parts in a C{changes} file are NEW.
53 @type changes: Upload.Pkg.changes dict
54 @param changes: Changes dictionary
56 @type files: Upload.Pkg.files dict
57 @param files: Files dictionary
59 @type projectB: pgobject
60 @param projectB: DB handle
63 @param warn: Warn if overrides are added for (old)stable
66 @return: dictionary of NEW components.
71 # Build up a list of potentially new things
72 for file_entry in files.keys():
74 # Skip byhand elements
75 if f["type"] == "byhand":
78 priority = f["priority"]
79 section = f["section"]
80 file_type = get_type(f)
81 component = f["component"]
# A "dsc" (source) entry seeds the per-package record; a later binary
# entry for the same package overwrites it -- see the "source gets
# trumped by deb or udeb" comment below.
83 if file_type == "dsc":
85 if not new.has_key(pkg):
87 new[pkg]["priority"] = priority
88 new[pkg]["section"] = section
89 new[pkg]["type"] = file_type
90 new[pkg]["component"] = component
91 new[pkg]["files"] = []
93 old_type = new[pkg]["type"]
94 if old_type != file_type:
95 # source gets trumped by deb or udeb
97 new[pkg]["priority"] = priority
98 new[pkg]["section"] = section
99 new[pkg]["type"] = file_type
100 new[pkg]["component"] = component
101 new[pkg]["files"].append(file_entry)
102 if f.has_key("othercomponents"):
103 new[pkg]["othercomponents"] = f["othercomponents"]
# Any package that already has an override row in the DB for one of the
# target suites is not NEW: its files lose their "new" marker.
# NOTE(review): the SQL below is built by string interpolation; values come
# from the archive toolchain, but a parameterized query would be safer.
105 for suite in changes["suite"].keys():
106 suite_id = database.get_suite_id(suite)
107 for pkg in new.keys():
108 component_id = database.get_component_id(new[pkg]["component"])
109 type_id = database.get_override_type_id(new[pkg]["type"])
110 q = projectB.query("SELECT package FROM override WHERE package = '%s' AND suite = %s AND component = %s AND type = %s" % (pkg, suite_id, component_id, type_id))
113 for file_entry in new[pkg]["files"]:
114 if files[file_entry].has_key("new"):
115 del files[file_entry]["new"]
# Warnings about adding overrides for (old)stable; presumably guarded by
# the `warn` parameter on a line not visible in this excerpt -- verify.
119 if changes["suite"].has_key("stable"):
120 print "WARNING: overrides will be added for stable!"
121 if changes["suite"].has_key("oldstable"):
122 print "WARNING: overrides will be added for OLDstable!"
123 for pkg in new.keys():
124 if new[pkg].has_key("othercomponents"):
125 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
129 ################################################################################
# NOTE(review): the `def` line of this helper is missing from this sampled
# excerpt; judging by the call `get_type(f)` above, this appears to be the
# body of get_type(file). It maps a file entry to an override file type,
# trusting "dbtype" when present and treating source artefacts as "dsc".
133 Get the file type of C{file}
136 @param file: file entry
143 if file.has_key("dbtype"):
144 file_type = file["dbtype"]
145 elif file["type"] in [ "orig.tar.gz", "orig.tar.bz2", "tar.gz", "tar.bz2", "diff.gz", "diff.bz2", "dsc" ]:
# fubar() aborts the process on an unrecognised type.
148 utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (file_type))
150 # Validate the override type
151 type_id = database.get_override_type_id(file_type)
153 utils.fubar("invalid type (%s) for new. Say wha?" % (file_type))
157 ################################################################################
def check_valid(new):
    """
    Check if section and priority for NEW packages exist in database.

    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages can not be udeb
      - source priority can only be assigned to dsc file types

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.
        Entries failing a sanity check get their "section id" / "priority id"
        forced to -1 in place; nothing is returned.
    """
    for pkg in new.keys():
        section = new[pkg]["section"]
        priority = new[pkg]["priority"]
        file_type = new[pkg]["type"]
        # Look up the database ids for the claimed section/priority.
        new[pkg]["section id"] = database.get_section_id(section)
        new[pkg]["priority id"] = database.get_priority_id(new[pkg]["priority"])
        # Sanity checks: d-i sections must carry udeb/dsc, and udebs must
        # live in a debian-installer section.
        di = section.find("debian-installer") != -1
        if (di and file_type not in ("udeb", "dsc")) or (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1
        # "source" priority and "dsc" type must occur together.
        if (priority == "source" and file_type != "dsc") or \
           (priority != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1
###############################################################################

class Pkg:
    """Convenience wrapper to carry around all the package information."""

    def __init__(self, **kwds):
        # Every keyword argument becomes an attribute of the instance.
        self.__dict__.update(kwds)

    def update(self, **kwds):
        # Merge further keyword arguments into the attribute dictionary,
        # overwriting existing attributes of the same name.
        self.__dict__.update(kwds)
198 ###############################################################################
# NOTE(review): the enclosing class header (presumably `class Upload:`) is
# missing from this sampled excerpt; what follows is its docstring fragment
# and constructor, with some lines (e.g. storing Cnf on self) not visible.
202 Everything that has to do with an upload processed.
205 def __init__(self, Cnf):
207 Initialize various variables and the global substitution template mappings.
208 Also connect to the DB and initialize the Database module.
# Running totals maintained by accept().
212 self.accept_count = 0
213 self.accept_bytes = 0L
214 self.reject_message = ""
215 self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {})
217 # Initialize the substitution template mapping global
218 Subst = self.Subst = {}
219 Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"]
220 Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"]
221 Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"]
222 Subst["__DAK_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
# Open the PostgreSQL connection and hand it to the database module.
224 self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
225 database.init(Cnf, self.projectB)
227 ###########################################################################
229 def init_vars (self):
230 """ Reset a number of entries from our Pkg object. """
231 self.pkg.changes.clear()
# NOTE(review): original line 232 is missing from this excerpt --
# presumably `self.pkg.dsc.clear()`; verify against the full file.
233 self.pkg.files.clear()
234 self.pkg.dsc_files.clear()
235 self.pkg.orig_tar_id = None
236 self.pkg.orig_tar_location = ""
237 self.pkg.orig_tar_gz = None
239 ###########################################################################
241 def update_vars (self):
243 Update our Pkg object by reading a previously created cPickle .dak dumpfile.
# [:-8] strips the ".changes" suffix to derive the dump file name.
245 dump_filename = self.pkg.changes_file[:-8]+".dak"
246 dump_file = utils.open_file(dump_filename)
# SECURITY NOTE(review): cPickle.load on an attacker-controlled file can
# execute arbitrary code -- safe only because .dak files are written by
# dak itself (dump_vars); do not point this at untrusted input.
247 p = cPickle.Unpickler(dump_file)
# The load order below must match the pickling order in dump_vars.
249 self.pkg.changes.update(p.load())
250 self.pkg.dsc.update(p.load())
251 self.pkg.files.update(p.load())
252 self.pkg.dsc_files.update(p.load())
254 self.pkg.orig_tar_id = p.load()
255 self.pkg.orig_tar_location = p.load()
259 ###########################################################################
262 def dump_vars(self, dest_dir):
264 Dump our Pkg object into a cPickle file.
266 @type dest_dir: string
267 @param dest_dir: Path where the dumpfile should be stored
269 @note: This could just dump the dictionaries as is, but I'd like to avoid this so
270 there's some idea of what process-accepted & process-new use from
271 process-unchecked. (JT)
# NOTE(review): sampled excerpt -- the try/except frames around the chmod
# and the initialisation of d_changes/d_dsc/d_files/d_dsc_files are not
# visible here.
275 changes = self.pkg.changes
277 files = self.pkg.files
278 dsc_files = self.pkg.dsc_files
279 orig_tar_id = self.pkg.orig_tar_id
280 orig_tar_location = self.pkg.orig_tar_location
282 dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".dak")
283 dump_file = utils.open_file(dump_filename, 'w')
# 0664: group-writable so other archive tools can update the dump.
285 os.chmod(dump_filename, 0664)
287 # chmod may fail when the dumpfile is not owned by the user
288 # invoking dak (like e.g. when NEW is processed by a member
290 if errno.errorcode[e.errno] == 'EPERM':
291 perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE])
292 # security precaution, should never happen unless a weird
293 # umask is set anywhere
294 if perms & stat.S_IWOTH:
295 utils.fubar("%s is world writable and chmod failed." % \
297 # ignore the failed chmod otherwise as the file should
298 # already have the right privileges and is just, at worst,
299 # unreadable for world
# Protocol 1 binary pickle; update_vars() loads in the same order as the
# final pickling loop below.
303 p = cPickle.Pickler(dump_file, 1)
# Copy only a whitelisted subset of each dictionary, so later stages see
# exactly the fields process-unchecked intends to hand over.
310 for file_entry in files.keys():
311 d_files[file_entry] = {}
312 for i in [ "package", "version", "architecture", "type", "size",
313 "md5sum", "sha1sum", "sha256sum", "component",
314 "location id", "source package", "source version",
315 "maintainer", "dbtype", "files id", "new",
316 "section", "priority", "othercomponents",
317 "pool name", "original component" ]:
318 if files[file_entry].has_key(i):
319 d_files[file_entry][i] = files[file_entry][i]
321 # Mandatory changes fields
322 for i in [ "distribution", "source", "architecture", "version",
323 "maintainer", "urgency", "fingerprint", "changedby822",
324 "changedby2047", "changedbyname", "maintainer822",
325 "maintainer2047", "maintainername", "maintaineremail",
326 "closes", "changes" ]:
327 d_changes[i] = changes[i]
328 # Optional changes fields
329 for i in [ "changed-by", "filecontents", "format", "process-new note", "adv id", "distribution-version",
331 if changes.has_key(i):
332 d_changes[i] = changes[i]
334 for i in [ "source", "version", "maintainer", "fingerprint",
335 "uploaders", "bts changelog", "dm-upload-allowed" ]:
339 for file_entry in dsc_files.keys():
340 d_dsc_files[file_entry] = {}
341 # Mandatory dsc_files fields
342 for i in [ "size", "md5sum" ]:
343 d_dsc_files[file_entry][i] = dsc_files[file_entry][i]
344 # Optional dsc_files fields
345 for i in [ "files id" ]:
346 if dsc_files[file_entry].has_key(i):
347 d_dsc_files[file_entry][i] = dsc_files[file_entry][i]
349 for i in [ d_changes, d_dsc, d_files, d_dsc_files,
350 orig_tar_id, orig_tar_location ]:
354 ###########################################################################
356 # Set up the per-package template substitution mappings
358 def update_subst (self, reject_message = ""):
359 """ Set up the per-package template substitution mappings """
# NOTE(review): the local binding of `Subst` (presumably
# `Subst = self.Subst` around original line 360) is missing from this
# sampled excerpt.
362 changes = self.pkg.changes
363 # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
364 if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
365 changes["architecture"] = { "Unknown" : "" }
366 # and maintainer2047 may not exist.
367 if not changes.has_key("maintainer2047"):
368 changes["maintainer2047"] = self.Cnf["Dinstall::MyEmailAddress"]
370 Subst["__ARCHITECTURE__"] = " ".join(changes["architecture"].keys())
371 Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
372 Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "")
374 # For source uploads the Changed-By field wins; otherwise Maintainer wins.
375 if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
376 Subst["__MAINTAINER_FROM__"] = changes["changedby2047"]
377 Subst["__MAINTAINER_TO__"] = "%s, %s" % (changes["changedby2047"],
378 changes["maintainer2047"])
379 Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown")
381 Subst["__MAINTAINER_FROM__"] = changes["maintainer2047"]
382 Subst["__MAINTAINER_TO__"] = changes["maintainer2047"]
383 Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown")
# A sponsored upload also notifies the sponsor.
385 if "sponsoremail" in changes:
386 Subst["__MAINTAINER_TO__"] += ", %s"%changes["sponsoremail"]
388 if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
389 Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])
391 # Apply any global override of the Maintainer field
392 if self.Cnf.get("Dinstall::OverrideMaintainer"):
393 Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"]
394 Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"]
396 Subst["__REJECT_MESSAGE__"] = reject_message
397 Subst["__SOURCE__"] = changes.get("source", "Unknown")
398 Subst["__VERSION__"] = changes.get("version", "Unknown")
400 ###########################################################################
402 def build_summaries(self):
403 """ Build a summary of changes the upload introduces. """
# Returns (summary, short_summary); short_summary is the per-file part
# captured before the Changes/override sections are appended below.
404 changes = self.pkg.changes
405 files = self.pkg.files
407 byhand = summary = new = ""
409 # changes["distribution"] may not exist in corner cases
410 # (e.g. unreadable changes files)
411 if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
412 changes["distribution"] = {}
# NOTE(review): sampled excerpt -- the initialisation of override_summary
# and the byhand/new flag assignments are not visible here.
415 file_keys = files.keys()
417 for file_entry in file_keys:
418 if files[file_entry].has_key("byhand"):
420 summary += file_entry + " byhand\n"
421 elif files[file_entry].has_key("new"):
423 summary += "(new) %s %s %s\n" % (file_entry, files[file_entry]["priority"], files[file_entry]["section"])
424 if files[file_entry].has_key("othercomponents"):
425 summary += "WARNING: Already present in %s distribution.\n" % (files[file_entry]["othercomponents"])
426 if files[file_entry]["type"] == "deb":
# Pull the Description straight out of the .deb control data.
427 deb_fh = utils.open_file(file_entry)
428 summary += apt_pkg.ParseSection(apt_inst.debExtractControl(deb_fh))["Description"] + '\n'
431 files[file_entry]["pool name"] = utils.poolify (changes.get("source",""), files[file_entry]["component"])
432 destination = self.Cnf["Dir::PoolRoot"] + files[file_entry]["pool name"] + file_entry
433 summary += file_entry + "\n to " + destination + "\n"
434 if not files[file_entry].has_key("type"):
435 files[file_entry]["type"] = "unknown"
436 if files[file_entry]["type"] in ["deb", "udeb", "dsc"]:
437 # (queue/unchecked), there we have override entries already, use them
438 # (process-new), there we dont have override entries, use the newly generated ones.
439 override_prio = files[file_entry].get("override priority", files[file_entry]["priority"])
440 override_sect = files[file_entry].get("override section", files[file_entry]["section"])
441 override_summary += "%s - %s %s\n" % (file_entry, override_prio, override_sect)
443 short_summary = summary
445 # This is for direport's benefit...
446 f = re_fdnic.sub("\n .\n", changes.get("changes",""))
449 summary += "Changes: " + f
451 summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
# announce() with action=0 only reports what would be announced.
453 summary += self.announce(short_summary, 0)
455 return (summary, short_summary)
457 ###########################################################################
459 def close_bugs (self, summary, action):
461 Send mail to close bugs as instructed by the closes field in the changes file.
462 Also add a line to summary if any work was done.
464 @type summary: string
465 @param summary: summary text, as given by L{build_summaries}
468 @param action: Set to false no real action will be done.
471 @return: summary. If action was taken, extended by the list of closed bugs.
474 changes = self.pkg.changes
478 bugs = changes["closes"].keys()
# NOTE(review): sampled excerpt -- the `for bug in bugs:` loop header and
# the `if action:` guards around the mail sending are not visible here.
484 summary += "Closing bugs: "
486 summary += "%s " % (bug)
488 Subst["__BUG_NUMBER__"] = bug
# Warn the bug submitter when the fix is not in released stable.
489 if changes["distribution"].has_key("stable"):
490 Subst["__STABLE_WARNING__"] = """
491 Note that this package is not part of the released stable Debian
492 distribution. It may have dependencies on other unreleased software,
493 or other instabilities. Please take care if you wish to install it.
494 The update will eventually make its way into the next released Debian
497 Subst["__STABLE_WARNING__"] = ""
498 mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-close")
499 utils.send_mail (mail_message)
501 self.Logger.log(["closing bugs"]+bugs)
506 ###########################################################################
508 def announce (self, short_summary, action):
510 Send an announce mail about a new upload.
512 @type short_summary: string
513 @param short_summary: Short summary text to include in the mail
516 @param action: Set to false no real action will be done.
519 @return: Textstring about action taken.
524 changes = self.pkg.changes
526 # Only do announcements for source uploads with a recent dpkg-dev installed
527 if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):
532 Subst["__SHORT_SUMMARY__"] = short_summary
# One announcement per configured list, de-duplicated via lists_done.
534 for dist in changes["distribution"].keys():
535 announce_list = Cnf.Find("Suite::%s::Announce" % (dist))
536 if announce_list == "" or lists_done.has_key(announce_list):
538 lists_done[announce_list] = 1
539 summary += "Announcing to %s\n" % (announce_list)
# NOTE(review): sampled excerpt -- the `if action:` guard around the
# actual mail sending is not visible here.
542 Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
543 if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
544 Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"])
545 mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.announce")
546 utils.send_mail (mail_message)
548 if Cnf.FindB("Dinstall::CloseBugs"):
549 summary = self.close_bugs(summary, action)
553 ###########################################################################
555 def accept (self, summary, short_summary):
559 This moves all files referenced from the .changes into the I{accepted}
560 queue, sends the accepted mail, announces to lists, closes bugs and
561 also checks for override disparities. If enabled it will write out
562 the version history for the BTS Version Tracking and will finally call
565 @type summary: string
566 @param summary: Summary text
568 @type short_summary: string
569 @param short_summary: Short summary
# NOTE(review): sampled excerpt -- the local bindings of Cnf, Subst and
# dsc (used below) are not visible here.
575 files = self.pkg.files
576 changes = self.pkg.changes
577 changes_file = self.pkg.changes_file
581 self.Logger.log(["Accepting changes",changes_file])
# Persist the Pkg state so process-accepted/process-new can pick it up.
583 self.dump_vars(Cnf["Dir::Queue::Accepted"])
585 # Move all the files into the accepted directory
586 utils.move(changes_file, Cnf["Dir::Queue::Accepted"])
587 file_keys = files.keys()
588 for file_entry in file_keys:
589 utils.move(file_entry, Cnf["Dir::Queue::Accepted"])
590 self.accept_bytes += float(files[file_entry]["size"])
591 self.accept_count += 1
593 # Send accept mail, announce to lists, close bugs and check for
594 # override disparities
595 if not Cnf["Dinstall::Options::No-Mail"]:
596 Subst["__SUITE__"] = ""
597 Subst["__SUMMARY__"] = summary
598 mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
599 utils.send_mail(mail_message)
600 self.announce(short_summary, 1)
603 ## Helper stuff for DebBugs Version Tracking
604 if Cnf.Find("Dir::Queue::BTSVersionTrack"):
605 # ??? once queue/* is cleared on *.d.o and/or reprocessed
606 # the conditionalization on dsc["bts changelog"] should be
609 # Write out the version history from the changelog
610 if changes["architecture"].has_key("source") and \
611 dsc.has_key("bts changelog"):
# Write to a dot-prefixed temp file first, then rename into place so
# consumers never see a partially written .versions file.
613 (fd, temp_filename) = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
614 version_history = os.fdopen(fd, 'w')
615 version_history.write(dsc["bts changelog"])
616 version_history.close()
617 filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
618 changes_file[:-8]+".versions")
619 os.rename(temp_filename, filename)
620 os.chmod(filename, 0644)
622 # Write out the binary -> source mapping.
623 (fd, temp_filename) = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
624 debinfo = os.fdopen(fd, 'w')
625 for file_entry in file_keys:
626 f = files[file_entry]
627 if f["type"] == "deb":
628 line = " ".join([f["package"], f["version"],
629 f["architecture"], f["source package"],
630 f["source version"]])
631 debinfo.write(line+"\n")
633 filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
634 changes_file[:-8]+".debinfo")
635 os.rename(temp_filename, filename)
636 os.chmod(filename, 0644)
638 self.queue_build("accepted", Cnf["Dir::Queue::Accepted"])
640 ###########################################################################
642 def queue_build (self, queue, path):
644 Prepare queue_build database table used for incoming autobuild support.
647 @param queue: queue name
650 @param path: path for the queue file entries/link destinations
# NOTE(review): sampled excerpt -- the local Cnf binding, `continue`
# statements and `else:` branches are not visible here.
655 files = self.pkg.files
656 changes = self.pkg.changes
657 changes_file = self.pkg.changes_file
659 file_keys = files.keys()
661 ## Special support to enable clean auto-building of queued packages
662 queue_id = database.get_or_set_queue_id(queue)
664 self.projectB.query("BEGIN WORK")
665 for suite in changes["distribution"].keys():
666 if suite not in Cnf.ValueList("Dinstall::QueueBuildSuites"):
668 suite_id = database.get_suite_id(suite)
669 dest_dir = Cnf["Dir::QueueBuild"]
670 if Cnf.FindB("Dinstall::SecurityQueueBuild"):
671 dest_dir = os.path.join(dest_dir, suite)
672 for file_entry in file_keys:
673 src = os.path.join(path, file_entry)
674 dest = os.path.join(dest_dir, file_entry)
675 if Cnf.FindB("Dinstall::SecurityQueueBuild"):
676 # Copy it since the original won't be readable by www-data
677 utils.copy(src, dest)
679 # Create a symlink to it
680 os.symlink(src, dest)
681 # Add it to the list of packages for later processing by apt-ftparchive
# NOTE(review): filenames are interpolated into SQL without escaping.
682 self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
683 # If the .orig.tar.gz is in the pool, create a symlink to
684 # it (if one doesn't already exist)
685 if self.pkg.orig_tar_id:
686 # Determine the .orig.tar.gz file name
687 for dsc_file in self.pkg.dsc_files.keys():
688 if dsc_file.endswith(".orig.tar.gz"):
690 dest = os.path.join(dest_dir, filename)
691 # If it doesn't exist, create a symlink
692 if not os.path.exists(dest):
693 # Find the .orig.tar.gz in the pool
694 q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id))
697 utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id))
698 src = os.path.join(ql[0][0], ql[0][1])
699 os.symlink(src, dest)
700 # Add it to the list of packages for later processing by apt-ftparchive
701 self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
702 # if it does, update things to ensure it's not removed prematurely
704 self.projectB.query("UPDATE queue_build SET in_queue = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id))
706 self.projectB.query("COMMIT WORK")
708 ###########################################################################
710 def check_override (self):
712 Checks override entries for validity. Mails "Override disparity" warnings,
713 if that feature is enabled.
715 Abandons the check if
716 - this is a non-sourceful upload
717 - override disparity checks are disabled
718 - mail sending is disabled
# NOTE(review): sampled excerpt -- the local Cnf/Subst bindings, the
# `return` after the guard and the summary initialisation are not visible.
722 changes = self.pkg.changes
723 files = self.pkg.files
726 # Abandon the check if:
727 # a) it's a non-sourceful upload
728 # b) override disparity checks have been disabled
729 # c) we're not sending mail
730 if not changes["architecture"].has_key("source") or \
731 not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
732 Cnf["Dinstall::Options::No-Mail"]:
736 file_keys = files.keys()
# Compare each non-NEW .deb's claimed section/priority against the
# override values recorded by in_override_p(); "-" means "don't care".
738 for file_entry in file_keys:
739 if not files[file_entry].has_key("new") and files[file_entry]["type"] == "deb":
740 section = files[file_entry]["section"]
741 override_section = files[file_entry]["override section"]
742 if section.lower() != override_section.lower() and section != "-":
743 summary += "%s: package says section is %s, override says %s.\n" % (file_entry, section, override_section)
744 priority = files[file_entry]["priority"]
745 override_priority = files[file_entry]["override priority"]
746 if priority != override_priority and priority != "-":
747 summary += "%s: package says priority is %s, override says %s.\n" % (file_entry, priority, override_priority)
752 Subst["__SUMMARY__"] = summary
753 mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/process-unchecked.override-disparity")
754 utils.send_mail(mail_message)
756 ###########################################################################
758 def force_reject (self, files):
760 Forcefully move files from the current directory to the
761 reject directory. If any file already exists in the reject
762 directory it will be moved to the morgue to make way for
766 @param files: file dictionary
# NOTE(review): sampled excerpt -- the try/except frames around the two
# os.open calls and the `continue` statements are not visible here.
772 for file_entry in files:
773 # Skip any files which don't exist or which we don't have permission to copy.
774 if os.access(file_entry,os.R_OK) == 0:
776 dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file_entry)
# O_EXCL makes creation atomic: failure tells us a file already exists.
778 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
780 # File exists? Let's try and move it to the morgue
781 if errno.errorcode[e.errno] == 'EEXIST':
782 morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file_entry)
784 morgue_file = utils.find_next_free(morgue_file)
785 except NoFreeFilenameError:
786 # Something's either gone badly Pete Tong, or
787 # someone is trying to exploit us.
788 utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file_entry))
790 utils.move(dest_file, morgue_file, perms=0660)
# Second attempt after clearing the way via the morgue.
792 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
795 utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
799 # If we got here, we own the destination file, so we can
800 # safely overwrite it.
801 utils.move(file_entry, dest_file, 1, perms=0660)
804 ###########################################################################
806 def do_reject (self, manual = 0, reject_message = "", note = ""):
808 Reject an upload. If called without a reject message or C{manual} is
809 true, spawn an editor so the user can write one.
812 @param manual: manual or automated rejection
814 @type reject_message: string
815 @param reject_message: A reject message
# NOTE(review): sampled excerpt -- the editor re-edit loop structure, the
# local pkg/Cnf/Subst bindings and several guards are not visible here.
820 # If we weren't given a manual rejection message, spawn an
821 # editor so the user can add one in...
822 if manual and not reject_message:
823 (fd, temp_filename) = utils.temp_filename()
824 temp_file = os.fdopen(fd, 'w')
827 temp_file.write(line)
829 editor = os.environ.get("EDITOR","vi")
832 os.system("%s %s" % (editor, temp_filename))
833 temp_fh = utils.open_file(temp_filename)
834 reject_message = "".join(temp_fh.readlines())
836 print "Reject message:"
837 print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
838 prompt = "[R]eject, Edit, Abandon, Quit ?"
840 while prompt.find(answer) == -1:
841 answer = utils.our_raw_input(prompt)
842 m = re_default_answer.search(prompt)
845 answer = answer[:1].upper()
846 os.unlink(temp_filename)
# The rejection reason is written next to the rejected files.
858 reason_filename = pkg.changes_file[:-8] + ".reason"
859 reason_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename
861 # Move all the files into the reject directory
862 reject_files = pkg.files.keys() + [pkg.changes_file]
863 self.force_reject(reject_files)
865 # If we fail here someone is probably trying to exploit the race
866 # so let's just raise an exception ...
867 if os.path.exists(reason_filename):
868 os.unlink(reason_filename)
869 reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
# Automated rejection branch: canned headers, raw reject message.
872 Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
873 Subst["__MANUAL_REJECT_MESSAGE__"] = ""
874 Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
875 os.write(reason_fd, reject_message)
876 reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
878 # Build up the rejection email
# Manual rejection branch: attribute the rejection to the operator.
879 user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"])
881 Subst["__REJECTOR_ADDRESS__"] = user_email_address
882 Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
883 Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
884 reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
885 # Write the rejection email out as the <foo>.reason file
886 os.write(reason_fd, reject_mail_message)
890 # Send the rejection mail if appropriate
891 if not Cnf["Dinstall::Options::No-Mail"]:
892 utils.send_mail(reject_mail_message)
894 self.Logger.log(["rejected", pkg.changes_file])
897 ################################################################################
# NOTE(review): `suites = ["any"]` is a mutable default argument -- safe
# only while no caller mutates it; flagged for a future cleanup.
899 def source_exists (self, package, source_version, suites = ["any"]):
901 Ensure that source exists somewhere in the archive for the binary
902 upload being processed.
903 1. exact match => 1.0-3
904 2. bin-only NMU => 1.0-3+b1 , 1.0-3.1+b1
906 @type package: string
907 @param package: package source name
909 @type source_version: string
910 @param source_version: expected source version
913 @param suites: list of suites to check in, default I{any}
916 @return: returns 1 if a source with expected version is found, otherwise 0
# NOTE(review): sampled excerpt -- the per-suite loop, the `s` set-up and
# the return statements are not visible here.
922 que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
925 # source must exist in suite X, or in some other suite that's
926 # mapped to X, recursively... silent-maps are counted too,
927 # unreleased-maps aren't.
928 maps = self.Cnf.ValueList("SuiteMappings")[:]
930 maps = [ m.split() for m in maps ]
931 maps = [ (x[1], x[2]) for x in maps
932 if x[0] == "map" or x[0] == "silent-map" ]
935 if x[1] in s and x[0] not in s:
938 que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, " OR ".join(["su.suite_name = '%s'" % a for a in s]))
939 q = self.projectB.query(que)
941 # Reduce the query results to a list of version numbers
942 ql = [ i[0] for i in q.getresult() ]
# 1. exact version match
945 if source_version in ql:
# 2. bin-only NMU: strip the +bN suffix and retry
949 orig_source_version = re_bin_only_nmu.sub('', source_version)
950 if orig_source_version in ql:
958 ################################################################################
960 def in_override_p (self, package, component, suite, binary_type, file):
962 Check if a package already has override entries in the DB
964 @type package: string
965 @param package: package name
967 @type component: string
968 @param component: database id of the component, as returned by L{database.get_component_id}
971 @param suite: database id of the suite, as returned by L{database.get_suite_id}
973 @type binary_type: string
974 @param binary_type: type of the package
977 @param file: filename we check
979 @return: the database result. But noone cares anyway.
# NOTE(review): sampled excerpt -- the dsc branch of the binary_type
# check, the unknown-suite early return and the final return statement
# are not visible here.
982 files = self.pkg.files
984 if binary_type == "": # must be source
987 file_type = binary_type
989 # Override suite name; used for example with proposed-updates
990 if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
991 suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)]
993 # Avoid <undef> on unknown distributions
994 suite_id = database.get_suite_id(suite)
997 component_id = database.get_component_id(component)
998 type_id = database.get_override_type_id(file_type)
1000 q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
1001 % (package, suite_id, component_id, type_id))
1002 result = q.getresult()
1003 # If checking for a source package fall back on the binary override type
1004 if file_type == "dsc" and not result:
1005 deb_type_id = database.get_override_type_id("deb")
1006 udeb_type_id = database.get_override_type_id("udeb")
1007 q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
1008 % (package, suite_id, component_id, deb_type_id, udeb_type_id))
1009 result = q.getresult()
1011 # Remember the section and priority so we can check them later if appropriate
# check_override() reads these two keys back.
1013 files[file]["override section"] = result[0][0]
1014 files[file]["override priority"] = result[0][1]
1018 ################################################################################
def reject (self, str, prefix="Rejected: "):
    """
    Append C{str} to C{self.reject_message}, prefixed with C{prefix}
    (by default "Rejected: ").

    @type str: string
    @param str: Reject text

    @type prefix: string
    @param prefix: Prefix text, default Rejected:
    """
    # Separators are inserted *before* each new entry rather than after it,
    # so the accumulated message never carries a trailing newline when it
    # is handed back up to a caller.
    separator = "\n" if self.reject_message else ""
    self.reject_message = "%s%s%s%s" % (self.reject_message, separator, prefix, str)
1040 def get_anyversion(self, query_result, suite):
# Return the highest version of the package present in `suite` or in any
# suite the configuration lists as enhancing it (VersionChecks::Enhances).
# NOTE(review): the accumulator initialisation (presumably "anyversion = None"),
# the assignment inside the inner if, and the final return are outside the
# sampled lines — confirm against the full source.
1043 anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
# query_result rows appear to be (version, suite_name) pairs, matching the
# SELECTs issued by the check_*_against_db callers below.
1044 for (v, s) in query_result:
1045 if s in [ x.lower() for x in anysuite ]:
# Keep the maximum: replace the candidate whenever it compares <= v.
1046 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
1052 def cross_suite_version_check(self, query_result, file, new_version,
# Reject (via self.reject) uploads whose version violates the per-suite
# MustBeNewerThan / MustBeOlderThan rules from the configuration, and set up
# "propdistribution" entries when a conflicting upload should be propagated
# to a mapped suite instead. The signature continues past this line
# (a `sourceful` flag is referenced below) — continuation not sampled.
1055 Ensure versions are newer than existing packages in target
1056 suites and that cross-suite version checking rules as
1057 set out in the conf file are satisfied.
1061 # Check versions for each target suite
1062 for target_suite in self.pkg.changes["distribution"].keys():
1063 must_be_newer_than = [ i.lower() for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
1064 must_be_older_than = [ i.lower() for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
1065 # Enforce "must be newer than target suite" even if conffile omits it
1066 if target_suite not in must_be_newer_than:
1067 must_be_newer_than.append(target_suite)
1068 for entry in query_result:
1069 existent_version = entry[0]
# NOTE(review): `suite` used below is presumably bound from entry[1]
# (the suite_name column) on a line outside the sampled range — verify.
1071 if suite in must_be_newer_than and sourceful and \
1072 apt_pkg.VersionCompare(new_version, existent_version) < 1:
1073 self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
1074 if suite in must_be_older_than and \
1075 apt_pkg.VersionCompare(new_version, existent_version) > -1:
1076 ch = self.pkg.changes
# If a distribution-version mapping exists for the conflicting suite,
# consider propagating to the mapped suite instead of rejecting outright.
1078 if ch.get('distribution-version', {}).has_key(suite):
1079 # we really use the other suite, ignoring the conflicting one ...
1080 addsuite = ch["distribution-version"][suite]
1082 add_version = self.get_anyversion(query_result, addsuite)
1083 target_version = self.get_anyversion(query_result, target_suite)
1086 # not add_version can only happen if we map to a suite
1087 # that doesn't enhance the suite we're propup'ing from.
1088 # so "propup-ver x a b c; map a d" is a problem only if
1089 # d doesn't enhance a.
1091 # i think we could always propagate in this case, rather
1092 # than complaining. either way, this isn't a REJECT issue
1094 # And - we really should complain to the dorks who configured dak
1095 self.reject("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite), "Warning: ")
1096 self.pkg.changes.setdefault("propdistribution", {})
1097 self.pkg.changes["propdistribution"][addsuite] = 1
1099 elif not target_version:
1100 # not targets_version is true when the package is NEW
1101 # we could just stick with the "...old version..." REJECT
1102 # for this, I think.
1103 self.reject("Won't propogate NEW packages.")
1104 elif apt_pkg.VersionCompare(new_version, add_version) < 0:
1105 # propogation would be redundant. no need to reject though.
1106 self.reject("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite), "Warning: ")
1108 elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
1109 apt_pkg.VersionCompare(add_version, target_version) >= 0:
# The mapped suite's current version lags: mark the upload for
# propagation there as well (a warning, not a rejection).
1111 self.reject("Propogating upload to %s" % (addsuite), "Warning: ")
1112 self.pkg.changes.setdefault("propdistribution", {})
1113 self.pkg.changes["propdistribution"][addsuite] = 1
# Fallback (the introducing else: line is outside the sampled range):
# no mapping applies, so this is a hard version-conflict rejection.
1117 self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
1121 def check_binary_against_db(self, file):
# Validate a binary package against the archive database: run the cross-suite
# version checks and refuse to overwrite an identical (package, version, arch)
# binary already present. Returns the accumulated reject message ("" if clean).
1125 self.reject_message = ""
1126 files = self.pkg.files
1128 # Ensure version is sane
1129 q = self.projectB.query("""
1130 SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
# NOTE(review): the FROM-clause continuation naming "architecture a" is on a
# line outside the sampled range — the alias `a` below depends on it.
1132 WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
1133 AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
1134 % (files[file]["package"],
1135 files[file]["architecture"]))
# Rows are (version, suite_name); sourceful=False since this is a binary.
1136 self.cross_suite_version_check(q.getresult(), file,
1137 files[file]["version"], sourceful=False)
1139 # Check for any existing copies of the file
1140 q = self.projectB.query("""
1141 SELECT b.id FROM binaries b, architecture a
1142 WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
1143 AND a.id = b.architecture"""
1144 % (files[file]["package"],
1145 files[file]["version"],
1146 files[file]["architecture"]))
# The guard testing q.getresult() before this reject falls outside the
# sampled lines; the reject fires only when a duplicate row exists.
1148 self.reject("%s: can not overwrite existing copy already in the archive." % (file))
1150 return self.reject_message
1154 def check_source_against_db(self, file):
# Validate a source package's version against every suite it already exists
# in, via cross_suite_version_check. Returns the accumulated reject message.
1157 self.reject_message = ""
# NOTE(review): `dsc` used below is presumably bound from self.pkg.dsc on a
# line outside the sampled range — verify against the full source.
1160 # Ensure version is sane
1161 q = self.projectB.query("""
1162 SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
1163 WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")))
# The call's final argument (presumably sourceful=True) continues on a line
# outside the sampled range.
1164 self.cross_suite_version_check(q.getresult(), file, dsc.get("version"),
1167 return self.reject_message
1172 def check_dsc_against_db(self, file):
# Locate every file the .dsc references, preferring copies already in the
# archive pool, then incoming, then the queue directories; verify md5sum and
# size against the .dsc. Returns (reject_message, orig_tar_gz_path_or_None).
1175 @warning: NB: this function can remove entries from the 'files' index [if
1176 the .orig.tar.gz is a duplicate of the one in the archive]; if
1177 you're iterating over 'files' and call this function as part of
1178 the loop, be sure to add a check to the top of the loop to
1179 ensure you haven't just tried to dereference the deleted entry.
1182 self.reject_message = ""
1183 files = self.pkg.files
1184 dsc_files = self.pkg.dsc_files
1185 self.pkg.orig_tar_gz = None
1187 # Try and find all files mentioned in the .dsc. This has
1188 # to work harder to cope with the multiple possible
1189 # locations of an .orig.tar.gz.
1190 # The ordering on the select is needed to pick the newest orig
1191 # when it exists in multiple places.
1192 for dsc_file in dsc_files.keys():
# Case 1: the file is part of this upload (present in 'files').
1194 if files.has_key(dsc_file):
1195 actual_md5 = files[dsc_file]["md5sum"]
1196 actual_size = int(files[dsc_file]["size"])
1197 found = "%s in incoming" % (dsc_file)
1198 # Check the file does not already exist in the archive
1199 q = self.projectB.query("SELECT f.size, f.md5sum, l.path, f.filename FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location ORDER BY f.id DESC" % (dsc_file))
1201 # Strip out anything that isn't '%s' or '/%s$'
# (the enclosing "for i in q.getresult():" loop header falls outside
# the sampled lines; `i` rows are (size, md5sum, path, filename))
1203 if i[3] != dsc_file and i[3][-(len(dsc_file)+1):] != '/'+dsc_file:
1206 # "[dak] has not broken them. [dak] has fixed a
1207 # brokenness. Your crappy hack exploited a bug in
1210 # "(Come on! I thought it was always obvious that
1211 # one just doesn't release different files with
1212 # the same name and version.)"
1213 # -- ajk@ on d-devel@l.d.o
1216 # Ignore exact matches for .orig.tar.gz
1218 if dsc_file.endswith(".orig.tar.gz"):
# An identical .orig.tar.gz already in the archive is harmless:
# warn, record the archive copy, and (per the @warning above) the
# incoming duplicate entry is dropped on lines not sampled here.
1220 if files.has_key(dsc_file) and \
1221 int(files[dsc_file]["size"]) == int(i[0]) and \
1222 files[dsc_file]["md5sum"] == i[1]:
1223 self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ")
1225 self.pkg.orig_tar_gz = i[2] + i[3]
# Any other name collision with an archived file is fatal.
1229 self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file))
# Case 2: an .orig.tar.gz not in this upload — hunt for it in the pool.
1230 elif dsc_file.endswith(".orig.tar.gz"):
1232 q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
1234 # Strip out anything that isn't '%s' or '/%s$'
1236 if i[1] != dsc_file and i[1][-(len(dsc_file)+1):] != '/'+dsc_file:
1240 # Unfortunately, we may get more than one match here if,
1241 # for example, the package was in potato but had an -sa
1242 # upload in woody. So we need to choose the right one.
1244 # default to something sane in case we don't match any or have only one
# First pass: hash/stat each candidate looking for one that matches
# the .dsc's recorded md5sum and size exactly.
1249 old_file = i[0] + i[1]
1250 old_file_fh = utils.open_file(old_file)
1251 actual_md5 = apt_pkg.md5sum(old_file_fh)
1253 actual_size = os.stat(old_file)[stat.ST_SIZE]
1254 if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
# Second pass over the chosen match `x` recomputes its checksum/size.
1257 old_file = x[0] + x[1]
1258 old_file_fh = utils.open_file(old_file)
1259 actual_md5 = apt_pkg.md5sum(old_file_fh)
1261 actual_size = os.stat(old_file)[stat.ST_SIZE]
1264 # need this for updating dsc_files in install()
1265 dsc_files[dsc_file]["files id"] = x[3]
1266 # See install() in process-accepted...
1267 self.pkg.orig_tar_id = x[3]
1268 self.pkg.orig_tar_gz = old_file
1269 self.pkg.orig_tar_location = x[4]
1271 # Not there? Check the queue directories...
1273 in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file)
1274 # See process_it() in 'dak process-unchecked' for explanation of this
1275 # in_unchecked check dropped by ajt 2007-08-28, how did that
# "and False" deliberately disables this dead branch — see comment above.
1277 if os.path.exists(in_unchecked) and False:
1278 return (self.reject_message, in_unchecked)
1280 for directory in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates" ]:
1281 in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (directory)],dsc_file)
1282 if os.path.exists(in_otherdir):
1283 in_otherdir_fh = utils.open_file(in_otherdir)
1284 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
1285 in_otherdir_fh.close()
1286 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
1288 self.pkg.orig_tar_gz = in_otherdir
# Exhausted every location: reject. orig_tar_gz = -1 flags "referenced
# but unfindable" distinctly from None (never referenced).
1291 self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file))
1292 self.pkg.orig_tar_gz = -1
# Case 3: a non-orig file missing from the upload is always an error.
1295 self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file))
# Final integrity check of whichever copy was located above.
1297 if actual_md5 != dsc_files[dsc_file]["md5sum"]:
1298 self.reject("md5sum for %s doesn't match %s." % (found, file))
1299 if actual_size != int(dsc_files[dsc_file]["size"]):
1300 self.reject("size for %s doesn't match %s." % (found, file))
1302 return (self.reject_message, None)
1304 def do_query(self, query):
1306 Executes a database query. Writes statistics / timing to stderr.
1309 @param query: database query string, passed unmodified
1313 @warning: The query is passed B{unmodified}, so be careful what you use this for.
1315 sys.stderr.write("query: \"%s\" ... " % (query))
1316 before = time.time()
1317 r = self.projectB.query(query)
1318 time_diff = time.time()-before
1319 sys.stderr.write("took %.3f seconds.\n" % (time_diff))