5 Queue utility functions for dak
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009 Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ###############################################################################
40 from dak_exceptions import *
41 from regexes import re_default_answer, re_fdnic, re_bin_only_nmu
45 ###############################################################################
47 # Determine what parts in a .changes are NEW
def determine_new(changes, files, projectB, warn=1):
    """
    Determine what parts in a C{changes} file are NEW.

    @type changes: Upload.Pkg.changes dict
    @param changes: Changes dictionary

    @type files: Upload.Pkg.files dict
    @param files: Files dictionary

    @type projectB: pgobject
    @param projectB: DB handle

    @type warn: bool
    @param warn: Warn if overrides are added for (old)stable

    @rtype: dict
    @return: dictionary of NEW components.
    """
    # NOTE(review): several statements appear to be elided from this copy
    # (e.g. the initialisation of 'new', the binding of 'f' and 'pkg', the
    # 'continue'/'else:' lines and the final return) — compare against the
    # canonical dak source before relying on this listing.
    # Build up a list of potentially new things
    for file_entry in files.keys():
        # Skip byhand elements
        if f["type"] == "byhand":
        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f)
        component = f["component"]
        # Source packages are keyed on the source package name
        if file_type == "dsc":
        # First time we see this package: seed its NEW entry
        if not new.has_key(pkg):
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        old_type = new[pkg]["type"]
        if old_type != file_type:
            # source gets trumped by deb or udeb
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
        new[pkg]["files"].append(file_entry)
        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]

    # Anything that already has an override entry is not NEW; clear the
    # per-file "new" marker for those packages.
    for suite in changes["suite"].keys():
        suite_id = database.get_suite_id(suite)
        for pkg in new.keys():
            component_id = database.get_component_id(new[pkg]["component"])
            type_id = database.get_override_type_id(new[pkg]["type"])
            q = projectB.query("SELECT package FROM override WHERE package = '%s' AND suite = %s AND component = %s AND type = %s" % (pkg, suite_id, component_id, type_id))
            for file_entry in new[pkg]["files"]:
                if files[file_entry].has_key("new"):
                    del files[file_entry]["new"]

    # Warnings for the human operator (the enclosing 'if warn:' guard is
    # presumably elided from this copy — TODO confirm)
    if changes["suite"].has_key("stable"):
        print "WARNING: overrides will be added for stable!"
    if changes["suite"].has_key("oldstable"):
        print "WARNING: overrides will be added for OLDstable!"
    for pkg in new.keys():
        if new[pkg].has_key("othercomponents"):
            print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
129 ################################################################################
133 Get the file type of C{file}
136 @param file: file entry
143 if file.has_key("dbtype"):
144 file_type = file["dbtype"]
145 elif file["type"] in [ "orig.tar.gz", "orig.tar.bz2", "tar.gz", "tar.bz2", "diff.gz", "diff.bz2", "dsc" ]:
148 utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (file_type))
150 # Validate the override type
151 type_id = database.get_override_type_id(file_type)
153 utils.fubar("invalid type (%s) for new. Say wha?" % (file_type))
157 ################################################################################
def check_valid(new):
    """
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages can not be udeb
      - source priority can only be assigned to dsc file types

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.
    """
    for pkg in new.keys():
        section = new[pkg]["section"]
        priority = new[pkg]["priority"]
        file_type = new[pkg]["type"]
        # Resolve section/priority names to database ids
        new[pkg]["section id"] = database.get_section_id(section)
        # Use the local alias for consistency with the section lookup above
        # (previously this re-read new[pkg]["priority"])
        new[pkg]["priority id"] = database.get_priority_id(priority)
        # Sanity checks: debian-installer sections must carry udeb (or
        # source) entries and only d-i sections may carry udebs; flag
        # violations by invalidating the id.
        di = section.find("debian-installer") != -1
        if (di and file_type not in ("udeb", "dsc")) or (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1
        # "source" priority is reserved for dsc file types and vice versa
        if (priority == "source" and file_type != "dsc") or \
           (priority != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1
188 ###############################################################################
191 """ Convenience wrapper to carry around all the package information """
192 def __init__(self, **kwds):
193 self.__dict__.update(kwds)
195 def update(self, **kwds):
196 self.__dict__.update(kwds)
198 ###############################################################################
202 Everything that has to do with an upload processed.
    def __init__(self, Cnf):
        """
        Initialize various variables and the global substitution template mappings.
        Also connect to the DB and initialize the Database module.
        """
        # NOTE(review): a few statements (presumably including self.Cnf = Cnf)
        # appear to be elided from this copy — confirm against the canonical
        # dak source.
        # Per-run accounting of accepted uploads
        self.accept_count = 0
        self.accept_bytes = 0L
        self.reject_message = ""
        # Per-upload package state; cleared between uploads by init_vars()
        self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
                       legacy_source_untouchable = {})

        # Initialize the substitution template mapping global
        Subst = self.Subst = {}
        Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"]
        Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"]
        Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"]
        Subst["__DAK_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]

        # Connect to the projectB database and initialise the database module
        self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
        database.init(Cnf, self.projectB)
228 ###########################################################################
    def init_vars (self):
        """ Reset a number of entries from our Pkg object. """
        self.pkg.changes.clear()
        # NOTE(review): the dsc dict is not cleared in this copy — possibly an
        # elided line; confirm against the canonical source.
        self.pkg.files.clear()
        self.pkg.dsc_files.clear()
        self.pkg.legacy_source_untouchable.clear()
        # Reset .orig.tar.gz bookkeeping as well
        self.pkg.orig_tar_id = None
        self.pkg.orig_tar_location = ""
        self.pkg.orig_tar_gz = None
241 ###########################################################################
    def update_vars (self):
        """
        Update our Pkg object by reading a previously created cPickle .dak dumpfile.
        """
        # The dump lives beside the .changes file: foo.changes -> foo.dak
        dump_filename = self.pkg.changes_file[:-8]+".dak"
        dump_file = utils.open_file(dump_filename)
        p = cPickle.Unpickler(dump_file)

        # Load in the same order dump_vars() pickled them
        self.pkg.changes.update(p.load())
        self.pkg.dsc.update(p.load())
        self.pkg.files.update(p.load())
        self.pkg.dsc_files.update(p.load())
        self.pkg.legacy_source_untouchable.update(p.load())

        self.pkg.orig_tar_id = p.load()
        self.pkg.orig_tar_location = p.load()
        # NOTE(review): the trailing loads/close appear to be elided from
        # this copy.
262 ###########################################################################
    def dump_vars(self, dest_dir):
        """
        Dump our Pkg object into a cPickle file.

        @type dest_dir: string
        @param dest_dir: Path where the dumpfile should be stored

        @note: This could just dump the dictionaries as is, but I'd like to avoid this so
        there's some idea of what process-accepted & process-new use from
        process-unchecked. (JT)
        """
        # NOTE(review): the d_* dict initialisations, the try/except around
        # chmod and the final p.dump()/close appear to be elided from this
        # copy — compare against the canonical dak source.
        changes = self.pkg.changes
        files = self.pkg.files
        dsc_files = self.pkg.dsc_files
        legacy_source_untouchable = self.pkg.legacy_source_untouchable
        orig_tar_id = self.pkg.orig_tar_id
        orig_tar_location = self.pkg.orig_tar_location

        dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".dak")
        dump_file = utils.open_file(dump_filename, 'w')
        # Make the dumpfile group-writable
        os.chmod(dump_filename, 0664)
        # chmod may fail when the dumpfile is not owned by the user
        # invoking dak (like e.g. when NEW is processed by a member
        # of a different group) — the surrounding try/except is elided here
        if errno.errorcode[e.errno] == 'EPERM':
            perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE])
            # security precaution, should never happen unless a weird
            # umask is set anywhere
            if perms & stat.S_IWOTH:
                utils.fubar("%s is world writable and chmod failed." % \
            # ignore the failed chmod otherwise as the file should
            # already have the right privileges and is just, at worst,
            # unreadable for world

        p = cPickle.Pickler(dump_file, 1)

        # Copy only the whitelisted per-file fields rather than dumping
        # the dictionaries wholesale (see the @note above)
        for file_entry in files.keys():
            d_files[file_entry] = {}
            for i in [ "package", "version", "architecture", "type", "size",
                       "md5sum", "sha1sum", "sha256sum", "component",
                       "location id", "source package", "source version",
                       "maintainer", "dbtype", "files id", "new",
                       "section", "priority", "othercomponents",
                       "pool name", "original component" ]:
                if files[file_entry].has_key(i):
                    d_files[file_entry][i] = files[file_entry][i]

        # Mandatory changes fields
        for i in [ "distribution", "source", "architecture", "version",
                   "maintainer", "urgency", "fingerprint", "changedby822",
                   "changedby2047", "changedbyname", "maintainer822",
                   "maintainer2047", "maintainername", "maintaineremail",
                   "closes", "changes" ]:
            d_changes[i] = changes[i]
        # Optional changes fields
        for i in [ "changed-by", "filecontents", "format", "process-new note", "adv id", "distribution-version",
            if changes.has_key(i):
                d_changes[i] = changes[i]

        # dsc fields (copy loop body elided in this copy)
        for i in [ "source", "version", "maintainer", "fingerprint",
                   "uploaders", "bts changelog", "dm-upload-allowed" ]:

        for file_entry in dsc_files.keys():
            d_dsc_files[file_entry] = {}
            # Mandatory dsc_files fields
            for i in [ "size", "md5sum" ]:
                d_dsc_files[file_entry][i] = dsc_files[file_entry][i]
            # Optional dsc_files fields
            for i in [ "files id" ]:
                if dsc_files[file_entry].has_key(i):
                    d_dsc_files[file_entry][i] = dsc_files[file_entry][i]

        # Pickle in the order update_vars() reads them back
        for i in [ d_changes, d_dsc, d_files, d_dsc_files,
                   legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
358 ###########################################################################
360 # Set up the per-package template substitution mappings
    def update_subst (self, reject_message = ""):
        """ Set up the per-package template substitution mappings """
        # NOTE(review): the binding of Subst (presumably Subst = self.Subst)
        # and the else branch of the maintainer selection appear to be elided
        # from this copy.
        changes = self.pkg.changes
        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
            changes["architecture"] = { "Unknown" : "" }
        # and maintainer2047 may not exist.
        if not changes.has_key("maintainer2047"):
            changes["maintainer2047"] = self.Cnf["Dinstall::MyEmailAddress"]

        Subst["__ARCHITECTURE__"] = " ".join(changes["architecture"].keys())
        Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
            Subst["__MAINTAINER_FROM__"] = changes["changedby2047"]
            Subst["__MAINTAINER_TO__"] = "%s, %s" % (changes["changedby2047"],
                                                     changes["maintainer2047"])
            Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown")
        # Fallback branch (its 'else:' header is elided in this copy)
        Subst["__MAINTAINER_FROM__"] = changes["maintainer2047"]
        Subst["__MAINTAINER_TO__"] = changes["maintainer2047"]
        Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown")

        # CC the sponsor when one is recorded
        if "sponsoremail" in changes:
            Subst["__MAINTAINER_TO__"] += ", %s"%changes["sponsoremail"]

        # Bcc the package tracking server for source uploads
        if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
            Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if self.Cnf.get("Dinstall::OverrideMaintainer"):
            Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"]
            Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"]

        Subst["__REJECT_MESSAGE__"] = reject_message
        Subst["__SOURCE__"] = changes.get("source", "Unknown")
        Subst["__VERSION__"] = changes.get("version", "Unknown")
404 ###########################################################################
    def build_summaries(self):
        """ Build a summary of changes the upload introduces. """
        changes = self.pkg.changes
        files = self.pkg.files

        byhand = summary = new = ""

        # changes["distribution"] may not exist in corner cases
        # (e.g. unreadable changes files)
        if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
            changes["distribution"] = {}

        file_keys = files.keys()
        # NOTE(review): this copy appears to elide a few lines here (sorting,
        # the byhand/new flag assignments, override_summary initialisation and
        # an else branch) — compare with the canonical dak source.
        for file_entry in file_keys:
            if files[file_entry].has_key("byhand"):
                summary += file_entry + " byhand\n"
            elif files[file_entry].has_key("new"):
                summary += "(new) %s %s %s\n" % (file_entry, files[file_entry]["priority"], files[file_entry]["section"])
                if files[file_entry].has_key("othercomponents"):
                    summary += "WARNING: Already present in %s distribution.\n" % (files[file_entry]["othercomponents"])
                # For debs, include the package description in the summary
                if files[file_entry]["type"] == "deb":
                    deb_fh = utils.open_file(file_entry)
                    summary += apt_pkg.ParseSection(apt_inst.debExtractControl(deb_fh))["Description"] + '\n'
            files[file_entry]["pool name"] = utils.poolify (changes.get("source",""), files[file_entry]["component"])
            destination = self.Cnf["Dir::PoolRoot"] + files[file_entry]["pool name"] + file_entry
            summary += file_entry + "\n  to " + destination + "\n"
            if not files[file_entry].has_key("type"):
                files[file_entry]["type"] = "unknown"
            if files[file_entry]["type"] in ["deb", "udeb", "dsc"]:
                # (queue/unchecked), there we have override entries already, use them
                # (process-new), there we dont have override entries, use the newly generated ones.
                override_prio = files[file_entry].get("override priority", files[file_entry]["priority"])
                override_sect = files[file_entry].get("override section", files[file_entry]["section"])
                override_summary += "%s - %s %s\n" % (file_entry, override_prio, override_sect)

        short_summary = summary

        # This is for direport's benefit...
        f = re_fdnic.sub("\n .\n", changes.get("changes",""))

        summary += "Changes: " + f

        summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"

        summary += self.announce(short_summary, 0)

        return (summary, short_summary)
461 ###########################################################################
    def close_bugs (self, summary, action):
        """
        Send mail to close bugs as instructed by the closes field in the changes file.
        Also add a line to summary if any work was done.

        @type summary: string
        @param summary: summary text, as given by L{build_summaries}

        @type action: bool
        @param action: Set to false no real action will be done.

        @rtype: string
        @return: summary. If action was taken, extended by the list of closed bugs.
        """
        changes = self.pkg.changes

        bugs = changes["closes"].keys()

        summary += "Closing bugs: "
        # NOTE(review): the per-bug loop header, action guard and the closing
        # quotes of the stable-warning string appear to be elided from this
        # copy.
        summary += "%s " % (bug)
        Subst["__BUG_NUMBER__"] = bug
        if changes["distribution"].has_key("stable"):
            Subst["__STABLE_WARNING__"] = """
Note that this package is not part of the released stable Debian
distribution. It may have dependencies on other unreleased software,
or other instabilities. Please take care if you wish to install it.
The update will eventually make its way into the next released Debian
            Subst["__STABLE_WARNING__"] = ""
        mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-close")
        utils.send_mail (mail_message)
        self.Logger.log(["closing bugs"]+bugs)
510 ###########################################################################
    def announce (self, short_summary, action):
        """
        Send an announce mail about a new upload.

        @type short_summary: string
        @param short_summary: Short summary text to include in the mail

        @type action: bool
        @param action: Set to false no real action will be done.

        @rtype: string
        @return: Textstring about action taken.
        """
        changes = self.pkg.changes

        # Only do announcements for source uploads with a recent dpkg-dev installed
        if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):
        # NOTE(review): the early return, lists_done/summary initialisation and
        # per-list action guard appear to be elided from this copy.

        Subst["__SHORT_SUMMARY__"] = short_summary

        for dist in changes["distribution"].keys():
            announce_list = Cnf.Find("Suite::%s::Announce" % (dist))
            # Announce each list at most once
            if announce_list == "" or lists_done.has_key(announce_list):
            lists_done[announce_list] = 1
            summary += "Announcing to %s\n" % (announce_list)

            Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
            if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
                Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"])
            mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.announce")
            utils.send_mail (mail_message)

        if Cnf.FindB("Dinstall::CloseBugs"):
            summary = self.close_bugs(summary, action)
557 ###########################################################################
    def accept (self, summary, short_summary):
        """
        Accept an upload.

        This moves all files referenced from the .changes into the I{accepted}
        queue, sends the accepted mail, announces to lists, closes bugs and
        also checks for override disparities. If enabled it will write out
        the version history for the BTS Version Tracking and will finally call
        L{queue_build}.

        @type summary: string
        @param summary: Summary text

        @type short_summary: string
        @param short_summary: Short summary
        """
        # NOTE(review): local aliases (e.g. Cnf, Subst, dsc) and a few other
        # lines (e.g. debinfo.close()) appear to be elided from this copy.
        files = self.pkg.files
        changes = self.pkg.changes
        changes_file = self.pkg.changes_file

        self.Logger.log(["Accepting changes",changes_file])

        # Persist our state so process-accepted/process-new can pick it up
        self.dump_vars(Cnf["Dir::Queue::Accepted"])

        # Move all the files into the accepted directory
        utils.move(changes_file, Cnf["Dir::Queue::Accepted"])
        file_keys = files.keys()
        for file_entry in file_keys:
            utils.move(file_entry, Cnf["Dir::Queue::Accepted"])
            self.accept_bytes += float(files[file_entry]["size"])
            self.accept_count += 1

        # Send accept mail, announce to lists, close bugs and check for
        # override disparities
        if not Cnf["Dinstall::Options::No-Mail"]:
            Subst["__SUITE__"] = ""
            Subst["__SUMMARY__"] = summary
            mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
            utils.send_mail(mail_message)
            self.announce(short_summary, 1)

        ## Helper stuff for DebBugs Version Tracking
        if Cnf.Find("Dir::Queue::BTSVersionTrack"):
            # ??? once queue/* is cleared on *.d.o and/or reprocessed
            # the conditionalization on dsc["bts changelog"] should be
            # dropped.

            # Write out the version history from the changelog
            if changes["architecture"].has_key("source") and \
               dsc.has_key("bts changelog"):

                (fd, temp_filename) = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
                version_history = os.fdopen(fd, 'w')
                version_history.write(dsc["bts changelog"])
                version_history.close()
                filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                      changes_file[:-8]+".versions")
                os.rename(temp_filename, filename)
                os.chmod(filename, 0644)

            # Write out the binary -> source mapping.
            (fd, temp_filename) = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
            debinfo = os.fdopen(fd, 'w')
            for file_entry in file_keys:
                f = files[file_entry]
                if f["type"] == "deb":
                    line = " ".join([f["package"], f["version"],
                                     f["architecture"], f["source package"],
                                     f["source version"]])
                    debinfo.write(line+"\n")
            filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                  changes_file[:-8]+".debinfo")
            os.rename(temp_filename, filename)
            os.chmod(filename, 0644)

        self.queue_build("accepted", Cnf["Dir::Queue::Accepted"])
644 ###########################################################################
    def queue_build (self, queue, path):
        """
        Prepare queue_build database table used for incoming autobuild support.

        @type queue: string
        @param queue: queue name

        @type path: string
        @param path: path for the queue file entries/link destinations
        """
        # NOTE(review): local aliases (e.g. Cnf), 'continue'/'else:' lines and
        # the ql result handling appear to be elided from this copy.
        files = self.pkg.files
        changes = self.pkg.changes
        changes_file = self.pkg.changes_file

        file_keys = files.keys()

        ## Special support to enable clean auto-building of queued packages
        queue_id = database.get_or_set_queue_id(queue)

        self.projectB.query("BEGIN WORK")
        for suite in changes["distribution"].keys():
            # Only suites configured for queue-building are processed
            if suite not in Cnf.ValueList("Dinstall::QueueBuildSuites"):
            suite_id = database.get_suite_id(suite)
            dest_dir = Cnf["Dir::QueueBuild"]
            if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                dest_dir = os.path.join(dest_dir, suite)
            for file_entry in file_keys:
                src = os.path.join(path, file_entry)
                dest = os.path.join(dest_dir, file_entry)
                if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                    # Copy it since the original won't be readable by www-data
                    utils.copy(src, dest)
                # Create a symlink to it
                os.symlink(src, dest)
                # Add it to the list of packages for later processing by apt-ftparchive
                self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
            # If the .orig.tar.gz is in the pool, create a symlink to
            # it (if one doesn't already exist)
            if self.pkg.orig_tar_id:
                # Determine the .orig.tar.gz file name
                for dsc_file in self.pkg.dsc_files.keys():
                    if dsc_file.endswith(".orig.tar.gz"):
                dest = os.path.join(dest_dir, filename)
                # If it doesn't exist, create a symlink
                if not os.path.exists(dest):
                    # Find the .orig.tar.gz in the pool
                    q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id))
                    utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id))
                    src = os.path.join(ql[0][0], ql[0][1])
                    os.symlink(src, dest)
                    # Add it to the list of packages for later processing by apt-ftparchive
                    self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
                # if it does, update things to ensure it's not removed prematurely
                self.projectB.query("UPDATE queue_build SET in_queue = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id))

        self.projectB.query("COMMIT WORK")
712 ###########################################################################
    def check_override (self):
        """
        Checks override entries for validity. Mails "Override disparity" warnings,
        if that feature is enabled.

        Abandons the check if
            - this is a non-sourceful upload
            - override disparity checks are disabled
            - mail sending is disabled
        """
        changes = self.pkg.changes
        files = self.pkg.files
        # NOTE(review): local aliases (Cnf/Subst), the early return and the
        # summary initialisation appear to be elided from this copy.

        # Abandon the check if:
        #  a) it's a non-sourceful upload
        #  b) override disparity checks have been disabled
        #  c) we're not sending mail
        if not changes["architecture"].has_key("source") or \
           not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
           Cnf["Dinstall::Options::No-Mail"]:

        file_keys = files.keys()

        for file_entry in file_keys:
            # Only binary (deb) files with existing overrides are compared
            if not files[file_entry].has_key("new") and files[file_entry]["type"] == "deb":
                section = files[file_entry]["section"]
                override_section = files[file_entry]["override section"]
                if section.lower() != override_section.lower() and section != "-":
                    summary += "%s: package says section is %s, override says %s.\n" % (file_entry, section, override_section)
                priority = files[file_entry]["priority"]
                override_priority = files[file_entry]["override priority"]
                if priority != override_priority and priority != "-":
                    summary += "%s: package says priority is %s, override says %s.\n" % (file_entry, priority, override_priority)

        Subst["__SUMMARY__"] = summary
        mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/process-unchecked.override-disparity")
        utils.send_mail(mail_message)
760 ###########################################################################
    def force_reject (self, files):
        """
        Forcefully move files from the current directory to the
        reject directory.  If any file already exists in the reject
        directory it will be moved to the morgue to make way for
        the new file.

        @type files: dict
        @param files: file dictionary
        """
        # NOTE(review): the Cnf alias and the try/except framing around the
        # os.open() calls appear to be elided from this copy.
        for file_entry in files:
            # Skip any files which don't exist or which we don't have permission to copy.
            if os.access(file_entry,os.R_OK) == 0:
            dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file_entry)
            # O_EXCL claims the destination atomically
            dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
            # File exists? Let's try and move it to the morgue
            if errno.errorcode[e.errno] == 'EEXIST':
                morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file_entry)
                morgue_file = utils.find_next_free(morgue_file)
            except NoFreeFilenameError:
                # Something's either gone badly Pete Tong, or
                # someone is trying to exploit us.
                utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file_entry))
                utils.move(dest_file, morgue_file, perms=0660)
                # Retry the claim now that the old file is out of the way
                dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
                utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))

            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file_entry, dest_file, 1, perms=0660)
808 ###########################################################################
    def do_reject (self, manual = 0, reject_message = ""):
        """
        Reject an upload. If called without a reject message or C{manual} is
        true, spawn an editor so the user can write one.

        @type manual: bool
        @param manual: manual or automated rejection

        @type reject_message: string
        @param reject_message: A reject message
        """
        # If we weren't given a manual rejection message, spawn an
        # editor so the user can add one in...
        if manual and not reject_message:
            (fd, temp_filename) = utils.temp_filename()
            editor = os.environ.get("EDITOR","vi")
            # NOTE(review): the edit/confirm loop framing (answer init,
            # Abandon/Quit handling) is partially elided from this copy.
            os.system("%s %s" % (editor, temp_filename))
            temp_fh = utils.open_file(temp_filename)
            reject_message = "".join(temp_fh.readlines())
            print "Reject message:"
            print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
            prompt = "[R]eject, Edit, Abandon, Quit ?"
            while prompt.find(answer) == -1:
                answer = utils.our_raw_input(prompt)
                m = re_default_answer.search(prompt)
            answer = answer[:1].upper()
            os.unlink(temp_filename)

        # The .reason file is written next to the rejected .changes
        reason_filename = pkg.changes_file[:-8] + ".reason"
        reason_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename

        # Move all the files into the reject directory
        reject_files = pkg.files.keys() + [pkg.changes_file]
        self.force_reject(reject_files)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reason_filename):
            os.unlink(reason_filename)
        reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)

        # Automated rejection headers
        Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
        Subst["__MANUAL_REJECT_MESSAGE__"] = ""
        Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
        os.write(reason_fd, reject_message)
        reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")

        # Build up the rejection email
        user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"])

        Subst["__REJECTOR_ADDRESS__"] = user_email_address
        Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
        Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
        reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
        # Write the rejection email out as the <foo>.reason file
        os.write(reason_fd, reject_mail_message)

        # Send the rejection mail if appropriate
        if not Cnf["Dinstall::Options::No-Mail"]:
            utils.send_mail(reject_mail_message)

        self.Logger.log(["rejected", pkg.changes_file])
896 ################################################################################
    def source_exists (self, package, source_version, suites = ["any"]):
        """
        Ensure that source exists somewhere in the archive for the binary
        upload being processed.
          1. exact match  => 1.0-3
          2. bin-only NMU => 1.0-3+b1 , 1.0-3.1+b1

        @type package: string
        @param package: package source name

        @type source_version: string
        @param source_version: expected source version

        @type suites: list
        @param suites: list of suites to check in, default I{any}

        @rtype: int
        @return: returns 1 if a source with expected version is found, otherwise 0
        """
        # NOTE(review): the per-suite loop framing and the return statements
        # appear to be elided from this copy.
        que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
        # source must exist in suite X, or in some other suite that's
        # mapped to X, recursively... silent-maps are counted too,
        # unreleased-maps aren't.
        maps = self.Cnf.ValueList("SuiteMappings")[:]
        maps = [ m.split() for m in maps ]
        maps = [ (x[1], x[2]) for x in maps
                 if x[0] == "map" or x[0] == "silent-map" ]
        if x[1] in s and x[0] not in s:
        que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, " OR ".join(["su.suite_name = '%s'" % a for a in s]))
        q = self.projectB.query(que)

        # Reduce the query results to a list of version numbers
        ql = [ i[0] for i in q.getresult() ]

        # Try (1) - exact match
        if source_version in ql:

        # Try (2) - strip the binary-NMU suffix and look again
        orig_source_version = re_bin_only_nmu.sub('', source_version)
        if orig_source_version in ql:
957 ################################################################################
    def in_override_p (self, package, component, suite, binary_type, file):
        """
        Check if a package already has override entries in the DB

        @type package: string
        @param package: package name

        @type component: string
        @param component: database id of the component, as returned by L{database.get_component_id}

        @type suite: int
        @param suite: database id of the suite, as returned by L{database.get_suite_id}

        @type binary_type: string
        @param binary_type: type of the package

        @type file: string
        @param file: filename we check

        @return: the database result. But noone cares anyway.
        """
        files = self.pkg.files

        # NOTE(review): the dsc branch assignment and its else line appear to
        # be elided from this copy.
        if binary_type == "": # must be source
        file_type = binary_type

        # Override suite name; used for example with proposed-updates
        if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
            suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)]

        # Avoid <undef> on unknown distributions
        suite_id = database.get_suite_id(suite)
        component_id = database.get_component_id(component)
        type_id = database.get_override_type_id(file_type)

        q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
                                % (package, suite_id, component_id, type_id))
        result = q.getresult()
        # If checking for a source package fall back on the binary override type
        if file_type == "dsc" and not result:
            deb_type_id = database.get_override_type_id("deb")
            udeb_type_id = database.get_override_type_id("udeb")
            q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
                                    % (package, suite_id, component_id, deb_type_id, udeb_type_id))
            result = q.getresult()

        # Remember the section and priority so we can check them later if appropriate
        files[file]["override section"] = result[0][0]
        files[file]["override priority"] = result[0][1]
1017 ################################################################################
def reject (self, str, prefix="Rejected: "):
    """
    Append C{str}, prefixed with C{prefix} (default "Rejected: "), to
    the accumulated C{self.reject_message}.

    @type str: string
    @param str: Reject text

    @type prefix: string
    @param prefix: Prefix text, default Rejected:
    """
    # Newlines go *before* each appended entry (never after), so the
    # accumulated message handed back to callers has no trailing newline.
    if self.reject_message:
        prefix = "\n" + prefix
    self.reject_message = self.reject_message + prefix + str
1037 ################################################################################
def get_anyversion(self, query_result, suite):
    """
    Scan (version, suite_name) rows and track the highest version found
    in C{suite} or in any suite that C{suite} "Enhances" according to
    the conffile's VersionChecks settings.

    @type query_result: list
    @param query_result: (version, suite_name) tuples from the database

    @type suite: string
    @param suite: suite name
    """
    # The suite itself plus every suite it Enhances takes part in the scan.
    anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
    for (v, s) in query_result:
        # Suite names coming back from the DB are matched case-insensitively.
        if s in [ x.lower() for x in anysuite ]:
            # Keep the largest version according to apt's version ordering.
            if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
                # NOTE(review): excerpt gap -- the initialisation of
                # `anyversion`, the assignment recording `v` here, and the
                # final return of `anyversion` were elided from this chunk.
1049 ################################################################################
def cross_suite_version_check(self, query_result, file, new_version,
                              # NOTE(review): excerpt gap -- the remaining
                              # parameter(s) (a `sourceful` flag, judging by
                              # the callers) and the closing "):" elided.
    """
    Ensure versions are newer than existing packages in target
    suites and that cross-suite version checking rules as
    set out in the conf file are satisfied.
    """
    # Check versions for each target suite
    for target_suite in self.pkg.changes["distribution"].keys():
        must_be_newer_than = [ i.lower() for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
        must_be_older_than = [ i.lower() for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
        # Enforce "must be newer than target suite" even if conffile omits it
        if target_suite not in must_be_newer_than:
            must_be_newer_than.append(target_suite)
        for entry in query_result:
            existent_version = entry[0]
            # NOTE(review): excerpt gap -- `suite` used below is presumably
            # bound from entry[1] here; verify against the full source.
            if suite in must_be_newer_than and sourceful and \
               apt_pkg.VersionCompare(new_version, existent_version) < 1:
                self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
            if suite in must_be_older_than and \
               apt_pkg.VersionCompare(new_version, existent_version) > -1:
                ch = self.pkg.changes
                # 'distribution-version' maps a conflicting suite onto the
                # suite the upload should be propagated to instead.
                if ch.get('distribution-version', {}).has_key(suite):
                    # we really use the other suite, ignoring the conflicting one ...
                    addsuite = ch["distribution-version"][suite]

                    add_version = self.get_anyversion(query_result, addsuite)
                    target_version = self.get_anyversion(query_result, target_suite)

                    # NOTE(review): excerpt gap -- the branch header
                    # (presumably "if not add_version:") for the comments
                    # and actions below was elided.
                    # not add_version can only happen if we map to a suite
                    # that doesn't enhance the suite we're propup'ing from.
                    # so "propup-ver x a b c; map a d" is a problem only if
                    # d doesn't enhance a.
                    #
                    # i think we could always propagate in this case, rather
                    # than complaining. either way, this isn't a REJECT issue
                    #
                    # And - we really should complain to the dorks who configured dak
                    self.reject("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite), "Warning: ")
                    self.pkg.changes.setdefault("propdistribution", {})
                    self.pkg.changes["propdistribution"][addsuite] = 1

                    elif not target_version:
                        # not targets_version is true when the package is NEW
                        # we could just stick with the "...old version..." REJECT
                        # for this, I think.
                        self.reject("Won't propogate NEW packages.")
                    elif apt_pkg.VersionCompare(new_version, add_version) < 0:
                        # propogation would be redundant. no need to reject though.
                        self.reject("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite), "Warning: ")
                    elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
                         apt_pkg.VersionCompare(add_version, target_version) >= 0:
                        # propagate the upload to the mapped suite as well.
                        self.reject("Propogating upload to %s" % (addsuite), "Warning: ")
                        self.pkg.changes.setdefault("propdistribution", {})
                        self.pkg.changes["propdistribution"][addsuite] = 1

                # NOTE(review): excerpt gap -- the guard (apparently an
                # else / "if reject:" path for the unmapped case) before
                # this final reject was elided.
                self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
1118 ################################################################################
def check_binary_against_db(self, file):
    """
    Check a binary package against the DB: run cross-suite version
    checks for every suite the package exists in, and reject attempts
    to overwrite a (package, version, architecture) already archived.

    @type file: string
    @param file: key into self.pkg.files for the binary being checked

    @return: the accumulated reject message (empty string when clean)
    """
    self.reject_message = ""
    files = self.pkg.files

    # Ensure version is sane
    # NOTE(review): excerpt gap -- the FROM clause below is missing the
    # line naming the architecture table (alias "a" is referenced in the
    # WHERE clause); verify against the full source.
    q = self.projectB.query("""
SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
                            % (files[file]["package"],
                               files[file]["architecture"]))
    # Binary uploads are not sourceful for the version-check rules.
    self.cross_suite_version_check(q.getresult(), file,
                                   files[file]["version"], sourceful=False)

    # Check for any existing copies of the file
    q = self.projectB.query("""
SELECT b.id FROM binaries b, architecture a
WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
AND a.id = b.architecture"""
                            % (files[file]["package"],
                               files[file]["version"],
                               files[file]["architecture"]))
    # NOTE(review): excerpt gap -- an "if q.getresult():" guard before
    # this reject appears to have been elided.
    self.reject("%s: can not overwrite existing copy already in the archive." % (file))

    return self.reject_message
1151 ################################################################################
def check_source_against_db(self, file):
    """
    Run the cross-suite version checks for the source package named in
    the .dsc against every suite it already exists in.

    @type file: string
    @param file: the .dsc filename (used in reject messages)

    @return: the accumulated reject message (empty string when clean)
    """
    self.reject_message = ""
    # NOTE(review): excerpt gap -- `dsc` used below is presumably bound
    # from self.pkg.dsc here; verify against the full source.

    # Ensure version is sane
    q = self.projectB.query("""
SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")))
    self.cross_suite_version_check(q.getresult(), file, dsc.get("version"),
    # NOTE(review): excerpt gap -- the remaining argument(s) of the call
    # above (a sourceful flag, cf. check_binary_against_db) were elided.

    return self.reject_message
1168 ################################################################################
def check_dsc_against_db(self, file):
    """
    Check the files listed in a .dsc against the archive: files shipped
    in the upload must not clash with archived copies, and a missing
    .orig.tar.gz is hunted for in the pool and the queue directories.

    @type file: string
    @param file: the .dsc filename (used in reject messages)

    @warning: NB: this function can remove entries from the 'files' index [if
    the .orig.tar.gz is a duplicate of the one in the archive]; if
    you're iterating over 'files' and call this function as part of
    the loop, be sure to add a check to the top of the loop to
    ensure you haven't just tried to dereference the deleted entry.

    NOTE(review): this excerpt elides many control-flow lines (loop
    headers, guards, else branches, fh.close() calls); the indentation
    below reconstructs the apparent structure -- verify every flagged
    spot against the full source before trusting it.
    """
    self.reject_message = ""
    files = self.pkg.files
    dsc_files = self.pkg.dsc_files
    legacy_source_untouchable = self.pkg.legacy_source_untouchable
    self.pkg.orig_tar_gz = None

    # Try and find all files mentioned in the .dsc. This has
    # to work harder to cope with the multiple possible
    # locations of an .orig.tar.gz.
    # The ordering on the select is needed to pick the newest orig
    # when it exists in multiple places.
    for dsc_file in dsc_files.keys():
        if files.has_key(dsc_file):
            # File shipped in this upload: compare against the archive.
            actual_md5 = files[dsc_file]["md5sum"]
            actual_size = int(files[dsc_file]["size"])
            found = "%s in incoming" % (dsc_file)
            # Check the file does not already exist in the archive
            q = self.projectB.query("SELECT f.size, f.md5sum, l.path, f.filename FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location ORDER BY f.id DESC" % (dsc_file))
            # Strip out anything that isn't '%s' or '/%s$'
            # NOTE(review): excerpt gap -- the loop over the query result
            # rows enclosing the basename test below was elided.
            if i[3] != dsc_file and i[3][-(len(dsc_file)+1):] != '/'+dsc_file:

            # "[dak] has not broken them. [dak] has fixed a
            # brokenness. Your crappy hack exploited a bug in
            # the old dak.
            #
            # "(Come on! I thought it was always obvious that
            # one just doesn't release different files with
            # the same name and version.)"
            # -- ajk@ on d-devel@l.d.o

            # Ignore exact matches for .orig.tar.gz
            if dsc_file.endswith(".orig.tar.gz"):
                if files.has_key(dsc_file) and \
                   int(files[dsc_file]["size"]) == int(i[0]) and \
                   files[dsc_file]["md5sum"] == i[1]:
                    # Same name, size and md5sum as the archived copy:
                    # warn, and reuse the pool file (this is the path that
                    # can delete the 'files' entry -- see @warning).
                    self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ")
                    self.pkg.orig_tar_gz = i[2] + i[3]

            # NOTE(review): excerpt gap -- the guard for the non-matching
            # case before this reject was elided.
            self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file))
        elif dsc_file.endswith(".orig.tar.gz"):
            # Not shipped in the upload: look for the orig tarball in the pool.
            q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
            # Strip out anything that isn't '%s' or '/%s$'
            # NOTE(review): excerpt gap -- the enclosing loop over the
            # query result rows was elided, as was the "if <rows>:" guard
            # around the block below.
            if i[1] != dsc_file and i[1][-(len(dsc_file)+1):] != '/'+dsc_file:

            # Unfortunately, we may get more than one match here if,
            # for example, the package was in potato but had an -sa
            # upload in woody. So we need to choose the right one.

            # default to something sane in case we don't match any or have only one
            # NOTE(review): excerpt gap -- candidate selection loop elided:
            # each row is checksummed and the matching one chosen as `x`;
            # the fh.close() lines are also missing from this excerpt.
            old_file = i[0] + i[1]
            old_file_fh = utils.open_file(old_file)
            actual_md5 = apt_pkg.md5sum(old_file_fh)
            actual_size = os.stat(old_file)[stat.ST_SIZE]
            if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
            # NOTE(review): the line below appears to belong to the
            # *non*-matching branch (an elided "else:"): non-matching
            # candidates are marked untouchable.
            legacy_source_untouchable[i[3]] = ""

            old_file = x[0] + x[1]
            old_file_fh = utils.open_file(old_file)
            actual_md5 = apt_pkg.md5sum(old_file_fh)
            actual_size = os.stat(old_file)[stat.ST_SIZE]
            # need this for updating dsc_files in install()
            dsc_files[dsc_file]["files id"] = x[3]
            # See install() in process-accepted...
            self.pkg.orig_tar_id = x[3]
            self.pkg.orig_tar_gz = old_file
            if suite_type == "legacy" or suite_type == "legacy-mixed":
                self.pkg.orig_tar_location = "legacy"
            # NOTE(review): "else:" elided before the next line.
            self.pkg.orig_tar_location = x[4]

            # Not there? Check the queue directories...
            in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file)
            # See process_it() in 'dak process-unchecked' for explanation of this
            # in_unchecked check dropped by ajt 2007-08-28, how did that
            # ever work? -- the "and False" below disables this branch.
            if os.path.exists(in_unchecked) and False:
                return (self.reject_message, in_unchecked)
            for directory in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates" ]:
                in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (directory)],dsc_file)
                if os.path.exists(in_otherdir):
                    in_otherdir_fh = utils.open_file(in_otherdir)
                    actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
                    in_otherdir_fh.close()
                    actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
                    self.pkg.orig_tar_gz = in_otherdir

            # NOTE(review): an "if not found:" guard before this reject
            # appears to have been elided; -1 marks "looked but missing".
            self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file))
            self.pkg.orig_tar_gz = -1
        # NOTE(review): the final "else:" (file neither in the upload nor
        # an orig tarball) before this reject was elided.
        self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file))

        # Whatever copy we settled on must match the .dsc's checksums.
        if actual_md5 != dsc_files[dsc_file]["md5sum"]:
            self.reject("md5sum for %s doesn't match %s." % (found, file))
        if actual_size != int(dsc_files[dsc_file]["size"]):
            self.reject("size for %s doesn't match %s." % (found, file))

    return (self.reject_message, None)
1309 def do_query(self, query):
1311 Executes a database query. Writes statistics / timing to stderr.
1314 @param query: database query string, passed unmodified
1318 @warning: The query is passed B{unmodified}, so be careful what you use this for.
1320 sys.stderr.write("query: \"%s\" ... " % (query))
1321 before = time.time()
1322 r = self.projectB.query(query)
1323 time_diff = time.time()-before
1324 sys.stderr.write("took %.3f seconds.\n" % (time_diff))