3 # Queue utility functions for dak
4 # Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 2 of the License, or
9 # (at your option) any later version.
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software
18 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
20 ###############################################################################
22 import cPickle, errno, os, pg, re, stat, sys, time
23 import apt_inst, apt_pkg
24 import utils, database
28 ###############################################################################
# Pre-compiled regular expressions used throughout this module.
# NOTE(review): every code line in this listing carries a stray leading
# number (original file line numbers from a mangled paste); left as-is.
30 re_isanum = re.compile (r"^\d+$")            # entirely-numeric string
31 re_default_answer = re.compile(r"\[(.*)\]")  # "[D]efault" choice inside a prompt string
32 re_fdnic = re.compile(r"\n\n")               # paragraph break (two consecutive newlines)
33 re_bin_only_nmu = re.compile(r"\+b\d+$")     # binary-only NMU version suffix, e.g. "+b1"
35 ################################################################################
37 # Determine what parts in a .changes are NEW
# Determine which parts of a .changes upload are NEW, i.e. lack an
# override entry in every targeted suite.  Mutates `files` (clears the
# per-file "new" flag for known packages) and prints warnings.
# NOTE(review): several original lines are missing from this listing
# (e.g. `new = {}`, the per-file lookup `f = files[file]`, `continue`,
# and the `pkg`/`type` assignments) — annotated as-is, not reconstructed.
39 def determine_new(changes, files, projectB, warn=1):
42 # Build up a list of potentially new things
43 for file in files.keys():
45 # Skip byhand elements
46 if f["type"] == "byhand":
49 priority = f["priority"]
50 section = f["section"]
52 component = f["component"]
# First sighting of this package in the upload: record its metadata.
56 if not new.has_key(pkg):
58 new[pkg]["priority"] = priority
59 new[pkg]["section"] = section
60 new[pkg]["type"] = type
61 new[pkg]["component"] = component
62 new[pkg]["files"] = []
64 old_type = new[pkg]["type"]
66 # source gets trumped by deb or udeb
68 new[pkg]["priority"] = priority
69 new[pkg]["section"] = section
70 new[pkg]["type"] = type
71 new[pkg]["component"] = component
72 new[pkg]["files"].append(file)
73 if f.has_key("othercomponents"):
74 new[pkg]["othercomponents"] = f["othercomponents"]
# Probe the override table per (suite, package, component, type);
# a hit means the package is known, so drop its "new" marker.
# NOTE(review): SQL is built by string interpolation — injection-prone
# if `pkg` ever contains a quote; parameterized queries would be safer.
76 for suite in changes["suite"].keys():
77 suite_id = database.get_suite_id(suite)
78 for pkg in new.keys():
79 component_id = database.get_component_id(new[pkg]["component"])
80 type_id = database.get_override_type_id(new[pkg]["type"])
81 q = projectB.query("SELECT package FROM override WHERE package = '%s' AND suite = %s AND component = %s AND type = %s" % (pkg, suite_id, component_id, type_id))
84 for file in new[pkg]["files"]:
85 if files[file].has_key("new"):
86 del files[file]["new"]
# Warnings below are presumably guarded by the missing `if warn:` line
# (the `warn` parameter is otherwise unused in the visible code).
90 if changes["suite"].has_key("stable"):
91 print "WARNING: overrides will be added for stable!"
92 if changes["suite"].has_key("oldstable"):
93 print "WARNING: overrides will be added for OLDstable!"
94 for pkg in new.keys():
95 if new[pkg].has_key("othercomponents"):
96 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
100 ################################################################################
# NOTE(review): fragment of a type-determination helper — its `def`
# line (and the branch bodies) are not visible in this listing.  The
# visible logic maps a file entry `f` to an override type and bails
# out via utils.fubar() on anything unrecognised or unknown to the DB.
104 if f.has_key("dbtype"):
106 elif f["type"] in [ "orig.tar.gz", "orig.tar.bz2", "tar.gz", "tar.bz2", "diff.gz", "diff.bz2", "dsc" ]:
109 utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (type))
111 # Validate the override type
112 type_id = database.get_override_type_id(type)
114 utils.fubar("invalid type (%s) for new. Say wha?" % (type))
118 ################################################################################
120 # check if section/priority values are valid
def check_valid(new):
    # Validate the section/priority of every pending-NEW package and
    # record the matching database ids on each entry; an invalid
    # combination is flagged by storing -1 as the id.
    for pkg in new.keys():
        entry = new[pkg]
        section = entry["section"]
        priority = entry["priority"]
        type = entry["type"]
        entry["section id"] = database.get_section_id(section)
        entry["priority id"] = database.get_priority_id(entry["priority"])
        # debian-installer sections are valid only for udebs, and udebs
        # must live in a debian-installer section (exclusive-or check).
        is_di = "debian-installer" in section
        if is_di != (type == "udeb"):
            entry["section id"] = -1
        # "source" priority is valid only for .dsc entries, and .dsc
        # entries must carry "source" priority (exclusive-or check).
        if (priority == "source") != (type == "dsc"):
            entry["priority id"] = -1
138 ###############################################################################
140 # Convenience wrapper to carry around all the package information in
def __init__(self, **kwds):
    # Adopt every keyword argument as an instance attribute.
    for key, value in kwds.items():
        setattr(self, key, value)
def update(self, **kwds):
    # Merge the keyword arguments into the instance's attributes,
    # overwriting any that already exist.
    for key, value in kwds.items():
        setattr(self, key, value)
149 ###############################################################################
# Set up a Queue: accept counters, a fresh Pkg state container, the
# template-substitution map, and the PostgreSQL connection.
# NOTE(review): other methods read self.Cnf / self.Logger; the lines
# assigning them are missing from this listing (presumably the gaps
# after the def line) — confirm against the full file.
153 def __init__(self, Cnf):
155 self.accept_count = 0
156 self.accept_bytes = 0L
157 self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
158 legacy_source_untouchable = {})
160 # Initialize the substitution template mapping global
161 Subst = self.Subst = {}
162 Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"]
163 Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"]
164 Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"]
165 Subst["__DAK_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
# Open the projectB database connection (PyGreSQL) and prime the
# shared lookup caches in the database helper module.
167 self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
168 database.init(Cnf, self.projectB)
170 ###########################################################################
def init_vars (self):
    """Reset all per-upload state so this Queue can process a new package."""
    # Clear each state dict *in place* rather than rebinding a fresh
    # dict: other code may hold references to these objects.
    # (Replaces the original string-building `exec` — getattr does the
    # same attribute lookup without eval'ing generated source.)
    for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
        getattr(self.pkg, i).clear()
    self.pkg.orig_tar_id = None
    self.pkg.orig_tar_location = ""
    self.pkg.orig_tar_gz = None
179 ###########################################################################
# Re-load per-upload state from the ".dak" pickle previously written
# by dump_vars().  `[:-8]` strips the ".changes" suffix from the
# changes filename to derive the dump filename.
# NOTE(review): the exec-on-built-string idiom could be getattr/setattr;
# also no dump_file.close() is visible here — it may be on one of the
# missing lines (original 189+); confirm against the full file.
181 def update_vars (self):
182 dump_filename = self.pkg.changes_file[:-8]+".dak"
183 dump_file = utils.open_file(dump_filename)
184 p = cPickle.Unpickler(dump_file)
# Each dict is update()d in place from one pickled payload per name.
185 for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
186 exec "self.pkg.%s.update(p.load());" % (i)
# Scalar state is rebound from the remaining pickled payloads.
187 for i in [ "orig_tar_id", "orig_tar_location" ]:
188 exec "self.pkg.%s = p.load();" % (i)
191 ###########################################################################
193 # This could just dump the dictionaries as is, but I'd like to
194 # avoid this so there's some idea of what process-accepted &
195 # process-new use from process-unchecked
# Serialise the per-upload state into <changes-basename>.dak inside
# dest_dir, pickling only a whitelisted subset of keys (rather than the
# raw dicts) so process-accepted / process-new see a stable format.
# NOTE(review): the try/except scaffolding around the chmod and the
# d_* dict initialisations are on lines missing from this listing.
197 def dump_vars(self, dest_dir):
# Bind local names (changes, dsc, files, ...) from self.pkg via exec.
198 for i in [ "changes", "dsc", "files", "dsc_files",
199 "legacy_source_untouchable", "orig_tar_id", "orig_tar_location" ]:
200 exec "%s = self.pkg.%s;" % (i,i)
201 dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".dak")
202 dump_file = utils.open_file(dump_filename, 'w')
# Restrict the dump's permissions; if chmod fails with EPERM and the
# file is world-readable, abort rather than leak upload metadata.
204 os.chmod(dump_filename, 0660)
206 if errno.errorcode[e.errno] == 'EPERM':
207 perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE])
208 if perms & stat.S_IROTH:
209 utils.fubar("%s is world readable and chmod failed." % (dump_filename))
213 p = cPickle.Pickler(dump_file, 1)
214 for i in [ "d_changes", "d_dsc", "d_files", "d_dsc_files" ]:
# Copy only the known per-file keys into the dump.
217 for file in files.keys():
219 for i in [ "package", "version", "architecture", "type", "size",
220 "md5sum", "component", "location id", "source package",
221 "source version", "maintainer", "dbtype", "files id",
222 "new", "section", "priority", "othercomponents",
223 "pool name", "original component" ]:
224 if files[file].has_key(i):
225 d_files[file][i] = files[file][i]
227 # Mandatory changes fields
228 for i in [ "distribution", "source", "architecture", "version",
229 "maintainer", "urgency", "fingerprint", "changedby822",
230 "changedby2047", "changedbyname", "maintainer822",
231 "maintainer2047", "maintainername", "maintaineremail",
232 "closes", "changes" ]:
233 d_changes[i] = changes[i]
234 # Optional changes fields
235 for i in [ "changed-by", "filecontents", "format", "process-new note", "adv id", "distribution-version",
237 if changes.has_key(i):
238 d_changes[i] = changes[i]
# .dsc fields (the copy into d_dsc is on a missing line).
240 for i in [ "source", "version", "maintainer", "fingerprint",
241 "uploaders", "bts changelog", "dm-upload-allowed" ]:
245 for file in dsc_files.keys():
246 d_dsc_files[file] = {}
247 # Mandatory dsc_files fields
248 for i in [ "size", "md5sum" ]:
249 d_dsc_files[file][i] = dsc_files[file][i]
250 # Optional dsc_files fields
251 for i in [ "files id" ]:
252 if dsc_files[file].has_key(i):
253 d_dsc_files[file][i] = dsc_files[file][i]
# Finally pickle each filtered structure in a fixed order (the order
# update_vars() reads them back in).
255 for i in [ d_changes, d_dsc, d_files, d_dsc_files,
256 legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
260 ###########################################################################
262 # Set up the per-package template substitution mappings
# Refresh the per-package entries of the template-substitution map
# from the current .changes data (architecture, maintainer addressing,
# reject message, source, version).
# NOTE(review): the `Subst = self.Subst` binding and the `else:` for
# the maintainer branch are on lines missing from this listing;
# `DictType` suggests a `from types import *`-style import not visible
# in the import block shown — confirm against the full file.
264 def update_subst (self, reject_message = ""):
266 changes = self.pkg.changes
267 # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
268 if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
269 changes["architecture"] = { "Unknown" : "" }
270 # and maintainer2047 may not exist.
271 if not changes.has_key("maintainer2047"):
272 changes["maintainer2047"] = self.Cnf["Dinstall::MyEmailAddress"]
274 Subst["__ARCHITECTURE__"] = " ".join(changes["architecture"].keys())
275 Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
276 Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "")
278 # For source uploads the Changed-By field wins; otherwise Maintainer wins.
279 if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
280 Subst["__MAINTAINER_FROM__"] = changes["changedby2047"]
281 Subst["__MAINTAINER_TO__"] = "%s, %s" % (changes["changedby2047"],
282 changes["maintainer2047"])
283 Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown")
# (else-branch: binary upload or identical Changed-By — use Maintainer)
285 Subst["__MAINTAINER_FROM__"] = changes["maintainer2047"]
286 Subst["__MAINTAINER_TO__"] = changes["maintainer2047"]
287 Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown")
289 if "sponsoremail" in changes:
290 Subst["__MAINTAINER_TO__"] += ", %s"%changes["sponsoremail"]
# Bcc the package-tracking server on sourceful uploads, if configured.
292 if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
293 Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])
295 # Apply any global override of the Maintainer field
296 if self.Cnf.get("Dinstall::OverrideMaintainer"):
297 Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"]
298 Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"]
300 Subst["__REJECT_MESSAGE__"] = reject_message
301 Subst["__SOURCE__"] = changes.get("source", "Unknown")
302 Subst["__VERSION__"] = changes.get("version", "Unknown")
304 ###########################################################################
# Build the human-readable (summary, short_summary) pair describing
# the upload: per-file lines (byhand / new / pool destination), the
# changelog text, override entries, and announcement targets.
# NOTE(review): several lines are missing (e.g. the `byhand`/`new`
# counters implied by line 310, the else: introducing the pool branch,
# and deb_fh.close()) — annotated as-is.
306 def build_summaries(self):
307 changes = self.pkg.changes
308 files = self.pkg.files
310 byhand = summary = new = ""
312 # changes["distribution"] may not exist in corner cases
313 # (e.g. unreadable changes files)
314 if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
315 changes["distribution"] = {}
317 override_summary ="";
318 file_keys = files.keys()
320 for file in file_keys:
321 if files[file].has_key("byhand"):
323 summary += file + " byhand\n"
324 elif files[file].has_key("new"):
326 summary += "(new) %s %s %s\n" % (file, files[file]["priority"], files[file]["section"])
327 if files[file].has_key("othercomponents"):
328 summary += "WARNING: Already present in %s distribution.\n" % (files[file]["othercomponents"])
# For new .debs, pull the Description out of the control file.
329 if files[file]["type"] == "deb":
330 deb_fh = utils.open_file(file)
331 summary += apt_pkg.ParseSection(apt_inst.debExtractControl(deb_fh))["Description"] + '\n'
# (else-branch: known file — report its pool destination.)
334 files[file]["pool name"] = utils.poolify (changes.get("source",""), files[file]["component"])
335 destination = self.Cnf["Dir::PoolRoot"] + files[file]["pool name"] + file
336 summary += file + "\n to " + destination + "\n"
337 if not files[file].has_key("type"):
338 files[file]["type"] = "unknown"
339 if files[file]["type"] in ["deb", "udeb", "dsc"]:
340 # (queue/unchecked), there we have override entries already, use them
341 # (process-new), there we dont have override entries, use the newly generated ones.
342 override_prio = files[file].get("override priority", files[file]["priority"])
343 override_sect = files[file].get("override section", files[file]["section"])
344 override_summary += "%s - %s %s\n" % (file, override_prio, override_sect)
346 short_summary = summary
348 # This is for direport's benefit...
349 f = re_fdnic.sub("\n .\n", changes.get("changes",""))
352 summary += "Changes: " + f
354 summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
356 summary += self.announce(short_summary, 0)
358 return (summary, short_summary)
360 ###########################################################################
362 def close_bugs (self, summary, action):
363 changes = self.pkg.changes
367 bugs = changes["closes"].keys()
373 summary += "Closing bugs: "
375 summary += "%s " % (bug)
377 Subst["__BUG_NUMBER__"] = bug
378 if changes["distribution"].has_key("stable"):
379 Subst["__STABLE_WARNING__"] = """
380 Note that this package is not part of the released stable Debian
381 distribution. It may have dependencies on other unreleased software,
382 or other instabilities. Please take care if you wish to install it.
383 The update will eventually make its way into the next released Debian
386 Subst["__STABLE_WARNING__"] = ""
387 mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-close")
388 utils.send_mail (mail_message)
390 self.Logger.log(["closing bugs"]+bugs)
395 ###########################################################################
# Announce a sourceful upload to each distribution's configured
# announce list (deduplicated via lists_done), optionally Bcc'ing the
# package-tracking server, then fall through to bug closing.
# Returns the accumulated summary text.
# NOTE(review): the early `return summary`, the `lists_done`/`summary`
# initialisations and the `if action:` guards are on missing lines.
# `list` shadows the builtin — pre-existing style in this codebase.
397 def announce (self, short_summary, action):
400 changes = self.pkg.changes
402 # Only do announcements for source uploads with a recent dpkg-dev installed
403 if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):
408 Subst["__SHORT_SUMMARY__"] = short_summary
410 for dist in changes["distribution"].keys():
411 list = Cnf.Find("Suite::%s::Announce" % (dist))
412 if list == "" or lists_done.has_key(list):
415 summary += "Announcing to %s\n" % (list)
418 Subst["__ANNOUNCE_LIST_ADDRESS__"] = list
419 if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
420 Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"])
421 mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.announce")
422 utils.send_mail (mail_message)
424 if Cnf.FindB("Dinstall::CloseBugs"):
425 summary = self.close_bugs(summary, action)
429 ###########################################################################
# Accept an upload: dump state, move the .changes and all its files
# into the accepted queue, send the accepted mail / announcements,
# write the DebBugs version-tracking files, and register the upload
# for auto-building.
# NOTE(review): `Cnf`/`Subst`/`dsc` local bindings and the per-file
# `f = files[file]` line are missing from this listing.
431 def accept (self, summary, short_summary):
434 files = self.pkg.files
435 changes = self.pkg.changes
436 changes_file = self.pkg.changes_file
440 self.Logger.log(["Accepting changes",changes_file])
442 self.dump_vars(Cnf["Dir::Queue::Accepted"])
444 # Move all the files into the accepted directory
445 utils.move(changes_file, Cnf["Dir::Queue::Accepted"])
446 file_keys = files.keys()
447 for file in file_keys:
448 utils.move(file, Cnf["Dir::Queue::Accepted"])
449 self.accept_bytes += float(files[file]["size"])
450 self.accept_count += 1
452 # Send accept mail, announce to lists, close bugs and check for
453 # override disparities
454 if not Cnf["Dinstall::Options::No-Mail"]:
455 Subst["__SUITE__"] = ""
456 Subst["__SUMMARY__"] = summary
457 mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
458 utils.send_mail(mail_message)
459 self.announce(short_summary, 1)
462 ## Helper stuff for DebBugs Version Tracking
463 if Cnf.Find("Dir::Queue::BTSVersionTrack"):
464 # ??? once queue/* is cleared on *.d.o and/or reprocessed
465 # the conditionalization on dsc["bts changelog"] should be
468 # Write out the version history from the changelog
469 if changes["architecture"].has_key("source") and \
470 dsc.has_key("bts changelog"):
# Write to a dot-prefixed temp file, then rename into place so the
# .versions file appears atomically.
472 temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
473 dotprefix=1, perms=0644)
474 version_history = utils.open_file(temp_filename, 'w')
475 version_history.write(dsc["bts changelog"])
476 version_history.close()
477 filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
478 changes_file[:-8]+".versions")
479 os.rename(temp_filename, filename)
481 # Write out the binary -> source mapping.
482 temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
483 dotprefix=1, perms=0644)
484 debinfo = utils.open_file(temp_filename, 'w')
485 for file in file_keys:
487 if f["type"] == "deb":
488 line = " ".join([f["package"], f["version"],
489 f["architecture"], f["source package"],
490 f["source version"]])
491 debinfo.write(line+"\n")
493 filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
494 changes_file[:-8]+".debinfo")
495 os.rename(temp_filename, filename)
497 self.queue_build("accepted", Cnf["Dir::Queue::Accepted"])
499 ###########################################################################
# Register the upload's files for auto-building: symlink (or, on
# security setups, copy) each file into the queue-build area and
# record it in the queue_build table, including the pool .orig.tar.gz
# when one is referenced.
# NOTE(review): `Cnf` binding, several `continue` lines, the
# `filename = dsc_file` assignment, the `ql = q.getresult()` line and
# the else: before line 553 are missing from this listing.  SQL is
# built by string interpolation throughout (pre-existing pattern).
501 def queue_build (self, queue, path):
504 files = self.pkg.files
505 changes = self.pkg.changes
506 changes_file = self.pkg.changes_file
508 file_keys = files.keys()
510 ## Special support to enable clean auto-building of queued packages
511 queue_id = database.get_or_set_queue_id(queue)
513 self.projectB.query("BEGIN WORK")
514 for suite in changes["distribution"].keys():
515 if suite not in Cnf.ValueList("Dinstall::QueueBuildSuites"):
517 suite_id = database.get_suite_id(suite)
518 dest_dir = Cnf["Dir::QueueBuild"]
519 if Cnf.FindB("Dinstall::SecurityQueueBuild"):
520 dest_dir = os.path.join(dest_dir, suite)
521 for file in file_keys:
522 src = os.path.join(path, file)
523 dest = os.path.join(dest_dir, file)
524 if Cnf.FindB("Dinstall::SecurityQueueBuild"):
525 # Copy it since the original won't be readable by www-data
526 utils.copy(src, dest)
528 # Create a symlink to it
529 os.symlink(src, dest)
530 # Add it to the list of packages for later processing by apt-ftparchive
531 self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
532 # If the .orig.tar.gz is in the pool, create a symlink to
533 # it (if one doesn't already exist)
534 if self.pkg.orig_tar_id:
535 # Determine the .orig.tar.gz file name
536 for dsc_file in self.pkg.dsc_files.keys():
537 if dsc_file.endswith(".orig.tar.gz"):
539 dest = os.path.join(dest_dir, filename)
540 # If it doesn't exist, create a symlink
541 if not os.path.exists(dest):
542 # Find the .orig.tar.gz in the pool
543 q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id))
546 utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id))
547 src = os.path.join(ql[0][0], ql[0][1])
548 os.symlink(src, dest)
549 # Add it to the list of packages for later processing by apt-ftparchive
550 self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
551 # if it does, update things to ensure it's not removed prematurely
553 self.projectB.query("UPDATE queue_build SET in_queue = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id))
555 self.projectB.query("COMMIT WORK")
557 ###########################################################################
# Compare the package's declared section/priority against the override
# database entries recorded by in_override_p(), and mail the maintainer
# about any disparities.
# NOTE(review): the early `return`, the `summary` initialisation, the
# `if summary == "": return` guard and the Subst/Cnf bindings are on
# lines missing from this listing.
559 def check_override (self):
561 changes = self.pkg.changes
562 files = self.pkg.files
565 # Abandon the check if:
566 # a) it's a non-sourceful upload
567 # b) override disparity checks have been disabled
568 # c) we're not sending mail
569 if not changes["architecture"].has_key("source") or \
570 not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
571 Cnf["Dinstall::Options::No-Mail"]:
575 file_keys = files.keys()
# Only established (non-new) .debs have override entries to compare;
# "-" acts as a wildcard meaning "don't check".
577 for file in file_keys:
578 if not files[file].has_key("new") and files[file]["type"] == "deb":
579 section = files[file]["section"]
580 override_section = files[file]["override section"]
581 if section.lower() != override_section.lower() and section != "-":
582 summary += "%s: package says section is %s, override says %s.\n" % (file, section, override_section)
583 priority = files[file]["priority"]
584 override_priority = files[file]["override priority"]
585 if priority != override_priority and priority != "-":
586 summary += "%s: package says priority is %s, override says %s.\n" % (file, priority, override_priority)
591 Subst["__SUMMARY__"] = summary
592 mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/process-unchecked.override-disparity")
593 utils.send_mail(mail_message)
595 ###########################################################################
# Forcefully move the given files into the reject directory, claiming
# each destination with O_EXCL first; an existing reject file is pushed
# to the morgue (utils.find_next_free picks a fresh name) to make way.
# NOTE(review): the docstring's closing quotes, the per-file loop
# header, the try:/except OSError lines and several continue lines are
# missing from this listing, so no inline comments are added below
# (they would fall inside the unterminated docstring literal).
597 def force_reject (self, files):
598 """Forcefully move files from the current directory to the
599 reject directory. If any file already exists in the reject
600 directory it will be moved to the morgue to make way for
606 # Skip any files which don't exist or which we don't have permission to copy.
607 if os.access(file,os.R_OK) == 0:
609 dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file)
611 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
613 # File exists? Let's try and move it to the morgue
614 if errno.errorcode[e.errno] == 'EEXIST':
615 morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file)
617 morgue_file = utils.find_next_free(morgue_file)
618 except utils.tried_too_hard_exc:
619 # Something's either gone badly Pete Tong, or
620 # someone is trying to exploit us.
621 utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file))
623 utils.move(dest_file, morgue_file, perms=0660)
625 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
628 utils.warn("**WARNING** failed to claim %s in the reject directory." % (file))
632 # If we got here, we own the destination file, so we can
633 # safely overwrite it.
634 utils.move(file, dest_file, 1, perms=0660)
637 ###########################################################################
# Reject the current upload: optionally let the operator compose or
# edit the rejection message in $EDITOR, move everything into the
# reject directory, write a <changes>.reason file (claimed with
# O_EXCL to beat a symlink race), and mail the rejection.
# NOTE(review): the edit/confirm loop body, the Edit/Abandon/Quit
# branches, the `pkg`/`Cnf`/`Subst` bindings, the manual/automatic
# branch keywords and os.close(reason_fd) are on lines missing from
# this listing.
639 def do_reject (self, manual = 0, reject_message = ""):
640 # If we weren't given a manual rejection message, spawn an
641 # editor so the user can add one in...
642 if manual and not reject_message:
643 temp_filename = utils.temp_filename()
644 editor = os.environ.get("EDITOR","vi")
647 os.system("%s %s" % (editor, temp_filename))
648 temp_fh = utils.open_file(temp_filename)
649 reject_message = "".join(temp_fh.readlines())
651 print "Reject message:"
652 print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
653 prompt = "[R]eject, Edit, Abandon, Quit ?"
655 while prompt.find(answer) == -1:
656 answer = utils.our_raw_input(prompt)
657 m = re_default_answer.search(prompt)
660 answer = answer[:1].upper()
661 os.unlink(temp_filename)
# --- rejection proper starts here (intervening lines missing) ---
673 reason_filename = pkg.changes_file[:-8] + ".reason"
674 reason_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename
676 # Move all the files into the reject directory
677 reject_files = pkg.files.keys() + [pkg.changes_file]
678 self.force_reject(reject_files)
680 # If we fail here someone is probably trying to exploit the race
681 # so let's just raise an exception ...
682 if os.path.exists(reason_filename):
683 os.unlink(reason_filename)
684 reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
# Automatic rejection: canned rejector address and X-header CC.
687 Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
688 Subst["__MANUAL_REJECT_MESSAGE__"] = ""
689 Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
690 os.write(reason_fd, reject_message)
691 reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
# Manual rejection: operator identity and message go into the mail.
693 # Build up the rejection email
694 user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"])
696 Subst["__REJECTOR_ADDRESS__"] = user_email_address
697 Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
698 Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
699 reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
700 # Write the rejection email out as the <foo>.reason file
701 os.write(reason_fd, reject_mail_message)
705 # Send the rejection mail if appropriate
706 if not Cnf["Dinstall::Options::No-Mail"]:
707 utils.send_mail(reject_mail_message)
709 self.Logger.log(["rejected", pkg.changes_file])
712 ################################################################################
714 # Ensure that source exists somewhere in the archive for the binary
715 # upload being processed.
717 # (1) exact match => 1.0-3
718 # (2) Bin-only NMU => 1.0-3+b1 , 1.0-3.1+b1
# Check that the given source package/version exists in the archive
# for a binary upload: exact match first, then the version with any
# binNMU suffix ("+bN") stripped.  Suite mappings are followed so a
# suite satisfies the check via anything mapped onto it.
# NOTE(review): mutable default argument `suites = ["any"]` — shared
# across calls if ever mutated; also the "any" branch, the fixpoint
# loop around the map expansion, and the return statements are on
# lines missing from this listing.
720 def source_exists (self, package, source_version, suites = ["any"]):
724 que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
727 # source must exist in suite X, or in some other suite that's
728 # mapped to X, recursively... silent-maps are counted too,
729 # unreleased-maps aren't.
730 maps = self.Cnf.ValueList("SuiteMappings")[:]
732 maps = [ m.split() for m in maps ]
733 maps = [ (x[1], x[2]) for x in maps
734 if x[0] == "map" or x[0] == "silent-map" ]
737 if x[1] in s and x[0] not in s:
740 que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, " OR ".join(["su.suite_name = '%s'" % a for a in s]))
741 q = self.projectB.query(que)
743 # Reduce the query results to a list of version numbers
744 ql = [ i[0] for i in q.getresult() ]
# (1) exact version match
747 if source_version in ql:
# (2) binary-only NMU: strip the "+bN" suffix and retry
751 orig_source_version = re_bin_only_nmu.sub('', source_version)
752 if orig_source_version in ql:
760 ################################################################################
# Look up the override entry for (package, component, suite, type) and,
# when found, cache the override section/priority on the file entry for
# the later disparity check in check_override().
# NOTE(review): the `type` assignment for the source/binary cases, the
# suite_id sanity check, and the return statement(s) are on lines
# missing from this listing.  SQL by string interpolation throughout.
762 def in_override_p (self, package, component, suite, binary_type, file):
763 files = self.pkg.files
765 if binary_type == "": # must be source
770 # Override suite name; used for example with proposed-updates
771 if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
772 suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)]
774 # Avoid <undef> on unknown distributions
775 suite_id = database.get_suite_id(suite)
778 component_id = database.get_component_id(component)
779 type_id = database.get_override_type_id(type)
781 q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
782 % (package, suite_id, component_id, type_id))
783 result = q.getresult()
784 # If checking for a source package fall back on the binary override type
785 if type == "dsc" and not result:
786 deb_type_id = database.get_override_type_id("deb")
787 udeb_type_id = database.get_override_type_id("udeb")
788 q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
789 % (package, suite_id, component_id, deb_type_id, udeb_type_id))
790 result = q.getresult()
792 # Remember the section and priority so we can check them later if appropriate
794 files[file]["override section"] = result[0][0]
795 files[file]["override priority"] = result[0][1]
799 ################################################################################
def reject (self, str, prefix="Rejected: "):
    # Append one rejection line to the accumulated message.  The
    # newline separator is prepended rather than appended so the final
    # string never carries a trailing newline when it is handed back
    # up to the caller.
    separator = "\n" if self.reject_message else ""
    self.reject_message += separator + prefix + str
809 ################################################################################
# Return the highest version found in query_result among the given
# suite and the suites configured as enhancing it ("Enhances" version
# checks).  Versions are compared with apt_pkg.VersionCompare.
# NOTE(review): the `anyversion = None` initialisation, the winning
# assignment and the return statement are on lines missing from this
# listing.
811 def get_anyversion(self, query_result, suite):
813 anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
814 for (v, s) in query_result:
815 if s in [ x.lower() for x in anysuite ]:
816 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
820 ################################################################################
# NOTE(review): on the two list comprehensions below, `i.lower` is a
# bound-method *reference*, not a call — the lists contain method
# objects, so the `suite in must_be_*_than` membership tests can only
# ever match the verbatim target_suite appended afterwards.  This
# looks like a genuine latent bug; should presumably be `i.lower()`.
# Also several lines are missing from this listing (the `suite =
# entry[1].lower()` binding, else:/branch keywords, and blank lines).
822 def cross_suite_version_check(self, query_result, file, new_version):
823 """Ensure versions are newer than existing packages in target
824 suites and that cross-suite version checking rules as
825 set out in the conf file are satisfied."""
827 # Check versions for each target suite
828 for target_suite in self.pkg.changes["distribution"].keys():
829 must_be_newer_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
830 must_be_older_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
831 # Enforce "must be newer than target suite" even if conffile omits it
832 if target_suite not in must_be_newer_than:
833 must_be_newer_than.append(target_suite)
834 for entry in query_result:
835 existent_version = entry[0]
# Reject if an equal-or-newer version already exists in a
# must-be-newer-than suite.
837 if suite in must_be_newer_than and \
838 apt_pkg.VersionCompare(new_version, existent_version) < 1:
839 self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
840 if suite in must_be_older_than and \
841 apt_pkg.VersionCompare(new_version, existent_version) > -1:
842 ch = self.pkg.changes
# A distribution-version mapping lets us propagate the upload to the
# mapped suite instead of rejecting outright.
844 if ch.get('distribution-version', {}).has_key(suite):
845 # we really use the other suite, ignoring the conflicting one ...
846 addsuite = ch["distribution-version"][suite]
848 add_version = self.get_anyversion(query_result, addsuite)
849 target_version = self.get_anyversion(query_result, target_suite)
852 # not add_version can only happen if we map to a suite
853 # that doesn't enhance the suite we're propup'ing from.
854 # so "propup-ver x a b c; map a d" is a problem only if
855 # d doesn't enhance a.
857 # i think we could always propagate in this case, rather
858 # than complaining. either way, this isn't a REJECT issue
860 # And - we really should complain to the dorks who configured dak
861 self.reject("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite), "Warning: ")
862 self.pkg.changes.setdefault("propdistribution", {})
863 self.pkg.changes["propdistribution"][addsuite] = 1
865 elif not target_version:
866 # not targets_version is true when the package is NEW
867 # we could just stick with the "...old version..." REJECT
869 self.reject("Won't propogate NEW packages.")
870 elif apt_pkg.VersionCompare(new_version, add_version) < 0:
871 # propogation would be redundant. no need to reject though.
872 self.reject("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite), "Warning: ")
874 elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
875 apt_pkg.VersionCompare(add_version, target_version) >= 0:
877 self.reject("Propogating upload to %s" % (addsuite), "Warning: ")
878 self.pkg.changes.setdefault("propdistribution", {})
879 self.pkg.changes["propdistribution"][addsuite] = 1
# Fallback: no mapping applies — plain version-conflict rejection.
883 self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
885 ################################################################################
# Validate one binary file against the database: run the cross-suite
# version checks against all existing (version, suite) rows for this
# package/architecture, then reject if this exact package/version/arch
# already exists in the archive.  Returns the accumulated reject text
# ("" when clean).
# NOTE(review): the guard around the overwrite-reject (presumably
# `if q.getresult():`) is on a line missing from this listing.
887 def check_binary_against_db(self, file):
888 self.reject_message = ""
889 files = self.pkg.files
891 # Ensure version is sane
892 q = self.projectB.query("""
893 SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
895 WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
896 AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
897 % (files[file]["package"],
898 files[file]["architecture"]))
899 self.cross_suite_version_check(q.getresult(), file, files[file]["version"])
901 # Check for any existing copies of the file
902 q = self.projectB.query("""
903 SELECT b.id FROM binaries b, architecture a
904 WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
905 AND a.id = b.architecture"""
906 % (files[file]["package"],
907 files[file]["version"],
908 files[file]["architecture"]))
910 self.reject("%s: can not overwrite existing copy already in the archive." % (file))
912 return self.reject_message
914 ################################################################################
916 def check_source_against_db(self, file):
917 self.reject_message = ""
920 # Ensure version is sane
921 q = self.projectB.query("""
922 SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
923 WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")))
924 self.cross_suite_version_check(q.getresult(), file, dsc.get("version"))
926 return self.reject_message
928 ################################################################################
931 # NB: this function can remove entries from the 'files' index [if
932 # the .orig.tar.gz is a duplicate of the one in the archive]; if
933 # you're iterating over 'files' and call this function as part of
934 # the loop, be sure to add a check to the top of the loop to
935 # ensure you haven't just tried to dereference the deleted entry.
938 def check_dsc_against_db(self, file):
939 self.reject_message = ""
940 files = self.pkg.files
941 dsc_files = self.pkg.dsc_files
942 legacy_source_untouchable = self.pkg.legacy_source_untouchable
943 self.pkg.orig_tar_gz = None
945 # Try and find all files mentioned in the .dsc. This has
946 # to work harder to cope with the multiple possible
947 # locations of an .orig.tar.gz.
948 # The ordering on the select is needed to pick the newest orig
949 # when it exists in multiple places.
950 for dsc_file in dsc_files.keys():
952 if files.has_key(dsc_file):
953 actual_md5 = files[dsc_file]["md5sum"]
954 actual_size = int(files[dsc_file]["size"])
955 found = "%s in incoming" % (dsc_file)
956 # Check the file does not already exist in the archive
957 q = self.projectB.query("SELECT f.size, f.md5sum, l.path, f.filename FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location ORDER BY f.id DESC" % (dsc_file))
959 # Strip out anything that isn't '%s' or '/%s$'
961 if i[3] != dsc_file and i[3][-(len(dsc_file)+1):] != '/'+dsc_file:
964 # "[dak] has not broken them. [dak] has fixed a
965 # brokenness. Your crappy hack exploited a bug in
968 # "(Come on! I thought it was always obvious that
969 # one just doesn't release different files with
970 # the same name and version.)"
971 # -- ajk@ on d-devel@l.d.o
974 # Ignore exact matches for .orig.tar.gz
976 if dsc_file.endswith(".orig.tar.gz"):
978 if files.has_key(dsc_file) and \
979 int(files[dsc_file]["size"]) == int(i[0]) and \
980 files[dsc_file]["md5sum"] == i[1]:
981 self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ")
983 self.pkg.orig_tar_gz = i[2] + i[3]
987 self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file))
988 elif dsc_file.endswith(".orig.tar.gz"):
990 q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
992 # Strip out anything that isn't '%s' or '/%s$'
994 if i[1] != dsc_file and i[1][-(len(dsc_file)+1):] != '/'+dsc_file:
998 # Unfortunately, we may get more than one match here if,
999 # for example, the package was in potato but had an -sa
1000 # upload in woody. So we need to choose the right one.
1002 x = ql[0]; # default to something sane in case we don't match any or have only one
1006 old_file = i[0] + i[1]
1007 old_file_fh = utils.open_file(old_file)
1008 actual_md5 = apt_pkg.md5sum(old_file_fh)
1010 actual_size = os.stat(old_file)[stat.ST_SIZE]
1011 if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
1014 legacy_source_untouchable[i[3]] = ""
1016 old_file = x[0] + x[1]
1017 old_file_fh = utils.open_file(old_file)
1018 actual_md5 = apt_pkg.md5sum(old_file_fh)
1020 actual_size = os.stat(old_file)[stat.ST_SIZE]
1023 dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
1024 # See install() in process-accepted...
1025 self.pkg.orig_tar_id = x[3]
1026 self.pkg.orig_tar_gz = old_file
1027 if suite_type == "legacy" or suite_type == "legacy-mixed":
1028 self.pkg.orig_tar_location = "legacy"
1030 self.pkg.orig_tar_location = x[4]
1032 # Not there? Check the queue directories...
1034 in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file)
1035 # See process_it() in 'dak process-unchecked' for explanation of this
1036 # in_unchecked check dropped by ajt 2007-08-28, how did that
1038 if os.path.exists(in_unchecked) and False:
1039 return (self.reject_message, in_unchecked)
1041 for dir in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates" ]:
1042 in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (dir)],dsc_file)
1043 if os.path.exists(in_otherdir):
1044 in_otherdir_fh = utils.open_file(in_otherdir)
1045 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
1046 in_otherdir_fh.close()
1047 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
1049 self.pkg.orig_tar_gz = in_otherdir
1052 self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file))
1053 self.pkg.orig_tar_gz = -1
1056 self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file))
1058 if actual_md5 != dsc_files[dsc_file]["md5sum"]:
1059 self.reject("md5sum for %s doesn't match %s." % (found, file))
1060 if actual_size != int(dsc_files[dsc_file]["size"]):
1061 self.reject("size for %s doesn't match %s." % (found, file))
1063 return (self.reject_message, None)
1065 def do_query(self, q):
1066 sys.stderr.write("query: \"%s\" ... " % (q))
1067 before = time.time()
1068 r = self.projectB.query(q)
1069 time_diff = time.time()-before
1070 sys.stderr.write("took %.3f seconds.\n" % (time_diff))