3 # Queue utility functions for dak
4 # Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 2 of the License, or
9 # (at your option) any later version.
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software
18 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
20 ###############################################################################
22 import cPickle, errno, os, pg, re, stat, sys, time
23 import apt_inst, apt_pkg
24 import utils, database
28 ###############################################################################
30 re_isanum = re.compile (r"^\d+$")
31 re_default_answer = re.compile(r"\[(.*)\]")
32 re_fdnic = re.compile(r"\n\n")
33 re_bin_only_nmu = re.compile(r"\+b\d+$")
35 ################################################################################
37 # Determine what parts in a .changes are NEW
def determine_new(changes, files, projectB, warn=1):
    """Work out which parts of a .changes upload are NEW.

    Builds a 'new' index keyed by package name from the files index,
    then drops entries that already have an override in every target
    suite, and finally prints warnings.

    NOTE(review): several lines are missing from this excerpt (e.g.
    the bindings of f, pkg, type and the initialisation of 'new', the
    byhand 'continue', the q.getresult() check and the return), so
    read the gaps accordingly.
    """
    # Build up a list of potentially new things
    for file in files.keys():
        # NOTE(review): 'f' is presumably files[file]; the binding is not visible here.
        # Skip byhand elements
        if f["type"] == "byhand":
            # NOTE(review): branch body elided in this excerpt (presumably 'continue').
        priority = f["priority"]
        section = f["section"]
        component = f["component"]
        if not new.has_key(pkg):
            # First sight of this package: record its metadata.
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        old_type = new[pkg]["type"]
        # source gets trumped by deb or udeb
        new[pkg]["priority"] = priority
        new[pkg]["section"] = section
        new[pkg]["type"] = type
        new[pkg]["component"] = component
        new[pkg]["files"].append(file)
        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]

    # Drop packages that already carry an override in every target suite.
    for suite in changes["suite"].keys():
        suite_id = database.get_suite_id(suite)
        for pkg in new.keys():
            component_id = database.get_component_id(new[pkg]["component"])
            type_id = database.get_override_type_id(new[pkg]["type"])
            # NOTE(review): SQL built with % formatting -- injection-prone if
            # 'pkg' were attacker-controlled; consider parameterised queries.
            q = projectB.query("SELECT package FROM override WHERE package = '%s' AND suite = %s AND component = %s AND type = %s" % (pkg, suite_id, component_id, type_id))
            for file in new[pkg]["files"]:
                if files[file].has_key("new"):
                    del files[file]["new"]

    # Operator warnings: overrides for (old)stable, and packages that
    # are already present in a different component.
    if changes["suite"].has_key("stable"):
        print "WARNING: overrides will be added for stable!"
    if changes["suite"].has_key("oldstable"):
        print "WARNING: overrides will be added for OLDstable!"
    for pkg in new.keys():
        if new[pkg].has_key("othercomponents"):
            print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
100 ################################################################################
# NOTE(review): interior of a helper that derives an override 'type' from a
# file entry; its def line and the branch bodies are not visible in this
# excerpt -- do not treat the control flow below as complete.
if f.has_key("dbtype"):
    # presumably: type = f["dbtype"] -- TODO confirm
elif f["type"] in [ "orig.tar.gz", "orig.tar.bz2", "tar.gz", "tar.bz2", "diff.gz", "diff.bz2", "dsc" ]:
    # presumably: type = "dsc" -- TODO confirm
fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (type))
# Validate the override type
type_id = database.get_override_type_id(type)
fubar("invalid type (%s) for new.  Say wha?" % (type))
118 ################################################################################
120 # check if section/priority values are valid
def check_valid(new):
    """Resolve section/priority names to database ids for each entry.

    Stores the looked-up ids under "section id" / "priority id".
    Mismatched combinations are marked invalid with -1:
      * a debian-installer section is only valid for udebs (and vice versa);
      * a "source" priority is only valid for .dsc entries (and vice versa).
    """
    for pkg in new.keys():
        entry = new[pkg]
        section = entry["section"]
        priority = entry["priority"]
        pkg_type = entry["type"]

        entry["section id"] = database.get_section_id(section)
        entry["priority id"] = database.get_priority_id(entry["priority"])

        # debian-installer sections and udeb type must go together.
        is_di_section = "debian-installer" in section
        if is_di_section != (pkg_type == "udeb"):
            entry["section id"] = -1

        # "source" priority and dsc type must go together.
        if (priority == "source") != (pkg_type == "dsc"):
            entry["priority id"] = -1
138 ###############################################################################
# Convenience wrapper to carry around all the package information in one place
def __init__(self, **kwds):
    """Store every keyword argument as an attribute on this instance."""
    for key, value in kwds.items():
        self.__dict__[key] = value
def update(self, **kwds):
    """Merge the given keyword arguments into this instance's attributes."""
    for key, value in kwds.items():
        self.__dict__[key] = value
149 ###############################################################################
def __init__(self, Cnf):
    """Set up queue state: accept counters, an empty Pkg holder, the
    template substitution map, and the projectB database connection.

    NOTE(review): at least one line is missing from this excerpt --
    later methods read self.Cnf, so presumably 'self.Cnf = Cnf' was
    also assigned here.
    """
    self.accept_count = 0
    self.accept_bytes = 0L
    # Empty per-upload state; (re)filled by init_vars()/update_vars().
    self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
                   legacy_source_untouchable = {})

    # Initialize the substitution template mapping global
    Subst = self.Subst = {}
    Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"]
    Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"]
    Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"]
    Subst["__DAK_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]

    # Open the project database and prime the database helper module.
    self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
    database.init(Cnf, self.projectB)
170 ###########################################################################
def init_vars (self):
    """Reset the per-upload package state to empty/default values.

    Clears each dict attribute on self.pkg in place (so existing
    references stay valid) and resets the orig.tar.gz bookkeeping
    fields.
    """
    # Use getattr instead of building an exec string: same effect,
    # no dynamic code execution.
    for name in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
        getattr(self.pkg, name).clear()
    self.pkg.orig_tar_id = None
    self.pkg.orig_tar_location = ""
    self.pkg.orig_tar_gz = None
179 ###########################################################################
def update_vars (self):
    """Reload per-upload package state from the pickled .dak side file.

    The .dak file sits next to the .changes file ([:-8] strips the
    ".changes" suffix) and contains the objects written by dump_vars(),
    read back in the same fixed order.

    NOTE(review): pickle data is trusted here -- loading a .dak file
    from an untrusted source would be unsafe.  The excerpt also appears
    to be missing the close of dump_file.
    """
    dump_filename = self.pkg.changes_file[:-8]+".dak"
    dump_file = utils.open_file(dump_filename)
    p = cPickle.Unpickler(dump_file)
    # Dict attributes are merged in place, in dump_vars() order.
    for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
        exec "self.pkg.%s.update(p.load());" % (i)
    # Scalar attributes are simply rebound.
    for i in [ "orig_tar_id", "orig_tar_location" ]:
        exec "self.pkg.%s = p.load();" % (i)
191 ###########################################################################
193 # This could just dump the dictionaries as is, but I'd like to
194 # avoid this so there's some idea of what process-accepted &
195 # process-new use from process-unchecked
def dump_vars(self, dest_dir):
    """Pickle the per-upload package state into a <changes>.dak file.

    Only a whitelisted subset of each dict is written, so there is
    some record of what process-accepted and process-new actually use
    from process-unchecked.

    NOTE(review): several lines are missing from this excerpt: the
    d_* dict initialisation, the try/except around os.chmod, the
    per-file d_files[file] = {} line, the final p.dump() loop body and
    the file close.
    """
    # Bind local aliases (changes, dsc, files, ...) to the pkg attributes.
    for i in [ "changes", "dsc", "files", "dsc_files",
               "legacy_source_untouchable", "orig_tar_id", "orig_tar_location" ]:
        exec "%s = self.pkg.%s;" % (i,i)
    dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".dak")
    dump_file = utils.open_file(dump_filename, 'w')
    # Group read/write so other dak processes can rewrite the file later.
    os.chmod(dump_filename, 0660)
    # NOTE(review): this is presumably inside an elided try/except clause.
    if errno.errorcode[e.errno] == 'EPERM':
        perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE])
        # World-readable and we could not chmod: refuse to proceed.
        if perms & stat.S_IROTH:
            utils.fubar("%s is world readable and chmod failed." % (dump_filename))
    p = cPickle.Pickler(dump_file, 1)
    # NOTE(review): loop body elided; presumably initialises each d_* dict.
    for i in [ "d_changes", "d_dsc", "d_files", "d_dsc_files" ]:
    # Copy only the whitelisted fields of each file entry.
    for file in files.keys():
        for i in [ "package", "version", "architecture", "type", "size",
                   "md5sum", "component", "location id", "source package",
                   "source version", "maintainer", "dbtype", "files id",
                   "new", "section", "priority", "othercomponents",
                   "pool name", "original component" ]:
            if files[file].has_key(i):
                d_files[file][i] = files[file][i]
    # Mandatory changes fields
    for i in [ "distribution", "source", "architecture", "version",
               "maintainer", "urgency", "fingerprint", "changedby822",
               "changedby2047", "changedbyname", "maintainer822",
               "maintainer2047", "maintainername", "maintaineremail",
               "closes", "changes" ]:
        d_changes[i] = changes[i]
    # Optional changes fields
    for i in [ "changed-by", "filecontents", "format", "process-new note", "adv id", "distribution-version" ]:
        if changes.has_key(i):
            d_changes[i] = changes[i]
    # dsc fields -- loop body elided in this excerpt.
    for i in [ "source", "version", "maintainer", "fingerprint",
               "uploaders", "bts changelog", "dm-upload-allowed" ]:
    for file in dsc_files.keys():
        d_dsc_files[file] = {}
        # Mandatory dsc_files fields
        for i in [ "size", "md5sum" ]:
            d_dsc_files[file][i] = dsc_files[file][i]
        # Optional dsc_files fields
        for i in [ "files id" ]:
            if dsc_files[file].has_key(i):
                d_dsc_files[file][i] = dsc_files[file][i]
    # Dump everything in a fixed order; update_vars() relies on it.
    for i in [ d_changes, d_dsc, d_files, d_dsc_files,
               legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
259 ###########################################################################
261 # Set up the per-package template substitution mappings
def update_subst (self, reject_message = ""):
    """Refresh the per-upload entries of the Subst template map:
    architecture list, changes filename, file contents, maintainer
    From/To addresses, reject message, source and version.

    NOTE(review): elided from this excerpt: the local 'Subst' binding
    (presumably Subst = self.Subst) and the else: of the maintainer
    branch -- the three maintainer2047 lines below are that else
    branch's body.
    """
    changes = self.pkg.changes
    # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
    if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
        changes["architecture"] = { "Unknown" : "" }
    # and maintainer2047 may not exist.
    if not changes.has_key("maintainer2047"):
        changes["maintainer2047"] = self.Cnf["Dinstall::MyEmailAddress"]
    Subst["__ARCHITECTURE__"] = " ".join(changes["architecture"].keys())
    Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
    Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "")
    # For source uploads the Changed-By field wins; otherwise Maintainer wins.
    if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
        Subst["__MAINTAINER_FROM__"] = changes["changedby2047"]
        Subst["__MAINTAINER_TO__"] = "%s, %s" % (changes["changedby2047"],
                                                 changes["maintainer2047"])
        Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown")
        # NOTE(review): the following three lines belong to an elided else:.
        Subst["__MAINTAINER_FROM__"] = changes["maintainer2047"]
        Subst["__MAINTAINER_TO__"] = changes["maintainer2047"]
        Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown")
    # Also Bcc the configured tracking server for source uploads.
    if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
        Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])
    # Apply any global override of the Maintainer field
    if self.Cnf.get("Dinstall::OverrideMaintainer"):
        Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"]
        Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"]
    Subst["__REJECT_MESSAGE__"] = reject_message
    Subst["__SOURCE__"] = changes.get("source", "Unknown")
    Subst["__VERSION__"] = changes.get("version", "Unknown")
299 ###########################################################################
def build_summaries(self):
    """Build the long and short human-readable summaries of an upload.

    Returns (summary, short_summary): the short form lists the files
    (flagging byhand/new entries); the long form appends the changelog
    text, the override entries and any announcement text.

    NOTE(review): elided from this excerpt: the byhand/new flag
    updates in the first two branches, the else: before the pool
    destination lines, and the guards around the trailing summary
    sections.
    """
    changes = self.pkg.changes
    files = self.pkg.files
    byhand = summary = new = ""
    # changes["distribution"] may not exist in corner cases
    # (e.g. unreadable changes files)
    if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
        changes["distribution"] = {}
    override_summary ="";
    file_keys = files.keys()
    for file in file_keys:
        if files[file].has_key("byhand"):
            summary += file + " byhand\n"
        elif files[file].has_key("new"):
            summary += "(new) %s %s %s\n" % (file, files[file]["priority"], files[file]["section"])
            if files[file].has_key("othercomponents"):
                summary += "WARNING: Already present in %s distribution.\n" % (files[file]["othercomponents"])
            if files[file]["type"] == "deb":
                # Pull the Description straight out of the .deb control file.
                deb_fh = utils.open_file(file)
                summary += apt_pkg.ParseSection(apt_inst.debExtractControl(deb_fh))["Description"] + '\n'
        files[file]["pool name"] = utils.poolify (changes.get("source",""), files[file]["component"])
        destination = self.Cnf["Dir::PoolRoot"] + files[file]["pool name"] + file
        summary += file + "\n to " + destination + "\n"
        if not files[file].has_key("type"):
            files[file]["type"] = "unknown"
        if files[file]["type"] in ["deb", "udeb", "dsc"]:
            # (queue/unchecked), there we have override entries already, use them
            # (process-new), there we dont have override entries, use the newly generated ones.
            override_prio = files[file].get("override priority", files[file]["priority"])
            override_sect = files[file].get("override section", files[file]["section"])
            override_summary += "%s - %s %s\n" % (file, override_prio, override_sect)
    short_summary = summary
    # This is for direport's benefit...
    f = re_fdnic.sub("\n .\n", changes.get("changes",""))
    summary += "Changes: " + f
    summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
    summary += self.announce(short_summary, 0)
    return (summary, short_summary)
355 ###########################################################################
def close_bugs (self, summary, action):
    """Append bug-closure info to the summary and, when acting, mail a
    closure message for each bug listed in the .changes Closes field.

    NOTE(review): this excerpt is heavily elided -- the loop header
    over bugs, the 'if action:' guards, the else: before the empty
    stable warning, and the closing quotes of the stable-warning
    template string are all missing; the string literal below is
    unterminated as shown.
    """
    changes = self.pkg.changes
    bugs = changes["closes"].keys()
    summary += "Closing bugs: "
    summary += "%s " % (bug)
    Subst["__BUG_NUMBER__"] = bug
    if changes["distribution"].has_key("stable"):
        Subst["__STABLE_WARNING__"] = """
Note that this package is not part of the released stable Debian
distribution. It may have dependencies on other unreleased software,
or other instabilities. Please take care if you wish to install it.
The update will eventually make its way into the next released Debian
        Subst["__STABLE_WARNING__"] = ""
    mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-close")
    utils.send_mail (mail_message)
    self.Logger.log(["closing bugs"]+bugs)
390 ###########################################################################
def announce (self, short_summary, action):
    """Announce the upload to each target suite's configured mailing
    list, then optionally run bug closure; returns the (possibly
    extended) summary.

    NOTE(review): elided from this excerpt: the Cnf/Subst/summary/
    lists_done bindings, the early return for non-source or old-format
    uploads, the per-list 'continue' and bookkeeping, the 'if action:'
    guards and the final return.
    """
    changes = self.pkg.changes
    # Only do announcements for source uploads with a recent dpkg-dev installed
    if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):
    Subst["__SHORT_SUMMARY__"] = short_summary
    for dist in changes["distribution"].keys():
        # Per-suite announce address; skip suites without one (or already done).
        list = Cnf.Find("Suite::%s::Announce" % (dist))
        if list == "" or lists_done.has_key(list):
        summary += "Announcing to %s\n" % (list)
        Subst["__ANNOUNCE_LIST_ADDRESS__"] = list
        # Bcc the tracking server for source uploads when configured.
        if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
            Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"])
        mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.announce")
        utils.send_mail (mail_message)
    if Cnf.FindB("Dinstall::CloseBugs"):
        summary = self.close_bugs(summary, action)
424 ###########################################################################
def accept (self, summary, short_summary):
    """Accept an upload: dump state, move everything into the accepted
    queue, mail/announce, feed DebBugs version tracking, and register
    the upload with the queue-build machinery.

    NOTE(review): several lines are missing from this excerpt (the
    Cnf/Subst/dsc local bindings, print statements, the 'f' binding in
    the debinfo loop); comments describe only what is visible.
    """
    files = self.pkg.files
    changes = self.pkg.changes
    changes_file = self.pkg.changes_file
    self.Logger.log(["Accepting changes",changes_file])
    # Persist the parsed state alongside the moved .changes file.
    self.dump_vars(Cnf["Dir::Queue::Accepted"])
    # Move all the files into the accepted directory
    utils.move(changes_file, Cnf["Dir::Queue::Accepted"])
    file_keys = files.keys()
    for file in file_keys:
        utils.move(file, Cnf["Dir::Queue::Accepted"])
        self.accept_bytes += float(files[file]["size"])
    self.accept_count += 1
    # Send accept mail, announce to lists, close bugs and check for
    # override disparities
    if not Cnf["Dinstall::Options::No-Mail"]:
        Subst["__SUITE__"] = ""
        Subst["__SUMMARY__"] = summary
        mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
        utils.send_mail(mail_message)
        self.announce(short_summary, 1)
    ## Helper stuff for DebBugs Version Tracking
    if Cnf.Find("Dir::Queue::BTSVersionTrack"):
        # ??? once queue/* is cleared on *.d.o and/or reprocessed
        # the conditionalization on dsc["bts changelog"] should be
        # removed.
        # Write out the version history from the changelog
        if changes["architecture"].has_key("source") and \
           dsc.has_key("bts changelog"):
            temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                                dotprefix=1, perms=0644)
            version_history = utils.open_file(temp_filename, 'w')
            version_history.write(dsc["bts changelog"])
            version_history.close()
            filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                  changes_file[:-8]+".versions")
            os.rename(temp_filename, filename)
        # Write out the binary -> source mapping.
        temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                            dotprefix=1, perms=0644)
        debinfo = utils.open_file(temp_filename, 'w')
        for file in file_keys:
            # NOTE(review): 'f' is presumably files[file]; binding elided here.
            if f["type"] == "deb":
                line = " ".join([f["package"], f["version"],
                                 f["architecture"], f["source package"],
                                 f["source version"]])
                debinfo.write(line+"\n")
        filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                              changes_file[:-8]+".debinfo")
        os.rename(temp_filename, filename)
    self.queue_build("accepted", Cnf["Dir::Queue::Accepted"])
494 ###########################################################################
def queue_build (self, queue, path):
    """Register an upload's files with the auto-build queue for each
    target suite listed in Dinstall::QueueBuildSuites, inside one
    database transaction.

    NOTE(review): elided from this excerpt: the Cnf binding, the
    'continue' for unhandled suites, the else: between the copy and
    symlink branches, the orig.tar.gz 'filename' binding, the
    ql = q.getresult() fetch and the else: branches of the symlink
    logic.
    """
    files = self.pkg.files
    changes = self.pkg.changes
    changes_file = self.pkg.changes_file
    file_keys = files.keys()
    ## Special support to enable clean auto-building of queued packages
    queue_id = database.get_or_set_queue_id(queue)
    self.projectB.query("BEGIN WORK")
    for suite in changes["distribution"].keys():
        if suite not in Cnf.ValueList("Dinstall::QueueBuildSuites"):
        suite_id = database.get_suite_id(suite)
        dest_dir = Cnf["Dir::QueueBuild"]
        # Security archives keep a per-suite subdirectory.
        if Cnf.FindB("Dinstall::SecurityQueueBuild"):
            dest_dir = os.path.join(dest_dir, suite)
        for file in file_keys:
            src = os.path.join(path, file)
            dest = os.path.join(dest_dir, file)
            if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                # Copy it since the original won't be readable by www-data
                utils.copy(src, dest)
                # Create a symlink to it
                os.symlink(src, dest)
            # Add it to the list of packages for later processing by apt-ftparchive
            self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
        # If the .orig.tar.gz is in the pool, create a symlink to
        # it (if one doesn't already exist)
        if self.pkg.orig_tar_id:
            # Determine the .orig.tar.gz file name
            for dsc_file in self.pkg.dsc_files.keys():
                if dsc_file.endswith(".orig.tar.gz"):
            dest = os.path.join(dest_dir, filename)
            # If it doesn't exist, create a symlink
            if not os.path.exists(dest):
                # Find the .orig.tar.gz in the pool
                q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id))
                utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id))
                src = os.path.join(ql[0][0], ql[0][1])
                os.symlink(src, dest)
                # Add it to the list of packages for later processing by apt-ftparchive
                self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
            # if it does, update things to ensure it's not removed prematurely
                self.projectB.query("UPDATE queue_build SET in_queue = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id))
    self.projectB.query("COMMIT WORK")
552 ###########################################################################
def check_override (self):
    """Mail the maintainer when a binary's declared section/priority
    disagree with the override database.

    NOTE(review): elided from this excerpt: the Cnf/Subst bindings,
    the body of the early return, the 'summary' initialisation and
    the empty-summary guard before sending mail.
    """
    changes = self.pkg.changes
    files = self.pkg.files
    # Abandon the check if:
    #  a) it's a non-sourceful upload
    #  b) override disparity checks have been disabled
    #  c) we're not sending mail
    if not changes["architecture"].has_key("source") or \
       not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
       Cnf["Dinstall::Options::No-Mail"]:
    file_keys = files.keys()
    for file in file_keys:
        # Only non-NEW binary (.deb) entries have override data to compare.
        if not files[file].has_key("new") and files[file]["type"] == "deb":
            section = files[file]["section"]
            override_section = files[file]["override section"]
            if section.lower() != override_section.lower() and section != "-":
                summary += "%s: package says section is %s, override says %s.\n" % (file, section, override_section)
            priority = files[file]["priority"]
            override_priority = files[file]["override priority"]
            if priority != override_priority and priority != "-":
                summary += "%s: package says priority is %s, override says %s.\n" % (file, priority, override_priority)
    Subst["__SUMMARY__"] = summary
    mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/process-unchecked.override-disparity")
    utils.send_mail(mail_message)
590 ###########################################################################
def force_reject (self, files):
    """Forcefully move files from the current directory to the
    reject directory.  If any file already exists in the reject
    directory it will be moved to the morgue to make way for
    the incoming file.

    NOTE(review): elided from this excerpt: the Cnf binding, the loop
    header over files, the try/except scaffolding around both os.open
    calls, the 'continue' statements and the os.close of dest_fd.
    """
    # Skip any files which don't exist or which we don't have permission to copy.
    if os.access(file,os.R_OK) == 0:
    dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file)
    # O_EXCL makes this an atomic claim of the destination name.
    dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
    # File exists?  Let's try and move it to the morgue
    if errno.errorcode[e.errno] == 'EEXIST':
        morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file)
        morgue_file = utils.find_next_free(morgue_file)
        except utils.tried_too_hard_exc:
            # Something's either gone badly Pete Tong, or
            # someone is trying to exploit us.
            utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file))
        utils.move(dest_file, morgue_file, perms=0660)
        # Retry the atomic claim now that the old file is out of the way.
        dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file))
    # If we got here, we own the destination file, so we can
    # safely overwrite it.
    utils.move(file, dest_file, 1, perms=0660)
632 ###########################################################################
def do_reject (self, manual = 0, reject_message = ""):
    """Reject an upload: optionally let the operator compose/edit a
    rejection message in $EDITOR, move all files into the reject
    directory, write the <foo>.reason file and send the rejection
    mail.

    NOTE(review): many lines are missing from this excerpt -- the
    editor retry loop scaffolding, the Edit/Abandon/Quit answer
    dispatch, the Cnf/Subst/pkg/user bindings, the else: separating
    the automatic branch from the manual branch, and the fd close and
    return.  Comments describe only what is visible.
    """
    # If we weren't given a manual rejection message, spawn an
    # editor so the user can add one in...
    if manual and not reject_message:
        temp_filename = utils.temp_filename()
        editor = os.environ.get("EDITOR","vi")
        os.system("%s %s" % (editor, temp_filename))
        temp_fh = utils.open_file(temp_filename)
        reject_message = "".join(temp_fh.readlines())
        print "Reject message:"
        print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
        prompt = "[R]eject, Edit, Abandon, Quit ?"
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            m = re_default_answer.search(prompt)
            answer = answer[:1].upper()
        os.unlink(temp_filename)
    reason_filename = pkg.changes_file[:-8] + ".reason"
    reason_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename
    # Move all the files into the reject directory
    reject_files = pkg.files.keys() + [pkg.changes_file]
    self.force_reject(reject_files)
    # If we fail here someone is probably trying to exploit the race
    # so let's just raise an exception ...
    if os.path.exists(reason_filename):
        os.unlink(reason_filename)
    reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
    # Automated rejection: canned rejector address and X-*-Rejection headers.
    Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
    Subst["__MANUAL_REJECT_MESSAGE__"] = ""
    Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
    os.write(reason_fd, reject_message)
    reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
    # Build up the rejection email
    user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"])
    Subst["__REJECTOR_ADDRESS__"] = user_email_address
    Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
    Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
    reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
    # Write the rejection email out as the <foo>.reason file
    os.write(reason_fd, reject_mail_message)
    # Send the rejection mail if appropriate
    if not Cnf["Dinstall::Options::No-Mail"]:
        utils.send_mail(reject_mail_message)
    self.Logger.log(["rejected", pkg.changes_file])
707 ################################################################################
709 # Ensure that source exists somewhere in the archive for the binary
710 # upload being processed.
712 # (1) exact match => 1.0-3
713 # (2) Bin-only NMU => 1.0-3+b1 , 1.0-3.1+b1
def source_exists (self, package, source_version, suites = ["any"]):
    """Check that the source for a binary-only upload exists in the
    archive: either (1) the exact source version, or (2) the version
    with the binNMU suffix (+bN) stripped.

    NOTE(review): parts of this function are elided in this excerpt
    (the "any"-suite fast path, the suite-map expansion loop and all
    return statements).  Also note two pre-existing smells that a
    code change here should address: the mutable default argument
    (suites=["any"] is shared across calls) and SQL built by string
    formatting.
    """
    que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
    # source must exist in suite X, or in some other suite that's
    # mapped to X, recursively... silent-maps are counted too,
    # unreleased-maps aren't.
    maps = self.Cnf.ValueList("SuiteMappings")[:]
    maps = [ m.split() for m in maps ]
    maps = [ (x[1], x[2]) for x in maps
             if x[0] == "map" or x[0] == "silent-map" ]
    if x[1] in s and x[0] not in s:
    que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, " OR ".join(["su.suite_name = '%s'" % a for a in s]))
    q = self.projectB.query(que)
    # Reduce the query results to a list of version numbers
    ql = [ i[0] for i in q.getresult() ]
    # (1) exact match
    if source_version in ql:
    # (2) bin-only NMU: strip the +bN suffix and retry
    orig_source_version = re_bin_only_nmu.sub('', source_version)
    if orig_source_version in ql:
755 ################################################################################
def in_override_p (self, package, component, suite, binary_type, file):
    """Look up the override entry for a package in a suite.  On a hit,
    stash the override section/priority on the file entry for the
    later disparity check.

    NOTE(review): elided from this excerpt: the derivation of 'type'
    from binary_type, the component prefix handling, the negative
    suite_id handling, the empty-result path and the return.
    """
    files = self.pkg.files
    if binary_type == "": # must be source
    # Override suite name; used for example with proposed-updates
    if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
        suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)]
    # Avoid <undef> on unknown distributions
    suite_id = database.get_suite_id(suite)
    component_id = database.get_component_id(component)
    type_id = database.get_override_type_id(type)
    q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
                            % (package, suite_id, component_id, type_id))
    result = q.getresult()
    # If checking for a source package fall back on the binary override type
    if type == "dsc" and not result:
        deb_type_id = database.get_override_type_id("deb")
        udeb_type_id = database.get_override_type_id("udeb")
        q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
                                % (package, suite_id, component_id, deb_type_id, udeb_type_id))
        result = q.getresult()
    # Remember the section and priority so we can check them later if appropriate
    files[file]["override section"] = result[0][0]
    files[file]["override priority"] = result[0][1]
794 ################################################################################
def reject (self, str, prefix="Rejected: "):
    """Accumulate one rejection line onto self.reject_message.

    The newline separator is prepended rather than appended, so the
    combined message never carries a trailing newline when it is
    handed back up to a caller.
    """
    if self.reject_message:
        self.reject_message = self.reject_message + "\n" + prefix + str
    else:
        self.reject_message = prefix + str
804 ################################################################################
def get_anyversion(self, query_result, suite):
    """Return the highest version among query_result rows whose suite
    is the given suite or one listed in its VersionChecks::Enhances.

    NOTE(review): the initialisation of 'anyversion' and the final
    assignment/return are elided from this excerpt.
    """
    anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
    for (v, s) in query_result:
        if s in [ x.lower() for x in anysuite ]:
            # Keep the greatest version seen so far (dpkg version ordering).
            if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
815 ################################################################################
def cross_suite_version_check(self, query_result, file, new_version):
    """Ensure versions are newer than existing packages in target
    suites and that cross-suite version checking rules as
    set out in the conf file are satisfied."""
    # NOTE(review): several branch headers are elided from this excerpt
    # (the binding of 'suite' from entry[1], an apparent
    # 'if not add_version:' branch and the final else:); indentation
    # below is a best reconstruction.
    # Check versions for each target suite
    for target_suite in self.pkg.changes["distribution"].keys():
        # NOTE(review): 'i.lower' is a bound method, not a call -- these
        # lists end up holding method objects instead of lowercased suite
        # names, so the membership tests below can never match config
        # entries.  Almost certainly should be i.lower().
        must_be_newer_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
        must_be_older_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
        # Enforce "must be newer than target suite" even if conffile omits it
        if target_suite not in must_be_newer_than:
            must_be_newer_than.append(target_suite)
        for entry in query_result:
            existent_version = entry[0]
            if suite in must_be_newer_than and \
               apt_pkg.VersionCompare(new_version, existent_version) < 1:
                self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
            if suite in must_be_older_than and \
               apt_pkg.VersionCompare(new_version, existent_version) > -1:
                ch = self.pkg.changes
                if ch.get('distribution-version', {}).has_key(suite):
                    # we really use the other suite, ignoring the conflicting one ...
                    addsuite = ch["distribution-version"][suite]
                    add_version = self.get_anyversion(query_result, addsuite)
                    target_version = self.get_anyversion(query_result, target_suite)
                    # not add_version can only happen if we map to a suite
                    # that doesn't enhance the suite we're propup'ing from.
                    # so "propup-ver x a b c; map a d" is a problem only if
                    # d doesn't enhance a.
                    #
                    # i think we could always propagate in this case, rather
                    # than complaining. either way, this isn't a REJECT issue
                    #
                    # And - we really should complain to the dorks who configured dak
                    self.reject("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite), "Warning: ")
                    self.pkg.changes.setdefault("propdistribution", {})
                    self.pkg.changes["propdistribution"][addsuite] = 1
                elif not target_version:
                    # not targets_version is true when the package is NEW
                    # we could just stick with the "...old version..." REJECT
                    self.reject("Won't propogate NEW packages.")
                elif apt_pkg.VersionCompare(new_version, add_version) < 0:
                    # propogation would be redundant. no need to reject though.
                    self.reject("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite), "Warning: ")
                elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
                     apt_pkg.VersionCompare(add_version, target_version) >= 0:
                    # propagate!
                    self.reject("Propogating upload to %s" % (addsuite), "Warning: ")
                    self.pkg.changes.setdefault("propdistribution", {})
                    self.pkg.changes["propdistribution"][addsuite] = 1
                self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
880 ################################################################################
def check_binary_against_db(self, file):
    """Version-check a binary upload against the archive.

    Runs the cross-suite version rules against all existing versions
    of this package (same architecture or 'all'), then rejects if the
    exact package/version/architecture binary is already present.
    Returns the accumulated reject message ("" when clean).

    NOTE(review): the first SQL statement is cut mid-join in this
    excerpt ('architecture a' is missing after the trailing comma) and
    the 'if q.getresult():' guard before the overwrite reject is
    elided.
    """
    self.reject_message = ""
    files = self.pkg.files
    # Ensure version is sane
    q = self.projectB.query("""
SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
                            % (files[file]["package"],
                               files[file]["architecture"]))
    self.cross_suite_version_check(q.getresult(), file, files[file]["version"])
    # Check for any existing copies of the file
    q = self.projectB.query("""
SELECT b.id FROM binaries b, architecture a
WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
AND a.id = b.architecture"""
                            % (files[file]["package"],
                               files[file]["version"],
                               files[file]["architecture"]))
    self.reject("%s: can not overwrite existing copy already in the archive." % (file))
    return self.reject_message
909 ################################################################################
    def check_source_against_db(self, file):
        """Check the source package named in 'file' against the database.

        Runs a cross-suite version sanity check over every (version, suite)
        pair already recorded for this source package, accumulating any
        problems via self.reject().  Returns the collected
        self.reject_message (empty string means no problems).
        """
        self.reject_message = ""
        # NOTE(review): 'dsc' is not assigned anywhere in the lines visible
        # here -- presumably it is self.pkg.dsc; confirm against the full
        # source before relying on this excerpt.

        # Ensure version is sane
        q = self.projectB.query("""
SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
 WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")))
        self.cross_suite_version_check(q.getresult(), file, dsc.get("version"))

        return self.reject_message
923 ################################################################################
926 # NB: this function can remove entries from the 'files' index [if
927 # the .orig.tar.gz is a duplicate of the one in the archive]; if
928 # you're iterating over 'files' and call this function as part of
929 # the loop, be sure to add a check to the top of the loop to
930 # ensure you haven't just tried to dereference the deleted entry.
    def check_dsc_against_db(self, file):
        """Verify every file listed in the .dsc against archive and queues.

        For each entry in self.pkg.dsc_files:
          * if the file is part of this upload (in 'files'), reject any
            attempt to overwrite a copy already in the archive -- except
            that an .orig.tar.gz identical (size + md5) to the archived
            one is silently reused (and removed from 'files');
          * otherwise, for a missing .orig.tar.gz, search the pool and
            then the queue directories for it, recording orig_tar_id /
            orig_tar_gz / orig_tar_location on self.pkg;
          * finally compare the md5sum and size actually found against
            the values declared in the .dsc.

        NB: can remove entries from the 'files' index (duplicate
        .orig.tar.gz case) -- see the caller warning above this method.

        Returns a tuple (reject_message, None) on the normal path.

        NOTE(review): several interior lines (loop headers such as
        "for i in ql:", "ql = q.getresult()", else-branches, 'continue's
        and file-handle .close() calls) are not visible in this excerpt;
        indentation below reflects the surrounding structure -- confirm
        against the full source.
        """
        self.reject_message = ""
        files = self.pkg.files
        dsc_files = self.pkg.dsc_files
        legacy_source_untouchable = self.pkg.legacy_source_untouchable
        self.pkg.orig_tar_gz = None

        # Try and find all files mentioned in the .dsc.  This has
        # to work harder to cope with the multiple possible
        # locations of an .orig.tar.gz.
        # The ordering on the select is needed to pick the newest orig
        # when it exists in multiple places.
        for dsc_file in dsc_files.keys():
            if files.has_key(dsc_file):
                # File is part of this upload: take its checksum/size
                # from the upload itself.
                actual_md5 = files[dsc_file]["md5sum"]
                actual_size = int(files[dsc_file]["size"])
                found = "%s in incoming" % (dsc_file)
                # Check the file does not already exist in the archive
                q = self.projectB.query("SELECT f.size, f.md5sum, l.path, f.filename FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location ORDER BY f.id DESC" % (dsc_file))
                # Strip out anything that isn't '%s' or '/%s$'
                # (the LIKE above also matches filenames that merely
                # contain dsc_file as a substring).
                    if i[3] != dsc_file and i[3][-(len(dsc_file)+1):] != '/'+dsc_file:

                # "[dak] has not broken them.  [dak] has fixed a
                # brokenness.  Your crappy hack exploited a bug in
                # "(Come on!  I thought it was always obvious that
                # one just doesn't release different files with
                # the same name and version.)"
                #                        -- ajk@ on d-devel@l.d.o

                    # Ignore exact matches for .orig.tar.gz
                    if dsc_file.endswith(".orig.tar.gz"):
                        # An .orig.tar.gz whose size and md5 match the
                        # archived copy is reused rather than rejected.
                        if files.has_key(dsc_file) and \
                           int(files[dsc_file]["size"]) == int(i[0]) and \
                           files[dsc_file]["md5sum"] == i[1]:
                            self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ")
                            # Point at the pooled copy (path + filename).
                            self.pkg.orig_tar_gz = i[2] + i[3]
                        # Any other collision with an archived file is fatal.
                        self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file))
            elif dsc_file.endswith(".orig.tar.gz"):
                # Not in this upload: look for the orig tarball in the pool.
                q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
                # Strip out anything that isn't '%s' or '/%s$'
                    if i[1] != dsc_file and i[1][-(len(dsc_file)+1):] != '/'+dsc_file:

                    # Unfortunately, we may get more than one match here if,
                    # for example, the package was in potato but had an -sa
                    # upload in woody.  So we need to choose the right one.
                    x = ql[0]; # default to something sane in case we don't match any or have only one
                            # Candidate: compare its real md5/size on disk
                            # against what the .dsc declares.
                            old_file = i[0] + i[1]
                            old_file_fh = utils.open_file(old_file)
                            actual_md5 = apt_pkg.md5sum(old_file_fh)
                            actual_size = os.stat(old_file)[stat.ST_SIZE]
                            if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
                                # Non-matching duplicates are marked so the
                                # legacy copy is left alone later on.
                                legacy_source_untouchable[i[3]] = ""
                    # Re-read checksum/size from the chosen match 'x'.
                    old_file = x[0] + x[1]
                    old_file_fh = utils.open_file(old_file)
                    actual_md5 = apt_pkg.md5sum(old_file_fh)
                    actual_size = os.stat(old_file)[stat.ST_SIZE]
                    dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
                    # See install() in process-accepted...
                    self.pkg.orig_tar_id = x[3]
                    self.pkg.orig_tar_gz = old_file
                    if suite_type == "legacy" or suite_type == "legacy-mixed":
                        self.pkg.orig_tar_location = "legacy"
                        self.pkg.orig_tar_location = x[4]
                    # Not there? Check the queue directories...
                    in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file)
                    # See process_it() in 'dak process-unchecked' for explanation of this
                    # in_unchecked check dropped by ajt 2007-08-28, how did that
                    # "and False" deliberately disables this branch (see the
                    # comment above) while keeping the code in place.
                    if os.path.exists(in_unchecked) and False:
                        return (self.reject_message, in_unchecked)
                        # Fall back to the other queue directories in order.
                        for dir in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates" ]:
                            in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (dir)],dsc_file)
                            if os.path.exists(in_otherdir):
                                in_otherdir_fh = utils.open_file(in_otherdir)
                                actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
                                in_otherdir_fh.close()
                                actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
                                self.pkg.orig_tar_gz = in_otherdir
                        # Nowhere to be found: reject and flag with -1 so
                        # later stages know the orig is missing.
                        self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file))
                        self.pkg.orig_tar_gz = -1
                # Neither in the upload nor an .orig.tar.gz we can hunt for.
                self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file))
            # Whatever copy we settled on must match the .dsc's declaration.
            if actual_md5 != dsc_files[dsc_file]["md5sum"]:
                self.reject("md5sum for %s doesn't match %s." % (found, file))
            if actual_size != int(dsc_files[dsc_file]["size"]):
                self.reject("size for %s doesn't match %s." % (found, file))

        return (self.reject_message, None)
1060 def do_query(self, q):
1061 sys.stderr.write("query: \"%s\" ... " % (q))
1062 before = time.time()
1063 r = self.projectB.query(q)
1064 time_diff = time.time()-before
1065 sys.stderr.write("took %.3f seconds.\n" % (time_diff))