3 # Queue utility functions for dak
4 # Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 2 of the License, or
9 # (at your option) any later version.
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software
18 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
20 ###############################################################################
22 import cPickle, errno, os, pg, re, stat, sys, time
23 import apt_inst, apt_pkg
24 import utils, database
28 ###############################################################################
# Precompiled regular expressions shared by the queue-handling code.
re_isanum = re.compile(r"^\d+$")             # an all-digit string (e.g. a bug number)
re_default_answer = re.compile(r"\[(.*)\]")  # default choice marked "[X]" in a prompt
re_fdnic = re.compile(r"\n\n")               # paragraph separator in changelog text
re_bin_only_nmu = re.compile(r"\+b\d+$")     # "+bN" binary-only NMU version suffix
35 ################################################################################
37 # Determine what parts in a .changes are NEW
# NOTE(review): this excerpt embeds the file's own line numbers and skips
# several original lines; the body is annotated as-is, not reconstructed.
# Purpose (from visible code): collect upload entries that have no row in
# the 'override' table into a per-package dict (priority/section/type/
# component/files), clear the per-file "new" flag for packages that do have
# an override in every target suite, and print warnings for stable/oldstable
# targets and for packages already present in another component.
39 def determine_new(changes, files, projectB, warn=1):
42 # Build up a list of potentially new things
43 for file in files.keys():
45 # Skip byhand elements
46 if f["type"] == "byhand":
49 priority = f["priority"]
50 section = f["section"]
52 component = f["component"]
56 if not new.has_key(pkg):
58 new[pkg]["priority"] = priority
59 new[pkg]["section"] = section
60 new[pkg]["type"] = type
61 new[pkg]["component"] = component
62 new[pkg]["files"] = []
64 old_type = new[pkg]["type"]
66 # source gets trumped by deb or udeb
68 new[pkg]["priority"] = priority
69 new[pkg]["section"] = section
70 new[pkg]["type"] = type
71 new[pkg]["component"] = component
72 new[pkg]["files"].append(file)
73 if f.has_key("othercomponents"):
74 new[pkg]["othercomponents"] = f["othercomponents"]
# For every target suite, drop packages that already carry an override row.
# NOTE(review): the SQL below is built by string interpolation — injection
# risk if 'pkg' were attacker-controlled; presumably validated upstream.
76 for suite in changes["suite"].keys():
77 suite_id = database.get_suite_id(suite)
78 for pkg in new.keys():
79 component_id = database.get_component_id(new[pkg]["component"])
80 type_id = database.get_override_type_id(new[pkg]["type"])
81 q = projectB.query("SELECT package FROM override WHERE package = '%s' AND suite = %s AND component = %s AND type = %s" % (pkg, suite_id, component_id, type_id))
84 for file in new[pkg]["files"]:
85 if files[file].has_key("new"):
86 del files[file]["new"]
# Operator warnings (the guard on 'warn' sits in an elided line — confirm).
90 if changes["suite"].has_key("stable"):
91 print "WARNING: overrides will be added for stable!"
92 if changes["suite"].has_key("oldstable"):
93 print "WARNING: overrides will be added for OLDstable!"
94 for pkg in new.keys():
95 if new[pkg].has_key("othercomponents"):
96 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
100 ################################################################################
# NOTE(review): fragment — the enclosing def line is elided from this
# excerpt (presumably a helper that derives an override type from a file
# entry 'f': package files use their "dbtype", source artefacts map to
# "dsc"; anything else is fatal). Annotated as-is.
104 if f.has_key("dbtype"):
106 elif f["type"] in [ "orig.tar.gz", "orig.tar.bz2", "tar.gz", "tar.bz2", "diff.gz", "diff.bz2", "dsc" ]:
# Unknown file type: hard abort via utils.fubar.
109 utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (type))
111 # Validate the override type
112 type_id = database.get_override_type_id(type)
# An unknown override type is also fatal (guard line elided — confirm).
114 utils.fubar("invalid type (%s) for new. Say wha?" % (type))
118 ################################################################################
120 # check if section/priority values are valid
def check_valid(new):
    """Resolve and sanity-check section/priority for each pending-NEW entry.

    For every package in 'new', look up the database ids of its section and
    priority; an id of -1 flags an invalid combination.  udeb packages must
    live in a debian-installer section (and only they may); the "source"
    priority is reserved for .dsc entries (and only those).
    """
    for name in new.keys():
        entry = new[name]
        sect = entry["section"]
        prio = entry["priority"]
        pkg_type = entry["type"]
        entry["section id"] = database.get_section_id(sect)
        entry["priority id"] = database.get_priority_id(entry["priority"])
        # udeb <-> debian-installer section must match up exactly (XOR fails).
        in_di = sect.find("debian-installer") != -1
        if in_di != (pkg_type == "udeb"):
            entry["section id"] = -1
        # "source" priority is valid for .dsc entries and nothing else.
        if (prio == "source") != (pkg_type == "dsc"):
            entry["priority id"] = -1
138 ###############################################################################
140 # Convenience wrapper to carry around all the package information in
def __init__(self, **kwds):
    """Initialise the package holder from arbitrary keyword attributes."""
    for key, value in kwds.items():
        setattr(self, key, value)
def update(self, **kwds):
    """Merge the given keyword arguments into this object's attributes."""
    for key, value in kwds.items():
        setattr(self, key, value)
149 ###############################################################################
# NOTE(review): excerpt skips some original lines (e.g. the line that
# presumably stores Cnf on self); annotated as-is.
# Purpose: set up upload-processing state — accept counters, an empty Pkg
# holder, the template-substitution map seeded from Dinstall config, and
# a PostgreSQL connection (pg module) initialised via database.init().
153 def __init__(self, Cnf):
155 self.accept_count = 0
156 self.accept_bytes = 0L
157 self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
158 legacy_source_untouchable = {})
160 # Initialize the substitution template mapping global
161 Subst = self.Subst = {}
162 Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"]
163 Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"]
164 Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"]
165 Subst["__DAK_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
167 self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
168 database.init(Cnf, self.projectB)
170 ###########################################################################
def init_vars (self):
    """Reset all per-upload state held on self.pkg to a pristine state."""
    # Empty each per-upload dictionary in place so any outstanding
    # references to these dicts remain valid (matches the old exec-based
    # "self.pkg.<name>.clear()" behaviour).
    for attr_name in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
        getattr(self.pkg, attr_name).clear()
    # Forget any previously-located .orig.tar.gz.
    self.pkg.orig_tar_id = None
    self.pkg.orig_tar_location = ""
    self.pkg.orig_tar_gz = None
179 ###########################################################################
# NOTE(review): excerpt skips the tail of this method (likely the file
# close); annotated as-is.
# Purpose: reload the pickled per-upload state from the companion ".dak"
# file next to the .changes ("[:-8]" strips the ".changes" suffix).  The
# load order must mirror the dump order in dump_vars below.
# SECURITY: cPickle.load on a file — safe only because the .dak file is
# produced by dak itself; never point this at untrusted input.
181 def update_vars (self):
182 dump_filename = self.pkg.changes_file[:-8]+".dak"
183 dump_file = utils.open_file(dump_filename)
184 p = cPickle.Unpickler(dump_file)
185 for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
186 exec "self.pkg.%s.update(p.load());" % (i)
187 for i in [ "orig_tar_id", "orig_tar_location" ]:
188 exec "self.pkg.%s = p.load();" % (i)
191 ###########################################################################
193 # This could just dump the dictionaries as is, but I'd like to
194 # avoid this so there's some idea of what process-accepted &
195 # process-new use from process-unchecked
# NOTE(review): excerpt skips many original lines (the chmod try/except
# scaffolding, the d_* dict initialisation, the final pickle loop body);
# annotated as-is, not reconstructed.
# Purpose: serialise a curated subset of the upload state into
# "<changes>.dak" in dest_dir with cPickle protocol 1.  Only whitelisted
# keys are dumped so process-accepted/process-new see a stable contract
# rather than whatever process-unchecked happened to stash.
# The file is chmod'ed 0660; a world-readable dump that cannot be
# re-chmodded is fatal (it may contain data not meant to be public).
197 def dump_vars(self, dest_dir):
198 for i in [ "changes", "dsc", "files", "dsc_files",
199 "legacy_source_untouchable", "orig_tar_id", "orig_tar_location" ]:
200 exec "%s = self.pkg.%s;" % (i,i)
201 dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".dak")
202 dump_file = utils.open_file(dump_filename, 'w')
204 os.chmod(dump_filename, 0660)
206 if errno.errorcode[e.errno] == 'EPERM':
207 perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE])
208 if perms & stat.S_IROTH:
209 utils.fubar("%s is world readable and chmod failed." % (dump_filename))
213 p = cPickle.Pickler(dump_file, 1)
214 for i in [ "d_changes", "d_dsc", "d_files", "d_dsc_files" ]:
# Whitelist of per-file keys worth persisting.
217 for file in files.keys():
219 for i in [ "package", "version", "architecture", "type", "size",
220 "md5sum", "component", "location id", "source package",
221 "source version", "maintainer", "dbtype", "files id",
222 "new", "section", "priority", "othercomponents",
223 "pool name", "original component" ]:
224 if files[file].has_key(i):
225 d_files[file][i] = files[file][i]
227 # Mandatory changes fields
228 for i in [ "distribution", "source", "architecture", "version",
229 "maintainer", "urgency", "fingerprint", "changedby822",
230 "changedby2047", "changedbyname", "maintainer822",
231 "maintainer2047", "maintainername", "maintaineremail",
232 "closes", "changes" ]:
233 d_changes[i] = changes[i]
234 # Optional changes fields
235 for i in [ "changed-by", "filecontents", "format", "process-new note", "adv id", "distribution-version" ]:
236 if changes.has_key(i):
237 d_changes[i] = changes[i]
239 for i in [ "source", "version", "maintainer", "fingerprint",
240 "uploaders", "bts changelog", "dm-upload-allowed" ]:
244 for file in dsc_files.keys():
245 d_dsc_files[file] = {}
246 # Mandatory dsc_files fields
247 for i in [ "size", "md5sum" ]:
248 d_dsc_files[file][i] = dsc_files[file][i]
249 # Optional dsc_files fields
250 for i in [ "files id" ]:
251 if dsc_files[file].has_key(i):
252 d_dsc_files[file][i] = dsc_files[file][i]
# Dump order here must stay in sync with the load order in update_vars.
254 for i in [ d_changes, d_dsc, d_files, d_dsc_files,
255 legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
259 ###########################################################################
261 # Set up the per-package template substitution mappings
# NOTE(review): excerpt skips some original lines (including the 'else:'
# of the Changed-By/Maintainer branch around line 285); annotated as-is.
# Purpose: refresh the per-package template substitution map (self.Subst)
# from the current .changes data: architecture list, filenames, the
# From/To maintainer addresses (Changed-By wins for sourceful uploads),
# optional sponsor Cc, BTS version-tracking Bcc, any global maintainer
# override, plus the reject message, source and version.
263 def update_subst (self, reject_message = ""):
265 changes = self.pkg.changes
266 # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
267 if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
268 changes["architecture"] = { "Unknown" : "" }
269 # and maintainer2047 may not exist.
270 if not changes.has_key("maintainer2047"):
271 changes["maintainer2047"] = self.Cnf["Dinstall::MyEmailAddress"]
273 Subst["__ARCHITECTURE__"] = " ".join(changes["architecture"].keys())
274 Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
275 Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "")
277 # For source uploads the Changed-By field wins; otherwise Maintainer wins.
278 if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
279 Subst["__MAINTAINER_FROM__"] = changes["changedby2047"]
280 Subst["__MAINTAINER_TO__"] = "%s, %s" % (changes["changedby2047"],
281 changes["maintainer2047"])
282 if "sponsoremail" in changes:
283 Subst["__MAINTAINER_TO__"] += ", %s"%changes["sponsoremail"]
284 Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown")
# (else-branch: plain maintainer upload — the 'else:' line itself is elided.)
286 Subst["__MAINTAINER_FROM__"] = changes["maintainer2047"]
287 Subst["__MAINTAINER_TO__"] = changes["maintainer2047"]
288 Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown")
289 if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
290 Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])
292 # Apply any global override of the Maintainer field
293 if self.Cnf.get("Dinstall::OverrideMaintainer"):
294 Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"]
295 Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"]
297 Subst["__REJECT_MESSAGE__"] = reject_message
298 Subst["__SOURCE__"] = changes.get("source", "Unknown")
299 Subst["__VERSION__"] = changes.get("version", "Unknown")
301 ###########################################################################
# NOTE(review): excerpt skips several original lines (the byhand/new/else
# branch bodies and some guards); annotated as-is.
# Purpose: build the human-readable (summary, short_summary) pair for the
# upload: per-file lines (byhand / NEW with priority+section / normal pool
# destination), an optional deb Description, the reflowed Changes text,
# the override entries, and whatever announce() appends.
303 def build_summaries(self):
304 changes = self.pkg.changes
305 files = self.pkg.files
307 byhand = summary = new = ""
309 # changes["distribution"] may not exist in corner cases
310 # (e.g. unreadable changes files)
311 if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
312 changes["distribution"] = {}
314 override_summary ="";
315 file_keys = files.keys()
317 for file in file_keys:
318 if files[file].has_key("byhand"):
320 summary += file + " byhand\n"
321 elif files[file].has_key("new"):
323 summary += "(new) %s %s %s\n" % (file, files[file]["priority"], files[file]["section"])
324 if files[file].has_key("othercomponents"):
325 summary += "WARNING: Already present in %s distribution.\n" % (files[file]["othercomponents"])
326 if files[file]["type"] == "deb":
327 deb_fh = utils.open_file(file)
328 summary += apt_pkg.ParseSection(apt_inst.debExtractControl(deb_fh))["Description"] + '\n'
# (else-branch for already-known files: compute pool destination — elided 'else:'.)
331 files[file]["pool name"] = utils.poolify (changes.get("source",""), files[file]["component"])
332 destination = self.Cnf["Dir::PoolRoot"] + files[file]["pool name"] + file
333 summary += file + "\n to " + destination + "\n"
334 if not files[file].has_key("type"):
335 files[file]["type"] = "unknown"
336 if files[file]["type"] in ["deb", "udeb", "dsc"]:
337 # (queue/unchecked), there we have override entries already, use them
338 # (process-new), there we dont have override entries, use the newly generated ones.
339 override_prio = files[file].get("override priority", files[file]["priority"])
340 override_sect = files[file].get("override section", files[file]["section"])
341 override_summary += "%s - %s %s\n" % (file, override_prio, override_sect)
343 short_summary = summary
345 # This is for direport's benefit...
346 f = re_fdnic.sub("\n .\n", changes.get("changes",""))
349 summary += "Changes: " + f
351 summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
353 summary += self.announce(short_summary, 0)
355 return (summary, short_summary)
357 ###########################################################################
# NOTE(review): excerpt skips several original lines (the bug loop header,
# action guard, and the end of the stable-warning string); annotated as-is.
# Purpose: for each bug number listed in the .changes Closes field, append
# it to the summary and (when acting) send a templated bug-close mail, with
# an extra warning paragraph for uploads targeting stable.
# CAUTION: lines 376-380 below are inside a triple-quoted string.
359 def close_bugs (self, summary, action):
360 changes = self.pkg.changes
364 bugs = changes["closes"].keys()
370 summary += "Closing bugs: "
372 summary += "%s " % (bug)
374 Subst["__BUG_NUMBER__"] = bug
375 if changes["distribution"].has_key("stable"):
376 Subst["__STABLE_WARNING__"] = """
377 Note that this package is not part of the released stable Debian
378 distribution. It may have dependencies on other unreleased software,
379 or other instabilities. Please take care if you wish to install it.
380 The update will eventually make its way into the next released Debian
383 Subst["__STABLE_WARNING__"] = ""
384 mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-close")
385 utils.send_mail (mail_message)
387 self.Logger.log(["closing bugs"]+bugs)
392 ###########################################################################
# NOTE(review): excerpt skips several original lines (early return,
# lists_done init, action guard); annotated as-is.
# Purpose: send templated announcement mail to each target suite's
# Announce list (deduplicated), Bcc'ing the BTS tracking server for
# sourceful uploads, then optionally close bugs.  Returns the summary
# text describing what was announced.  Only sourceful uploads with a
# changes Format >= 1.6 are announced.
394 def announce (self, short_summary, action):
397 changes = self.pkg.changes
399 # Only do announcements for source uploads with a recent dpkg-dev installed
400 if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):
405 Subst["__SHORT_SUMMARY__"] = short_summary
407 for dist in changes["distribution"].keys():
408 list = Cnf.Find("Suite::%s::Announce" % (dist))
409 if list == "" or lists_done.has_key(list):
412 summary += "Announcing to %s\n" % (list)
415 Subst["__ANNOUNCE_LIST_ADDRESS__"] = list
416 if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
417 Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"])
418 mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.announce")
419 utils.send_mail (mail_message)
421 if Cnf.FindB("Dinstall::CloseBugs"):
422 summary = self.close_bugs(summary, action)
426 ###########################################################################
# NOTE(review): excerpt skips several original lines (dsc binding, print/
# log preamble, some loop bodies); annotated as-is.
# Purpose: accept the upload — dump state, move the .changes and all its
# files into Dir::Queue::Accepted, bump the accept counters, send the
# accepted mail / announcements, emit DebBugs version-tracking files
# (.versions from the changelog, .debinfo binary->source mapping), and
# finally feed the accepted queue to the auto-builder via queue_build().
428 def accept (self, summary, short_summary):
431 files = self.pkg.files
432 changes = self.pkg.changes
433 changes_file = self.pkg.changes_file
437 self.Logger.log(["Accepting changes",changes_file])
439 self.dump_vars(Cnf["Dir::Queue::Accepted"])
441 # Move all the files into the accepted directory
442 utils.move(changes_file, Cnf["Dir::Queue::Accepted"])
443 file_keys = files.keys()
444 for file in file_keys:
445 utils.move(file, Cnf["Dir::Queue::Accepted"])
446 self.accept_bytes += float(files[file]["size"])
447 self.accept_count += 1
449 # Send accept mail, announce to lists, close bugs and check for
450 # override disparities
451 if not Cnf["Dinstall::Options::No-Mail"]:
452 Subst["__SUITE__"] = ""
453 Subst["__SUMMARY__"] = summary
454 mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
455 utils.send_mail(mail_message)
456 self.announce(short_summary, 1)
459 ## Helper stuff for DebBugs Version Tracking
460 if Cnf.Find("Dir::Queue::BTSVersionTrack"):
461 # ??? once queue/* is cleared on *.d.o and/or reprocessed
462 # the conditionalization on dsc["bts changelog"] should be
465 # Write out the version history from the changelog
466 if changes["architecture"].has_key("source") and \
467 dsc.has_key("bts changelog"):
469 temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
470 dotprefix=1, perms=0644)
471 version_history = utils.open_file(temp_filename, 'w')
472 version_history.write(dsc["bts changelog"])
473 version_history.close()
474 filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
475 changes_file[:-8]+".versions")
476 os.rename(temp_filename, filename)
478 # Write out the binary -> source mapping.
479 temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
480 dotprefix=1, perms=0644)
481 debinfo = utils.open_file(temp_filename, 'w')
482 for file in file_keys:
484 if f["type"] == "deb":
485 line = " ".join([f["package"], f["version"],
486 f["architecture"], f["source package"],
487 f["source version"]])
488 debinfo.write(line+"\n")
490 filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
491 changes_file[:-8]+".debinfo")
492 os.rename(temp_filename, filename)
494 self.queue_build("accepted", Cnf["Dir::Queue::Accepted"])
496 ###########################################################################
# NOTE(review): excerpt skips several original lines (config guard,
# 'continue'/'else:' branches, the filename binding around line 535,
# query-result fetch near 541); annotated as-is.
# Purpose: register the upload's files with the auto-builder queue: for
# each target suite listed in Dinstall::QueueBuildSuites, copy (security
# queues) or symlink the files into Dir::QueueBuild and record them in
# the queue_build table; also link in a pool .orig.tar.gz when needed,
# all inside one BEGIN/COMMIT WORK transaction.
498 def queue_build (self, queue, path):
501 files = self.pkg.files
502 changes = self.pkg.changes
503 changes_file = self.pkg.changes_file
505 file_keys = files.keys()
507 ## Special support to enable clean auto-building of queued packages
508 queue_id = database.get_or_set_queue_id(queue)
510 self.projectB.query("BEGIN WORK")
511 for suite in changes["distribution"].keys():
512 if suite not in Cnf.ValueList("Dinstall::QueueBuildSuites"):
514 suite_id = database.get_suite_id(suite)
515 dest_dir = Cnf["Dir::QueueBuild"]
516 if Cnf.FindB("Dinstall::SecurityQueueBuild"):
517 dest_dir = os.path.join(dest_dir, suite)
518 for file in file_keys:
519 src = os.path.join(path, file)
520 dest = os.path.join(dest_dir, file)
521 if Cnf.FindB("Dinstall::SecurityQueueBuild"):
522 # Copy it since the original won't be readable by www-data
523 utils.copy(src, dest)
525 # Create a symlink to it
526 os.symlink(src, dest)
527 # Add it to the list of packages for later processing by apt-ftparchive
528 self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
529 # If the .orig.tar.gz is in the pool, create a symlink to
530 # it (if one doesn't already exist)
531 if self.pkg.orig_tar_id:
532 # Determine the .orig.tar.gz file name
533 for dsc_file in self.pkg.dsc_files.keys():
534 if dsc_file.endswith(".orig.tar.gz"):
536 dest = os.path.join(dest_dir, filename)
537 # If it doesn't exist, create a symlink
538 if not os.path.exists(dest):
539 # Find the .orig.tar.gz in the pool
540 q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id))
543 utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id))
544 src = os.path.join(ql[0][0], ql[0][1])
545 os.symlink(src, dest)
546 # Add it to the list of packages for later processing by apt-ftparchive
547 self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
548 # if it does, update things to ensure it's not removed prematurely
550 self.projectB.query("UPDATE queue_build SET in_queue = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id))
552 self.projectB.query("COMMIT WORK")
554 ###########################################################################
# NOTE(review): excerpt skips several original lines (the early return,
# 'summary' init, and the empty-summary guard before the mail is built);
# annotated as-is.
# Purpose: after accept, compare each known deb's section/priority against
# its override entry and mail the maintainer a templated
# "override-disparity" report when they differ.  Skipped for non-sourceful
# uploads, when the check is disabled, or when mail is suppressed.
556 def check_override (self):
558 changes = self.pkg.changes
559 files = self.pkg.files
562 # Abandon the check if:
563 # a) it's a non-sourceful upload
564 # b) override disparity checks have been disabled
565 # c) we're not sending mail
566 if not changes["architecture"].has_key("source") or \
567 not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
568 Cnf["Dinstall::Options::No-Mail"]:
572 file_keys = files.keys()
574 for file in file_keys:
575 if not files[file].has_key("new") and files[file]["type"] == "deb":
576 section = files[file]["section"]
577 override_section = files[file]["override section"]
578 if section.lower() != override_section.lower() and section != "-":
579 summary += "%s: package says section is %s, override says %s.\n" % (file, section, override_section)
580 priority = files[file]["priority"]
581 override_priority = files[file]["override priority"]
582 if priority != override_priority and priority != "-":
583 summary += "%s: package says priority is %s, override says %s.\n" % (file, priority, override_priority)
588 Subst["__SUMMARY__"] = summary
589 mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/process-unchecked.override-disparity")
590 utils.send_mail(mail_message)
592 ###########################################################################
# NOTE(review): excerpt skips several original lines, including the close
# of the docstring below and the try/except scaffolding around os.open —
# annotated as-is; no comments inserted inside the (truncated) docstring.
# Purpose: move the given files into Dir::Queue::Reject, claiming each
# destination with O_CREAT|O_EXCL; an existing reject file is first
# displaced to the morgue so a racing uploader cannot swap content.
594 def force_reject (self, files):
595 """Forcefully move files from the current directory to the
596 reject directory. If any file already exists in the reject
597 directory it will be moved to the morgue to make way for
603 # Skip any files which don't exist or which we don't have permission to copy.
604 if os.access(file,os.R_OK) == 0:
606 dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file)
608 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
610 # File exists? Let's try and move it to the morgue
611 if errno.errorcode[e.errno] == 'EEXIST':
612 morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file)
614 morgue_file = utils.find_next_free(morgue_file)
615 except utils.tried_too_hard_exc:
616 # Something's either gone badly Pete Tong, or
617 # someone is trying to exploit us.
618 utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file))
620 utils.move(dest_file, morgue_file, perms=0660)
622 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
625 utils.warn("**WARNING** failed to claim %s in the reject directory." % (file))
629 # If we got here, we own the destination file, so we can
630 # safely overwrite it.
631 utils.move(file, dest_file, 1, perms=0660)
634 ###########################################################################
# NOTE(review): excerpt skips a large span of original lines (659-669:
# answer-handling, Q/A branches, Subst/Cnf bindings) plus assorted single
# lines; annotated as-is.
# Purpose: reject the upload.  For a manual reject, spawn $EDITOR to
# compose the message and confirm via an [R]eject/Edit/Abandon/Quit
# prompt; then move everything to the reject dir via force_reject, write
# a "<changes>.reason" file claimed with O_CREAT|O_EXCL (anti-race), and
# send the templated rejection mail unless mail is disabled.
636 def do_reject (self, manual = 0, reject_message = ""):
637 # If we weren't given a manual rejection message, spawn an
638 # editor so the user can add one in...
639 if manual and not reject_message:
640 temp_filename = utils.temp_filename()
641 editor = os.environ.get("EDITOR","vi")
644 os.system("%s %s" % (editor, temp_filename))
645 temp_fh = utils.open_file(temp_filename)
646 reject_message = "".join(temp_fh.readlines())
648 print "Reject message:"
649 print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
650 prompt = "[R]eject, Edit, Abandon, Quit ?"
652 while prompt.find(answer) == -1:
653 answer = utils.our_raw_input(prompt)
654 m = re_default_answer.search(prompt)
657 answer = answer[:1].upper()
658 os.unlink(temp_filename)
670 reason_filename = pkg.changes_file[:-8] + ".reason"
671 reason_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename
673 # Move all the files into the reject directory
674 reject_files = pkg.files.keys() + [pkg.changes_file]
675 self.force_reject(reject_files)
677 # If we fail here someone is probably trying to exploit the race
678 # so let's just raise an exception ...
679 if os.path.exists(reason_filename):
680 os.unlink(reason_filename)
681 reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
# Automatic rejection path (guard elided): canned headers, no manual text.
684 Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
685 Subst["__MANUAL_REJECT_MESSAGE__"] = ""
686 Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
687 os.write(reason_fd, reject_message)
688 reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
690 # Build up the rejection email
691 user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"])
693 Subst["__REJECTOR_ADDRESS__"] = user_email_address
694 Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
695 Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
696 reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
697 # Write the rejection email out as the <foo>.reason file
698 os.write(reason_fd, reject_mail_message)
702 # Send the rejection mail if appropriate
703 if not Cnf["Dinstall::Options::No-Mail"]:
704 utils.send_mail(reject_mail_message)
706 self.Logger.log(["rejected", pkg.changes_file])
709 ################################################################################
711 # Ensure that source exists somewhere in the archive for the binary
712 # upload being processed.
714 # (1) exact match => 1.0-3
715 # (2) Bin-only NMU => 1.0-3+b1 , 1.0-3.1+b1
# NOTE(review): excerpt skips several original lines (suite-list setup,
# the mapping fix-point loop, returns); annotated as-is.
# NOTE(review): the default 'suites = ["any"]' is a mutable default
# argument — safe only if no caller mutates it; worth fixing upstream.
# Purpose: check that the named source package/version exists in the
# archive, either anywhere ("any") or in the given suites expanded
# through the SuiteMappings (map/silent-map) configuration; also accepts
# a binary-only-NMU version by stripping the "+bN" suffix.
717 def source_exists (self, package, source_version, suites = ["any"]):
721 que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
724 # source must exist in suite X, or in some other suite that's
725 # mapped to X, recursively... silent-maps are counted too,
726 # unreleased-maps aren't.
727 maps = self.Cnf.ValueList("SuiteMappings")[:]
729 maps = [ m.split() for m in maps ]
730 maps = [ (x[1], x[2]) for x in maps
731 if x[0] == "map" or x[0] == "silent-map" ]
734 if x[1] in s and x[0] not in s:
737 que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, " OR ".join(["su.suite_name = '%s'" % a for a in s]))
738 q = self.projectB.query(que)
740 # Reduce the query results to a list of version numbers
741 ql = [ i[0] for i in q.getresult() ]
744 if source_version in ql:
748 orig_source_version = re_bin_only_nmu.sub('', source_version)
749 if orig_source_version in ql:
757 ################################################################################
# NOTE(review): excerpt skips several original lines (the 'type' binding
# for source/binary, the unknown-suite guard, the final return); annotated
# as-is.
# Purpose: look up the override entry (section, priority) for a package in
# a suite/component, honouring any Suite::X::OverrideSuite redirection and
# falling back from the "dsc" override type to deb/udeb; the section and
# priority found are cached on the file entry for later disparity checks.
759 def in_override_p (self, package, component, suite, binary_type, file):
760 files = self.pkg.files
762 if binary_type == "": # must be source
767 # Override suite name; used for example with proposed-updates
768 if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
769 suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)]
771 # Avoid <undef> on unknown distributions
772 suite_id = database.get_suite_id(suite)
775 component_id = database.get_component_id(component)
776 type_id = database.get_override_type_id(type)
778 q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
779 % (package, suite_id, component_id, type_id))
780 result = q.getresult()
781 # If checking for a source package fall back on the binary override type
782 if type == "dsc" and not result:
783 deb_type_id = database.get_override_type_id("deb")
784 udeb_type_id = database.get_override_type_id("udeb")
785 q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
786 % (package, suite_id, component_id, deb_type_id, udeb_type_id))
787 result = q.getresult()
789 # Remember the section and priority so we can check them later if appropriate
791 files[file]["override section"] = result[0][0]
792 files[file]["override priority"] = result[0][1]
796 ################################################################################
def reject (self, str, prefix="Rejected: "):
    """Append one prefixed line to the accumulated rejection message.

    The separating newline goes *before* each new entry, so the finished
    message handed back to callers never carries a trailing newline.
    """
    if self.reject_message:
        self.reject_message = "%s\n%s%s" % (self.reject_message, prefix, str)
    else:
        self.reject_message = prefix + str
806 ################################################################################
# NOTE(review): excerpt skips the 'anyversion' initialisation and the
# return/assignment lines; annotated as-is.
# Purpose: from (version, suite) query rows, pick the highest version
# found in 'suite' or any suite that Enhances it per
# Suite::X::VersionChecks::Enhances (comparison via apt_pkg).
808 def get_anyversion(self, query_result, suite):
810 anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
811 for (v, s) in query_result:
812 if s in [ x.lower() for x in anysuite ]:
813 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
817 ################################################################################
# NOTE(review): excerpt skips several original lines (e.g. the binding of
# 'suite' from entry[1] near line 833 and some else/guard lines);
# annotated as-is.
# NOTE(review): '[ i.lower for i in ... ]' on the two lines below stores
# the *bound method* str.lower, not the lowered string — almost certainly
# meant to be 'i.lower()'; as written the membership tests against these
# lists can never match config-supplied suite names. Confirm upstream.
# Purpose: enforce per-suite version ordering rules (MustBeNewerThan /
# MustBeOlderThan from the conf file, with the target suite implicitly
# newer-than), and handle the distribution-version "prop-up" dance that
# propagates an upload to a mapped suite instead of rejecting it.
819 def cross_suite_version_check(self, query_result, file, new_version):
820 """Ensure versions are newer than existing packages in target
821 suites and that cross-suite version checking rules as
822 set out in the conf file are satisfied."""
824 # Check versions for each target suite
825 for target_suite in self.pkg.changes["distribution"].keys():
826 must_be_newer_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
827 must_be_older_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
828 # Enforce "must be newer than target suite" even if conffile omits it
829 if target_suite not in must_be_newer_than:
830 must_be_newer_than.append(target_suite)
831 for entry in query_result:
832 existent_version = entry[0]
834 if suite in must_be_newer_than and \
835 apt_pkg.VersionCompare(new_version, existent_version) < 1:
836 self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
837 if suite in must_be_older_than and \
838 apt_pkg.VersionCompare(new_version, existent_version) > -1:
839 ch = self.pkg.changes
841 if ch.get('distribution-version', {}).has_key(suite):
842 # we really use the other suite, ignoring the conflicting one ...
843 addsuite = ch["distribution-version"][suite]
845 add_version = self.get_anyversion(query_result, addsuite)
846 target_version = self.get_anyversion(query_result, target_suite)
849 # not add_version can only happen if we map to a suite
850 # that doesn't enhance the suite we're propup'ing from.
851 # so "propup-ver x a b c; map a d" is a problem only if
852 # d doesn't enhance a.
854 # i think we could always propagate in this case, rather
855 # than complaining. either way, this isn't a REJECT issue
857 # And - we really should complain to the dorks who configured dak
858 self.reject("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite), "Warning: ")
859 self.pkg.changes.setdefault("propdistribution", {})
860 self.pkg.changes["propdistribution"][addsuite] = 1
862 elif not target_version:
863 # not targets_version is true when the package is NEW
864 # we could just stick with the "...old version..." REJECT
866 self.reject("Won't propogate NEW packages.")
867 elif apt_pkg.VersionCompare(new_version, add_version) < 0:
868 # propogation would be redundant. no need to reject though.
869 self.reject("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite), "Warning: ")
871 elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
872 apt_pkg.VersionCompare(add_version, target_version) >= 0:
874 self.reject("Propogating upload to %s" % (addsuite), "Warning: ")
875 self.pkg.changes.setdefault("propdistribution", {})
876 self.pkg.changes["propdistribution"][addsuite] = 1
880 self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
882 ################################################################################
# NOTE(review): excerpt is missing original line 891, which presumably
# named the 'architecture a' table in the first query's FROM clause; the
# SQL text below is therefore incomplete as shown.  No comments are
# inserted inside the triple-quoted SQL strings.
# Purpose: run the cross-suite version checks for one binary file and
# reject if an identical package/version/arch binary already exists in
# the archive.  Returns the accumulated rejection message ("" if clean).
884 def check_binary_against_db(self, file):
885 self.reject_message = ""
886 files = self.pkg.files
888 # Ensure version is sane
889 q = self.projectB.query("""
890 SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
892 WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
893 AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
894 % (files[file]["package"],
895 files[file]["architecture"]))
896 self.cross_suite_version_check(q.getresult(), file, files[file]["version"])
898 # Check for any existing copies of the file
899 q = self.projectB.query("""
900 SELECT b.id FROM binaries b, architecture a
901 WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
902 AND a.id = b.architecture"""
903 % (files[file]["package"],
904 files[file]["version"],
905 files[file]["architecture"]))
907 self.reject("%s: can not overwrite existing copy already in the archive." % (file))
909 return self.reject_message
911 ################################################################################
    def check_source_against_db(self, file):
        """Validate source package *file* against the archive database.

        Looks up every (version, suite) pair recorded for this source
        package and delegates the actual cross-suite version policy to
        cross_suite_version_check().

        Returns the accumulated self.reject_message ("" when no problems
        were found).
        """
        self.reject_message = ""
        # NOTE(review): `dsc` is expected to be self.pkg.dsc, bound just
        # before this point -- confirm against the full file.
        # Ensure version is sane
        q = self.projectB.query("""
SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
 WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")))
        self.cross_suite_version_check(q.getresult(), file, dsc.get("version"))
        return self.reject_message
925 ################################################################################
928 # NB: this function can remove entries from the 'files' index [if
929 # the .orig.tar.gz is a duplicate of the one in the archive]; if
930 # you're iterating over 'files' and call this function as part of
931 # the loop, be sure to add a check to the top of the loop to
932 # ensure you haven't just tried to dereference the deleted entry.
    def check_dsc_against_db(self, file):
        """Try to locate and verify every file listed in the .dsc.

        Each entry of self.pkg.dsc_files is looked for, in order of
        preference, in incoming, in the pool (for an .orig.tar.gz), and
        in the queue directories; its md5sum and size are then checked
        against what the .dsc declares, with failures accumulated via
        self.reject().

        Side effects: sets self.pkg.orig_tar_gz (a path, None, or -1 when
        the referenced orig tarball cannot be found anywhere), and may set
        self.pkg.orig_tar_id / orig_tar_location, update dsc_files and
        legacy_source_untouchable.  As warned in the comment above this
        method, entries can also be removed from self.pkg.files.

        Returns a tuple (reject_message, unchecked_path_or_None).
        """
        self.reject_message = ""
        files = self.pkg.files
        dsc_files = self.pkg.dsc_files
        legacy_source_untouchable = self.pkg.legacy_source_untouchable
        # Reset; set again below once/if the orig tarball is located.
        self.pkg.orig_tar_gz = None
        # Try and find all files mentioned in the .dsc. This has
        # to work harder to cope with the multiple possible
        # locations of an .orig.tar.gz.
        # The ordering on the select is needed to pick the newest orig
        # when it exists in multiple places.
        for dsc_file in dsc_files.keys():
            # Case 1: the file was shipped with this upload (in incoming).
            if files.has_key(dsc_file):
                actual_md5 = files[dsc_file]["md5sum"]
                actual_size = int(files[dsc_file]["size"])
                found = "%s in incoming" % (dsc_file)
                # Check the file does not already exist in the archive
                q = self.projectB.query("SELECT f.size, f.md5sum, l.path, f.filename FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location ORDER BY f.id DESC" % (dsc_file))
                # Strip out anything that isn't '%s' or '/%s$'
                # (the LIKE pattern above also matches mere substrings).
                if i[3] != dsc_file and i[3][-(len(dsc_file)+1):] != '/'+dsc_file:
                # "[dak] has not broken them.  [dak] has fixed a
                # brokenness.  Your crappy hack exploited a bug in
                # "(Come on!  I thought it was always obvious that
                # one just doesn't release different files with
                # the same name and version.)"
                #                        -- ajk@ on d-devel@l.d.o
                    # Ignore exact matches for .orig.tar.gz
                    if dsc_file.endswith(".orig.tar.gz"):
                        if files.has_key(dsc_file) and \
                           int(files[dsc_file]["size"]) == int(i[0]) and \
                           files[dsc_file]["md5sum"] == i[1]:
                            # Same name, same contents: reuse the archive copy.
                            self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ")
                            self.pkg.orig_tar_gz = i[2] + i[3]
                    self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file))
            # Case 2: not in incoming, but it is an .orig.tar.gz -- look for
            # it in the pool first, then in the queue directories.
            elif dsc_file.endswith(".orig.tar.gz"):
                q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
                # Strip out anything that isn't '%s' or '/%s$'
                if i[1] != dsc_file and i[1][-(len(dsc_file)+1):] != '/'+dsc_file:
                    # Unfortunately, we may get more than one match here if,
                    # for example, the package was in potato but had an -sa
                    # upload in woody.  So we need to choose the right one.
                    x = ql[0]; # default to something sane in case we don't match any or have only one
                        # Compare each candidate's on-disk md5/size against
                        # the .dsc to pick the matching pool copy.
                        old_file = i[0] + i[1]
                        old_file_fh = utils.open_file(old_file)
                        actual_md5 = apt_pkg.md5sum(old_file_fh)
                        actual_size = os.stat(old_file)[stat.ST_SIZE]
                        if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
                            # Non-matching copies must not be touched later.
                            legacy_source_untouchable[i[3]] = ""
                    # Re-read md5/size of the chosen copy for the final check.
                    old_file = x[0] + x[1]
                    old_file_fh = utils.open_file(old_file)
                    actual_md5 = apt_pkg.md5sum(old_file_fh)
                    actual_size = os.stat(old_file)[stat.ST_SIZE]
                    dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
                    # See install() in process-accepted...
                    self.pkg.orig_tar_id = x[3]
                    self.pkg.orig_tar_gz = old_file
                    if suite_type == "legacy" or suite_type == "legacy-mixed":
                        self.pkg.orig_tar_location = "legacy"
                        self.pkg.orig_tar_location = x[4]
                    # Not there? Check the queue directories...
                    in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file)
                    # See process_it() in 'dak process-unchecked' for explanation of this
                    # in_unchecked check dropped by ajt 2007-08-28, how did that
                    # NOTE(review): `and False` makes this branch dead code --
                    # deliberately disabled per the comment above.
                    if os.path.exists(in_unchecked) and False:
                        return (self.reject_message, in_unchecked)
                        for dir in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates" ]:
                            in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (dir)],dsc_file)
                            if os.path.exists(in_otherdir):
                                in_otherdir_fh = utils.open_file(in_otherdir)
                                actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
                                in_otherdir_fh.close()
                                actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
                                self.pkg.orig_tar_gz = in_otherdir
                    # -1 flags "referenced but nowhere to be found".
                    self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file))
                    self.pkg.orig_tar_gz = -1
            # Case 3: not in incoming and not an orig tarball -- it should
            # have been part of the upload; reject.
                self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file))
            # Finally verify checksum and size against what the .dsc claims.
            if actual_md5 != dsc_files[dsc_file]["md5sum"]:
                self.reject("md5sum for %s doesn't match %s." % (found, file))
            if actual_size != int(dsc_files[dsc_file]["size"]):
                self.reject("size for %s doesn't match %s." % (found, file))
        return (self.reject_message, None)
    def do_query(self, q):
        """Run query *q* against projectB, logging the query text and its
        wall-clock duration to stderr.  Instrumented wrapper around
        self.projectB.query() -- useful when hunting slow queries."""
        sys.stderr.write("query: \"%s\" ... " % (q))
        before = time.time()
        r = self.projectB.query(q)
        time_diff = time.time()-before
        sys.stderr.write("took %.3f seconds.\n" % (time_diff))
        # NOTE(review): `r` appears unused here -- presumably returned on
        # the line immediately following this chunk; confirm.