3 # Queue utility functions for dak
4 # Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 2 of the License, or
9 # (at your option) any later version.
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software
18 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
20 ###############################################################################
22 import cPickle, errno, os, pg, re, stat, sys, time
23 import apt_inst, apt_pkg
24 import utils, database
28 ###############################################################################
# Matches a string consisting solely of digits (used for bug numbers etc.).
re_isanum = re.compile (r"^\d+$")
# Captures the text inside square brackets in a prompt string, i.e. the
# default answer (e.g. "[R]eject, Edit, ..." -> "R"); see do_reject().
re_default_answer = re.compile(r"\[(.*)\]")
# Matches a blank line (paragraph break) inside a changes description;
# build_summaries() substitutes "\n .\n" for direport's benefit.
re_fdnic = re.compile(r"\n\n")
# Matches a binary-only NMU suffix ("+b<digits>") at the end of a version;
# source_exists() strips it to find the corresponding source version.
re_bin_only_nmu = re.compile(r"\+b\d+$")
35 ###############################################################################
37 # Convenience wrapper to carry around all the package information in
def __init__(self, **kwds):
    # Store every keyword argument directly as an instance attribute.
    self.__dict__.update(kwds)
def update(self, **kwds):
    """Set each keyword argument as an attribute on this object."""
    for attr_name, attr_value in kwds.items():
        setattr(self, attr_name, attr_value)
46 ###############################################################################
# Read in the group maintainer override file
def __init__ (self, Cnf):
    # NOTE(review): interior lines appear to be missing from this excerpt
    # (e.g. the initialisation of self.group_maint); documented as-is.
    if Cnf.get("Dinstall::GroupOverrideFilename"):
        # Full path = override directory + configured override filename.
        filename = Cnf["Dir::Override"] + Cnf["Dinstall::GroupOverrideFilename"]
        file = utils.open_file(filename)
        for line in file.readlines():
            # Strip comments, lower-case and trim before recording the entry.
            line = utils.re_comments.sub('', line).lower().strip()
            self.group_maint[line] = 1
def is_an_nmu (self, pkg):
    # Decide whether this upload is a Non-Maintainer Upload by comparing
    # the .dsc Maintainer (and Uploaders) against the changes Changed-By.
    # NOTE(review): several lines are missing from this excerpt (the local
    # Cnf/changes/dsc bindings, loop headers and the return statements).
    i = utils.fix_maintainer (dsc.get("maintainer",
                                      Cnf["Dinstall::MyEmailAddress"]).lower())
    (dsc_rfc822, dsc_rfc2047, dsc_name, dsc_email) = i
    # changes["changedbyname"] == dsc_name is probably never true, but better safe than sorry
    if dsc_name == changes["maintainername"].lower() and \
       (changes["changedby822"] == "" or changes["changedbyname"].lower() == dsc_name):
    # Uploaders listed in the .dsc also count as non-NMU uploaders.
    if dsc.has_key("uploaders"):
        uploaders = dsc["uploaders"].lower().split(",")
        (rfc822, rfc2047, name, email) = utils.fix_maintainer (i.strip())
        uploadernames[name] = ""
        if uploadernames.has_key(changes["changedbyname"].lower()):
    # Some group maintained packages (e.g. Debian QA) are never NMU's
    if self.group_maint.has_key(changes["maintaineremail"].lower()):
90 ###############################################################################
def __init__(self, Cnf):
    # NOTE(review): lines are missing from this excerpt (e.g. the self.Cnf
    # binding and accept_count initialisation); documented as-is.
    # Read in the group-maint override file
    # Total bytes moved to the accepted queue this run (see accept()).
    self.accept_bytes = 0L
    # Per-upload package state; reset between uploads by init_vars().
    self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
                   legacy_source_untouchable = {})

    # Initialize the substitution template mapping global
    Subst = self.Subst = {}
    Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"]
    Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"]
    Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"]
    Subst["__DAK_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]

    # Database handle shared by all queue operations in this object.
    self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
    database.init(Cnf, self.projectB)
113 ###########################################################################
def init_vars (self):
    """Reset the per-upload package state to a clean slate.

    Clears each of the state dictionaries carried on self.pkg and
    resets the .orig.tar.gz bookkeeping attributes.
    """
    for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
        # Use getattr instead of the original string-exec: same effect,
        # but introspectable, greppable and not a code-injection hazard.
        # Clear in place (rather than rebind) so existing references to
        # these dictionaries stay valid.
        getattr(self.pkg, i).clear()
    self.pkg.orig_tar_id = None
    self.pkg.orig_tar_location = ""
    self.pkg.orig_tar_gz = None
122 ###########################################################################
def update_vars (self):
    # Reload per-upload package state from the pickled ".dak" file that
    # sits beside the .changes file (changes_file minus the ".changes"
    # suffix, hence the [:-8] slice).
    # NOTE(review): the tail of this method (e.g. closing dump_file)
    # appears to be on missing lines of this excerpt.
    dump_filename = self.pkg.changes_file[:-8]+".dak"
    dump_file = utils.open_file(dump_filename)
    p = cPickle.Unpickler(dump_file)
    # The dictionaries come first in the pickle stream, in this fixed
    # order (matching the order dump_vars() writes them).
    for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
        exec "self.pkg.%s.update(p.load());" % (i)
    # Scalar attributes follow the dictionaries.
    for i in [ "orig_tar_id", "orig_tar_location" ]:
        exec "self.pkg.%s = p.load();" % (i)
134 ###########################################################################
# This could just dump the dictionaries as is, but I'd like to
# avoid this so there's some idea of what process-accepted &
# process-new use from process-unchecked
def dump_vars(self, dest_dir):
    # Serialise a whitelisted subset of the per-upload state into a
    # ".dak" pickle next to the .changes file in dest_dir.
    # NOTE(review): many lines are missing from this excerpt (the d_*
    # dictionary initialisations, the try/except around chmod and the
    # final p.dump() loop body); documented as-is.
    # Bind local names (changes, dsc, files, ...) to self.pkg attributes.
    for i in [ "changes", "dsc", "files", "dsc_files",
               "legacy_source_untouchable", "orig_tar_id", "orig_tar_location" ]:
        exec "%s = self.pkg.%s;" % (i,i)
    dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".dak")
    dump_file = utils.open_file(dump_filename, 'w')
    # Group-writable so other dak processes can rewrite the file later.
    os.chmod(dump_filename, 0660)
    if errno.errorcode[e.errno] == 'EPERM':
        perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE])
        # If the file is world readable and chmod failed, bail out hard.
        if perms & stat.S_IROTH:
            utils.fubar("%s is world readable and chmod failed." % (dump_filename))
    # Binary pickle, protocol 1.
    p = cPickle.Pickler(dump_file, 1)
    for i in [ "d_changes", "d_dsc", "d_files", "d_dsc_files" ]:
    # Copy only the whitelisted keys of each files[] entry.
    for file in files.keys():
        for i in [ "package", "version", "architecture", "type", "size",
                   "md5sum", "component", "location id", "source package",
                   "source version", "maintainer", "dbtype", "files id",
                   "new", "section", "priority", "othercomponents",
                   "pool name", "original component" ]:
            if files[file].has_key(i):
                d_files[file][i] = files[file][i]
    # Mandatory changes fields
    for i in [ "distribution", "source", "architecture", "version",
               "maintainer", "urgency", "fingerprint", "changedby822",
               "changedby2047", "changedbyname", "maintainer822",
               "maintainer2047", "maintainername", "maintaineremail",
               "closes", "changes" ]:
        d_changes[i] = changes[i]
    # Optional changes fields
    for i in [ "changed-by", "filecontents", "format", "process-new note", "adv id", "distribution-version" ]:
        if changes.has_key(i):
            d_changes[i] = changes[i]
    # Subset of dsc fields to copy (the loop body is on a missing line).
    for i in [ "source", "version", "maintainer", "fingerprint",
               "uploaders", "bts changelog" ]:
    for file in dsc_files.keys():
        d_dsc_files[file] = {}
        # Mandatory dsc_files fields
        for i in [ "size", "md5sum" ]:
            d_dsc_files[file][i] = dsc_files[file][i]
        # Optional dsc_files fields
        for i in [ "files id" ]:
            if dsc_files[file].has_key(i):
                d_dsc_files[file][i] = dsc_files[file][i]
    # Pickle the sanitised structures in a fixed order, matching what
    # update_vars() expects to read back.
    for i in [ d_changes, d_dsc, d_files, d_dsc_files,
               legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
202 ###########################################################################
# Set up the per-package template substitution mappings
def update_subst (self, reject_message = ""):
    # NOTE(review): a few lines are missing from this excerpt (the local
    # Subst binding and an else: header before the maintainer2047
    # fallback assignments); documented as-is.
    changes = self.pkg.changes
    # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
    if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
        changes["architecture"] = { "Unknown" : "" }
    # and maintainer2047 may not exist.
    if not changes.has_key("maintainer2047"):
        changes["maintainer2047"] = self.Cnf["Dinstall::MyEmailAddress"]

    Subst["__ARCHITECTURE__"] = " ".join(changes["architecture"].keys())
    Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
    Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "")

    # For source uploads the Changed-By field wins; otherwise Maintainer wins.
    if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
        Subst["__MAINTAINER_FROM__"] = changes["changedby2047"]
        Subst["__MAINTAINER_TO__"] = "%s, %s" % (changes["changedby2047"],
                                                 changes["maintainer2047"])
        Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown")
    Subst["__MAINTAINER_FROM__"] = changes["maintainer2047"]
    Subst["__MAINTAINER_TO__"] = changes["maintainer2047"]
    Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown")
    # Bcc the package-tracking server for sourceful uploads, if configured.
    if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
        Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])

    # Apply any global override of the Maintainer field
    if self.Cnf.get("Dinstall::OverrideMaintainer"):
        Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"]
        Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"]

    Subst["__REJECT_MESSAGE__"] = reject_message
    Subst["__SOURCE__"] = changes.get("source", "Unknown")
    Subst["__VERSION__"] = changes.get("version", "Unknown")
242 ###########################################################################
def build_summaries(self):
    # Build the long and short textual summaries used in the accept
    # mail and the operator prompt; returns (summary, short_summary).
    # NOTE(review): lines are missing from this excerpt (the byhand/new
    # flag assignments and the else: branch for normal files).
    changes = self.pkg.changes
    files = self.pkg.files

    byhand = summary = new = ""

    # changes["distribution"] may not exist in corner cases
    # (e.g. unreadable changes files)
    if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
        changes["distribution"] = {}

    override_summary ="";
    file_keys = files.keys()
    for file in file_keys:
        if files[file].has_key("byhand"):
            summary += file + " byhand\n"
        elif files[file].has_key("new"):
            summary += "(new) %s %s %s\n" % (file, files[file]["priority"], files[file]["section"])
            if files[file].has_key("othercomponents"):
                summary += "WARNING: Already present in %s distribution.\n" % (files[file]["othercomponents"])
            if files[file]["type"] == "deb":
                # Pull the long Description out of the .deb's control file.
                deb_fh = utils.open_file(file)
                summary += apt_pkg.ParseSection(apt_inst.debExtractControl(deb_fh))["Description"] + '\n'
        # Record the pool destination for this file.
        files[file]["pool name"] = utils.poolify (changes.get("source",""), files[file]["component"])
        destination = self.Cnf["Dir::PoolRoot"] + files[file]["pool name"] + file
        summary += file + "\n to " + destination + "\n"
        if files[file]["type"] in ["deb", "udeb", "dsc"]:
            override_summary += "%s - %s %s\n" % (file, files[file]["priority"], files[file]["section"])

    short_summary = summary

    # This is for direport's benefit...
    f = re_fdnic.sub("\n .\n", changes.get("changes",""))

    summary += "Changes: " + f

    summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"

    summary += self.announce(short_summary, 0)

    return (summary, short_summary)
292 ###########################################################################
def close_bugs (self, summary, action):
    # Close (or tag) the bugs listed in the changes Closes: field; when
    # 'action' is true, actually send the control/close mails.
    # NOTE(review): many lines are missing from this excerpt (Subst/Cnf
    # bindings, the per-bug loop headers, else: branches, the closing
    # triple-quote of the stable warning string, and the return).
    changes = self.pkg.changes
    bugs = changes["closes"].keys()

    if not self.nmu.is_an_nmu(self.pkg):
        if changes["distribution"].has_key("experimental"):
            # tag bugs as fixed-in-experimental for uploads to experimental
            summary += "Setting bugs to severity fixed: "
            summary += "%s " % (bug)
            control_message += "tag %s + fixed-in-experimental\n" % (bug)
            if action and control_message != "":
                Subst["__CONTROL_MESSAGE__"] = control_message
                mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-experimental-fixed")
                utils.send_mail (mail_message)
            self.Logger.log(["setting bugs to fixed"]+bugs)
        summary += "Closing bugs: "
        summary += "%s " % (bug)
        Subst["__BUG_NUMBER__"] = bug
        if changes["distribution"].has_key("stable"):
            Subst["__STABLE_WARNING__"] = """
Note that this package is not part of the released stable Debian
distribution. It may have dependencies on other unreleased software,
or other instabilities. Please take care if you wish to install it.
The update will eventually make its way into the next released Debian
        Subst["__STABLE_WARNING__"] = ""
        mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-close")
        utils.send_mail (mail_message)
        self.Logger.log(["closing bugs"]+bugs)
    summary += "Setting bugs to severity fixed: "
    summary += "%s " % (bug)
    control_message += "tag %s + fixed\n" % (bug)
    if action and control_message != "":
        Subst["__CONTROL_MESSAGE__"] = control_message
        mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-nmu-fixed")
        utils.send_mail (mail_message)
    self.Logger.log(["setting bugs to fixed"]+bugs)
356 ###########################################################################
def announce (self, short_summary, action):
    # Mail the "Accepted" announcement to each target suite's announce
    # list, then optionally close bugs.
    # NOTE(review): lines are missing from this excerpt (Subst/Cnf
    # bindings, lists_done/summary initialisation, the short-circuit
    # return, loop continue statements and the final return).
    changes = self.pkg.changes

    # Only do announcements for source uploads with a recent dpkg-dev installed
    if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):

    Subst["__SHORT_SUMMARY__"] = short_summary

    for dist in changes["distribution"].keys():
        list = Cnf.Find("Suite::%s::Announce" % (dist))
        # Skip suites without an announce list or already-mailed lists.
        if list == "" or lists_done.has_key(list):
        summary += "Announcing to %s\n" % (list)
        Subst["__ANNOUNCE_LIST_ADDRESS__"] = list
        # Bcc the package-tracking server for sourceful uploads.
        if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
            Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"])
        mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.announce")
        utils.send_mail (mail_message)

    if Cnf.FindB("Dinstall::CloseBugs"):
        summary = self.close_bugs(summary, action)
390 ###########################################################################
def accept (self, summary, short_summary):
    # Move a clean upload into the accepted queue, mail the uploader,
    # announce it, and record BTS version-tracking info.
    # NOTE(review): lines are missing from this excerpt (Cnf/Subst/dsc
    # local bindings, the f = files[file] binding in the debinfo loop,
    # and file close calls); documented as-is.
    files = self.pkg.files
    changes = self.pkg.changes
    changes_file = self.pkg.changes_file

    self.Logger.log(["Accepting changes",changes_file])

    # Persist the parsed state next to the .changes in accepted/.
    self.dump_vars(Cnf["Dir::Queue::Accepted"])

    # Move all the files into the accepted directory
    utils.move(changes_file, Cnf["Dir::Queue::Accepted"])
    file_keys = files.keys()
    for file in file_keys:
        utils.move(file, Cnf["Dir::Queue::Accepted"])
        self.accept_bytes += float(files[file]["size"])
    self.accept_count += 1

    # Send accept mail, announce to lists, close bugs and check for
    # override disparities
    if not Cnf["Dinstall::Options::No-Mail"]:
        Subst["__SUITE__"] = ""
        Subst["__SUMMARY__"] = summary
        mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
        utils.send_mail(mail_message)
        self.announce(short_summary, 1)

    ## Helper stuff for DebBugs Version Tracking
    if Cnf.Find("Dir::Queue::BTSVersionTrack"):
        # ??? once queue/* is cleared on *.d.o and/or reprocessed
        # the conditionalization on dsc["bts changelog"] should be
        # Write out the version history from the changelog
        if changes["architecture"].has_key("source") and \
           dsc.has_key("bts changelog"):
            # Write atomically: temp file first, then rename into place.
            temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                                dotprefix=1, perms=0644)
            version_history = utils.open_file(temp_filename, 'w')
            version_history.write(dsc["bts changelog"])
            version_history.close()
            filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                  changes_file[:-8]+".versions")
            os.rename(temp_filename, filename)

        # Write out the binary -> source mapping.
        temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                            dotprefix=1, perms=0644)
        debinfo = utils.open_file(temp_filename, 'w')
        for file in file_keys:
            if f["type"] == "deb":
                line = " ".join([f["package"], f["version"],
                                 f["architecture"], f["source package"],
                                 f["source version"]])
                debinfo.write(line+"\n")
        filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                              changes_file[:-8]+".debinfo")
        os.rename(temp_filename, filename)

    self.queue_build("accepted", Cnf["Dir::Queue::Accepted"])
460 ###########################################################################
def queue_build (self, queue, path):
    # Make the upload's files available to the auto-builders for each
    # queue-build suite — by copy for security queues, otherwise by
    # symlink — and register them in the queue_build table.
    # NOTE(review): lines are missing from this excerpt (Cnf binding,
    # continue statements, else: branches, the filename extraction for
    # the .orig.tar.gz, and the ql = q.getresult() binding).
    files = self.pkg.files
    changes = self.pkg.changes
    changes_file = self.pkg.changes_file

    file_keys = files.keys()

    ## Special support to enable clean auto-building of queued packages
    queue_id = database.get_or_set_queue_id(queue)

    # All the inserts/updates below happen in one transaction.
    self.projectB.query("BEGIN WORK")
    for suite in changes["distribution"].keys():
        if suite not in Cnf.ValueList("Dinstall::QueueBuildSuites"):
        suite_id = database.get_suite_id(suite)
        dest_dir = Cnf["Dir::QueueBuild"]
        # Security queue builds use a per-suite subdirectory.
        if Cnf.FindB("Dinstall::SecurityQueueBuild"):
            dest_dir = os.path.join(dest_dir, suite)
        for file in file_keys:
            src = os.path.join(path, file)
            dest = os.path.join(dest_dir, file)
            if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                # Copy it since the original won't be readable by www-data
                utils.copy(src, dest)
            # Create a symlink to it
            os.symlink(src, dest)
            # Add it to the list of packages for later processing by apt-ftparchive
            self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
        # If the .orig.tar.gz is in the pool, create a symlink to
        # it (if one doesn't already exist)
        if self.pkg.orig_tar_id:
            # Determine the .orig.tar.gz file name
            for dsc_file in self.pkg.dsc_files.keys():
                if dsc_file.endswith(".orig.tar.gz"):
            dest = os.path.join(dest_dir, filename)
            # If it doesn't exist, create a symlink
            if not os.path.exists(dest):
                # Find the .orig.tar.gz in the pool
                q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id))
                utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id))
                src = os.path.join(ql[0][0], ql[0][1])
                os.symlink(src, dest)
                # Add it to the list of packages for later processing by apt-ftparchive
                self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
            # if it does, update things to ensure it's not removed prematurely
            self.projectB.query("UPDATE queue_build SET in_queue = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id))

    self.projectB.query("COMMIT WORK")
518 ###########################################################################
def check_override (self):
    # Compare package-supplied section/priority against the override
    # entries cached by in_override_p() and mail the maintainer about
    # any disparities.
    # NOTE(review): lines are missing from this excerpt (Cnf/Subst
    # bindings, the summary initialisation, continue/return statements
    # and the "nothing to report" early exit).
    changes = self.pkg.changes
    files = self.pkg.files

    # Abandon the check if:
    # a) it's a non-sourceful upload
    # b) override disparity checks have been disabled
    # c) we're not sending mail
    if not changes["architecture"].has_key("source") or \
       not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
       Cnf["Dinstall::Options::No-Mail"]:

    file_keys = files.keys()
    for file in file_keys:
        # Only non-NEW .debs carry usable override information here.
        if not files[file].has_key("new") and files[file]["type"] == "deb":
            section = files[file]["section"]
            override_section = files[file]["override section"]
            if section.lower() != override_section.lower() and section != "-":
                # Ignore this; it's a common mistake and not worth whining about
                if section.lower() == "non-us/main" and override_section.lower() == "non-us":
                summary += "%s: package says section is %s, override says %s.\n" % (file, section, override_section)
            priority = files[file]["priority"]
            override_priority = files[file]["override priority"]
            if priority != override_priority and priority != "-":
                summary += "%s: package says priority is %s, override says %s.\n" % (file, priority, override_priority)

    Subst["__SUMMARY__"] = summary
    mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/process-unchecked.override-disparity")
    utils.send_mail(mail_message)
559 ###########################################################################
def force_reject (self, files):
    """Forcefully move files from the current directory to the
    reject directory. If any file already exists in the reject
    directory it will be moved to the morgue to make way for
    the new file.
    """
    # NOTE(review): lines are missing from this excerpt (the for-loop
    # header over 'files', try/except wrappers around the os.open calls,
    # and continue statements); documented as-is.
    # Skip any files which don't exist or which we don't have permission to copy.
    if os.access(file,os.R_OK) == 0:
    dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file)
    # O_EXCL: fail rather than silently clobber an existing reject file.
    dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
    # File exists? Let's try and move it to the morgue
    if errno.errorcode[e.errno] == 'EEXIST':
        morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file)
        # Find a free name in the morgue; give up after too many tries.
        morgue_file = utils.find_next_free(morgue_file)
        except utils.tried_too_hard_exc:
            # Something's either gone badly Pete Tong, or
            # someone is trying to exploit us.
            utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file))
        utils.move(dest_file, morgue_file, perms=0660)
        # Retry claiming the reject slot now that the old file is gone.
        dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file))
    # If we got here, we own the destination file, so we can
    # safely overwrite it.
    utils.move(file, dest_file, 1, perms=0660)
601 ###########################################################################
def do_reject (self, manual = 0, reject_message = ""):
    # Reject an upload: optionally let the operator edit the rejection
    # message in $EDITOR, move the files into the reject directory,
    # write a <pkg>.reason file and (optionally) mail the uploader.
    # NOTE(review): many lines are missing from this excerpt (the edit
    # loop structure, answer handling, Cnf/Subst/pkg local bindings,
    # the manual/automatic if-else, fd close and the return value).
    # If we weren't given a manual rejection message, spawn an
    # editor so the user can add one in...
    if manual and not reject_message:
        temp_filename = utils.temp_filename()
        editor = os.environ.get("EDITOR","vi")
        os.system("%s %s" % (editor, temp_filename))
        temp_fh = utils.open_file(temp_filename)
        reject_message = "".join(temp_fh.readlines())
        print "Reject message:"
        print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
        prompt = "[R]eject, Edit, Abandon, Quit ?"
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            # Empty input falls back to the bracketed default answer.
            m = re_default_answer.search(prompt)
            answer = answer[:1].upper()
        os.unlink(temp_filename)

    # The .reason file sits beside the rejected upload in reject/.
    reason_filename = pkg.changes_file[:-8] + ".reason"
    reason_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename

    # Move all the files into the reject directory
    reject_files = pkg.files.keys() + [pkg.changes_file]
    self.force_reject(reject_files)

    # If we fail here someone is probably trying to exploit the race
    # so let's just raise an exception ...
    if os.path.exists(reason_filename):
        os.unlink(reason_filename)
    reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)

    # Presumably the automatic (non-manual) branch — the branch header
    # is on a missing line of this excerpt.
    Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
    Subst["__MANUAL_REJECT_MESSAGE__"] = ""
    Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
    os.write(reason_fd, reject_message)
    reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")

    # Build up the rejection email
    user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"])
    Subst["__REJECTOR_ADDRESS__"] = user_email_address
    Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
    Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
    reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
    # Write the rejection email out as the <foo>.reason file
    os.write(reason_fd, reject_mail_message)

    # Send the rejection mail if appropriate
    if not Cnf["Dinstall::Options::No-Mail"]:
        utils.send_mail(reject_mail_message)

    self.Logger.log(["rejected", pkg.changes_file])
676 ################################################################################
# Ensure that source exists somewhere in the archive for the binary
# upload being processed.
# (1) exact match => 1.0-3
# (2) Bin-only NMU => 1.0-3+b1 , 1.0-3.1+b1
def source_exists (self, package, source_version, suites = ["any"]):
    # NOTE(review): lines are missing from this excerpt (completion of
    # the "any" query, the suite-expansion loop header around the map
    # test, and the return statements).
    # NOTE(review): mutable default argument (suites = ["any"]) — safe
    # only as long as no caller mutates it; worth a later fix.
    que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
    # source must exist in suite X, or in some other suite that's
    # mapped to X, recursively... silent-maps are counted too,
    # unreleased-maps aren't.
    maps = self.Cnf.ValueList("SuiteMappings")[:]
    maps = [ m.split() for m in maps ]
    maps = [ (x[1], x[2]) for x in maps
             if x[0] == "map" or x[0] == "silent-map" ]
    # Expand the suite set through the map chain.
    if x[1] in s and x[0] not in s:
    que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, " OR ".join(["su.suite_name = '%s'" % a for a in s]))
    q = self.projectB.query(que)

    # Reduce the query results to a list of version numbers
    ql = [ i[0] for i in q.getresult() ]

    # Try (1) above: exact version match.
    if source_version in ql:

    # Try (2) above: strip the "+bN" binary-NMU suffix and re-check.
    orig_source_version = re_bin_only_nmu.sub('', source_version)
    if orig_source_version in ql:
724 ################################################################################
def in_override_p (self, package, component, suite, binary_type, file):
    # Look the package up in the override table and cache the override
    # section/priority on files[file] for the later disparity check.
    # NOTE(review): lines are missing from this excerpt (the 'type'
    # assignment branches, unknown-suite handling, and the return).
    files = self.pkg.files

    if binary_type == "": # must be source

    # Override suite name; used for example with proposed-updates
    if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
        suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)]

    # Avoid <undef> on unknown distributions
    suite_id = database.get_suite_id(suite)
    component_id = database.get_component_id(component)
    type_id = database.get_override_type_id(type)

    # FIXME: nasty non-US specific hack
    if component.lower().startswith("non-us/"):
        component = component[7:]

    q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
                            % (package, suite_id, component_id, type_id))
    result = q.getresult()
    # If checking for a source package fall back on the binary override type
    if type == "dsc" and not result:
        deb_type_id = database.get_override_type_id("deb")
        udeb_type_id = database.get_override_type_id("udeb")
        q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
                                % (package, suite_id, component_id, deb_type_id, udeb_type_id))
        result = q.getresult()

    # Remember the section and priority so we can check them later if appropriate
    files[file]["override section"] = result[0][0]
    files[file]["override priority"] = result[0][1]
767 ################################################################################
def reject (self, str, prefix="Rejected: "):
    # Accumulate one rejection line into self.reject_message.
    # NOTE(review): one line appears to be missing between the def line
    # and the body in this excerpt (possibly an 'if str:' guard), so the
    # code is left untouched.
    # Unlike other rejects we add new lines first to avoid trailing
    # new lines when this message is passed back up to a caller.
    if self.reject_message:
        self.reject_message += "\n"
    self.reject_message += prefix + str
777 ################################################################################
def get_anyversion(self, query_result, suite):
    # Find the highest version in query_result belonging to 'suite' or
    # to any suite listed in its VersionChecks::Enhances setting.
    # NOTE(review): lines are missing from this excerpt (the anyversion
    # initialisation, the assignment inside the inner if, and the
    # return statement).
    anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
    for (v, s) in query_result:
        if s in [ x.lower() for x in anysuite ]:
            if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
788 ################################################################################
def cross_suite_version_check(self, query_result, file, new_version):
    """Ensure versions are newer than existing packages in target
    suites and that cross-suite version checking rules as
    set out in the conf file are satisfied."""
    # NOTE(review): lines are missing from this excerpt (notably the
    # per-row 'suite' binding and some else:/continue lines).

    # Check versions for each target suite
    for target_suite in self.pkg.changes["distribution"].keys():
        # NOTE(review): 'i.lower' (method object, not called) looks like
        # a bug — probably meant 'i.lower()' — but is left untouched
        # here since the surrounding lines are incomplete.
        must_be_newer_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
        must_be_older_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
        # Enforce "must be newer than target suite" even if conffile omits it
        if target_suite not in must_be_newer_than:
            must_be_newer_than.append(target_suite)
        for entry in query_result:
            existent_version = entry[0]
            if suite in must_be_newer_than and \
               apt_pkg.VersionCompare(new_version, existent_version) < 1:
                self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
            if suite in must_be_older_than and \
               apt_pkg.VersionCompare(new_version, existent_version) > -1:
                ch = self.pkg.changes
                # distribution-version lets an upload be propagated to a
                # replacement suite instead of being rejected outright.
                if ch.get('distribution-version', {}).has_key(suite):
                    # we really use the other suite, ignoring the conflicting one ...
                    addsuite = ch["distribution-version"][suite]
                    add_version = self.get_anyversion(query_result, addsuite)
                    target_version = self.get_anyversion(query_result, target_suite)
                    # not add_version can only happen if we map to a suite
                    # that doesn't enhance the suite we're propup'ing from.
                    # so "propup-ver x a b c; map a d" is a problem only if
                    # d doesn't enhance a.
                    #
                    # i think we could always propagate in this case, rather
                    # than complaining. either way, this isn't a REJECT issue
                    #
                    # And - we really should complain to the dorks who configured dak
                    self.reject("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite), "Warning: ")
                    self.pkg.changes.setdefault("propdistribution", {})
                    self.pkg.changes["propdistribution"][addsuite] = 1
                elif not target_version:
                    # not targets_version is true when the package is NEW
                    # we could just stick with the "...old version..." REJECT
                    self.reject("Won't propogate NEW packages.")
                elif apt_pkg.VersionCompare(new_version, add_version) < 0:
                    # propogation would be redundant. no need to reject though.
                    self.reject("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite), "Warning: ")
                elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
                     apt_pkg.VersionCompare(add_version, target_version) >= 0:
                    # Propagate: the new version supersedes both suites.
                    self.reject("Propogating upload to %s" % (addsuite), "Warning: ")
                    self.pkg.changes.setdefault("propdistribution", {})
                    self.pkg.changes["propdistribution"][addsuite] = 1
                    self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
853 ################################################################################
def check_binary_against_db(self, file):
    # Run the cross-suite version checks plus an "already in archive"
    # check for one binary; returns the accumulated reject message
    # (empty string when everything is fine).
    # NOTE(review): lines are missing from this excerpt (e.g. the
    # architecture table in the first query's FROM clause and the
    # emptiness test guarding the final reject call).
    self.reject_message = ""
    files = self.pkg.files

    # Ensure version is sane
    q = self.projectB.query("""
SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
 WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
                            % (files[file]["package"],
                               files[file]["architecture"]))
    self.cross_suite_version_check(q.getresult(), file, files[file]["version"])

    # Check for any existing copies of the file
    q = self.projectB.query("""
SELECT b.id FROM binaries b, architecture a
 WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
AND a.id = b.architecture"""
                            % (files[file]["package"],
                               files[file]["version"],
                               files[file]["architecture"]))
    self.reject("%s: can not overwrite existing copy already in the archive." % (file))

    return self.reject_message
882 ################################################################################
def check_source_against_db(self, file):
    # Run the cross-suite version checks for a source package; returns
    # the accumulated reject message (empty string when fine).
    # NOTE(review): the local 'dsc' binding appears to be on a missing
    # line of this excerpt.
    self.reject_message = ""

    # Ensure version is sane
    q = self.projectB.query("""
SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
 WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")))
    self.cross_suite_version_check(q.getresult(), file, dsc.get("version"))

    return self.reject_message
896 ################################################################################
899 # NB: this function can remove entries from the 'files' index [if
900 # the .orig.tar.gz is a duplicate of the one in the archive]; if
901 # you're iterating over 'files' and call this function as part of
902 # the loop, be sure to add a check to the top of the loop to
903 ensure you haven't just tried to dereference the deleted entry.
906 def check_dsc_against_db(self, file):
# Verify every file listed in the .dsc ('file') against the archive.
# For each dsc_file it locates the actual bytes -- in this upload, in
# the pool, or in a queue directory -- and compares md5sum/size against
# the values declared in the .dsc.  Side effects on self.pkg:
#   orig_tar_gz   -- path of the located .orig.tar.gz, -1 if known
#                    missing, or None if not applicable
#   orig_tar_id / orig_tar_location -- DB bookkeeping for install()
# May also delete entries from self.pkg.files (see the NB comment
# above this function).  Returns (reject_message, in_unchecked_path):
# the second element is non-None only when the .orig.tar.gz was found
# in the Unchecked queue.
# NOTE(review): many interior lines (loop headers over query results,
# 'else:' branches, 'break'/'continue' statements, the binding of
# 'suite_type') are elided in this view; the comments below describe
# only what is visible -- confirm against the full file.
907 self.reject_message = ""
908 files = self.pkg.files
909 dsc_files = self.pkg.dsc_files
910 legacy_source_untouchable = self.pkg.legacy_source_untouchable
911 self.pkg.orig_tar_gz = None
913 # Try and find all files mentioned in the .dsc. This has
914 # to work harder to cope with the multiple possible
915 # locations of an .orig.tar.gz.
916 for dsc_file in dsc_files.keys():
# Case 1: the file was shipped as part of this upload.
918 if files.has_key(dsc_file):
919 actual_md5 = files[dsc_file]["md5sum"]
920 actual_size = int(files[dsc_file]["size"])
921 found = "%s in incoming" % (dsc_file)
922 # Check the file does not already exist in the archive
# NOTE(review): LIKE pattern is built by %-interpolating the filename
# -- potential SQL injection / pattern-metacharacter breakage.
923 q = self.projectB.query("SELECT f.size, f.md5sum, l.path, f.filename FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
925 # Strip out anything that isn't '%s' or '/%s$'
# NOTE(review): the loop header binding 'i' to each result row is
# elided from this view.  i[3] is f.filename; keep only exact basename
# or '/basename' suffix matches.
927 if i[3] != dsc_file and i[3][-(len(dsc_file)+1):] != '/'+dsc_file:
930 # "[dak] has not broken them. [dak] has fixed a
931 # brokenness. Your crappy hack exploited a bug in
934 # "(Come on! I thought it was always obvious that
935 # one just doesn't release different files with
936 # the same name and version.)"
937 # -- ajk@ on d-devel@l.d.o
940 # Ignore exact matches for .orig.tar.gz
942 if dsc_file.endswith(".orig.tar.gz"):
# An .orig.tar.gz identical (size and md5) to the archived copy is
# dropped from the upload and the pooled path is used instead.
944 if files.has_key(dsc_file) and \
945 int(files[dsc_file]["size"]) == int(i[0]) and \
946 files[dsc_file]["md5sum"] == i[1]:
947 self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ")
# i[2] + i[3] is location path + filename of the archived copy.
949 self.pkg.orig_tar_gz = i[2] + i[3]
953 self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file))
# Case 2: an .orig.tar.gz not shipped in this upload -- look for it in
# the pool (and then the queue directories).
954 elif dsc_file.endswith(".orig.tar.gz"):
956 q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
958 # Strip out anything that isn't '%s' or '/%s$'
# NOTE(review): loop header over the result rows is elided; i[1] is
# f.filename here.
960 if i[1] != dsc_file and i[1][-(len(dsc_file)+1):] != '/'+dsc_file:
964 # Unfortunately, we may get more than one match here if,
965 # for example, the package was in potato but had an -sa
966 # upload in woody. So we need to choose the right one.
968 x = ql[0]; # default to something sane in case we don't match any or have only one
# NOTE(review): the 'if len(ql) > 1:' guard and 'for i in ql:' header
# are elided from this view.
972 old_file = i[0] + i[1]
973 old_file_fh = utils.open_file(old_file)
974 actual_md5 = apt_pkg.md5sum(old_file_fh)
# NOTE(review): old_file_fh is never closed in the visible lines --
# file-descriptor leak; confirm and add .close() in the full file.
976 actual_size = os.stat(old_file)[stat.ST_SIZE]
# A candidate whose md5/size match the .dsc becomes the chosen 'x';
# otherwise the row is marked untouchable (i[3] is f.id).
977 if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
980 legacy_source_untouchable[i[3]] = ""
# Re-read md5/size from the chosen match 'x' (path + filename).
982 old_file = x[0] + x[1]
983 old_file_fh = utils.open_file(old_file)
984 actual_md5 = apt_pkg.md5sum(old_file_fh)
# NOTE(review): this old_file_fh is also never closed in the visible
# lines -- same fd leak.
986 actual_size = os.stat(old_file)[stat.ST_SIZE]
989 dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
990 # See install() in process-accepted...
991 self.pkg.orig_tar_id = x[3]
992 self.pkg.orig_tar_gz = old_file
# NOTE(review): 'suite_type' is not bound in the visible lines --
# presumably derived from l.type (x[2]) on an elided line; confirm.
993 if suite_type == "legacy" or suite_type == "legacy-mixed":
994 self.pkg.orig_tar_location = "legacy"
# x[4] is the location id (l.id) for non-legacy pools.
996 self.pkg.orig_tar_location = x[4]
998 # Not there? Check the queue directories...
1000 in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file)
1001 # See process_it() in 'dak process-unchecked' for explanation of this
# Found in Unchecked: return early with the path so the caller can
# handle the in-flight file specially.
1002 if os.path.exists(in_unchecked):
1003 return (self.reject_message, in_unchecked)
1005 for dir in [ "Accepted", "New", "Byhand" ]:
1006 in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (dir)],dsc_file)
1007 if os.path.exists(in_otherdir):
1008 in_otherdir_fh = utils.open_file(in_otherdir)
1009 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
1010 in_otherdir_fh.close()
1011 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
1013 self.pkg.orig_tar_gz = in_otherdir
# Nowhere to be found: reject and record the sentinel -1, which
# distinguishes "known missing" from "not applicable" (None).
1016 self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file))
1017 self.pkg.orig_tar_gz = -1
1020 self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file))
# Finally compare whatever copy we settled on against the .dsc's
# declared md5sum and size.
1022 if actual_md5 != dsc_files[dsc_file]["md5sum"]:
1023 self.reject("md5sum for %s doesn't match %s." % (found, file))
1024 if actual_size != int(dsc_files[dsc_file]["size"]):
1025 self.reject("size for %s doesn't match %s." % (found, file))
1027 return (self.reject_message, None)
1029 def do_query(self, q):
# Debug wrapper around projectB.query(): write the SQL to stderr, run
# the query, and log the elapsed wall-clock time.
1030 sys.stderr.write("query: \"%s\" ... " % (q))
1031 before = time.time()
1032 r = self.projectB.query(q)
1033 time_diff = time.time()-before
1034 sys.stderr.write("took %.3f seconds.\n" % (time_diff))
# NOTE(review): the result 'r' is computed but no return statement is
# visible in this view -- presumably 'return r' follows; confirm.