3 # Queue utility functions for dak
4 # Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 2 of the License, or
9 # (at your option) any later version.
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software
18 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
20 ###############################################################################
22 import cPickle, errno, os, pg, re, stat, sys, time
23 import apt_inst, apt_pkg
24 import utils, database
28 ###############################################################################
# Precompiled regular expressions used throughout this module.
re_isanum = re.compile(r"^\d+$")               # an entirely numeric string (e.g. a bug number)
re_default_answer = re.compile(r"\[(.*)\]")    # the default choice marked in a "[R]eject ..." prompt
re_fdnic = re.compile(r"\n\n")                 # blank-line paragraph breaks in changelog text
re_bin_only_nmu = re.compile(r"\+b\d+$")       # the "+bN" suffix of a binary-only NMU version
35 ###############################################################################
37 # Convenience wrapper to carry around all the package information in
def __init__(self, **kwds):
    """Create a package-state holder; every keyword argument becomes
    an attribute of the new object."""
    vars(self).update(kwds)
def update(self, **kwds):
    """Add or overwrite attributes on this object from the given
    keyword arguments."""
    vars(self).update(kwds)
46 ###############################################################################
49 # Read in the group maintainer override file
# Constructor for the NMU-check helper: builds self.group_maint, a set
# (dict used as a set) of lower-cased entries read from the configured
# group-override file, with trailing comments stripped per line.
# NOTE(review): this listing has gaps -- original lines 51-52 and 58 are
# missing (presumably the self.group_maint initialisation and an
# empty-line guard).  Do not assume the body shown here is complete.
50 def __init__ (self, Cnf):
53 if Cnf.get("Dinstall::GroupOverrideFilename"):
54 filename = Cnf["Dir::Override"] + Cnf["Dinstall::GroupOverrideFilename"]
55 file = utils.open_file(filename)
56 for line in file.readlines():
# Strip in-line comments, normalise to lower case, trim whitespace.
57 line = utils.re_comments.sub('', line).lower().strip()
59 self.group_maint[line] = 1
# Decide whether the upload described by 'pkg' is a Non-Maintainer
# Upload.  Compares the .dsc Maintainer (and Uploaders) against the
# .changes Changed-By fields; group-maintained packages are never NMUs.
# NOTE(review): several original lines are missing from this listing
# (63-66, 73-74, 77-78, 82-83 and the 86-89 return statements), so the
# unpacking of 'dsc'/'changes' and the actual return values are not
# visible here -- verify against the full source before relying on this.
62 def is_an_nmu (self, pkg):
# Normalise the .dsc maintainer via utils.fix_maintainer; falls back to
# Dinstall::MyEmailAddress when the field is absent.
67 i = utils.fix_maintainer (dsc.get("maintainer",
68 Cnf["Dinstall::MyEmailAddress"]).lower())
69 (dsc_rfc822, dsc_rfc2047, dsc_name, dsc_email) = i
70 # changes["changedbyname"] == dsc_name is probably never true, but better safe than sorry
71 if dsc_name == changes["maintainername"].lower() and \
72 (changes["changedby822"] == "" or changes["changedbyname"].lower() == dsc_name):
# An upload by someone listed in Uploaders: is not an NMU either.
75 if dsc.has_key("uploaders"):
76 uploaders = dsc["uploaders"].lower().split(",")
79 (rfc822, rfc2047, name, email) = utils.fix_maintainer (i.strip())
80 uploadernames[name] = ""
81 if uploadernames.has_key(changes["changedbyname"].lower()):
84 # Some group maintained packages (e.g. Debian QA) are never NMU's
85 if self.group_maint.has_key(changes["maintaineremail"].lower()):
90 ###############################################################################
# Upload-handler constructor: resets per-run accounting, creates the
# empty per-package state holder (Pkg), seeds the global template
# substitution map from Dinstall::* config values, and opens the
# projectB database connection.
# NOTE(review): original lines 95, 97-98, 102, 109 and 112 are missing
# from this listing (presumably self.Cnf, the NMU-helper instantiation
# and self.accept_count initialisation) -- do not assume completeness.
94 def __init__(self, Cnf):
96 # Read in the group-maint override file
99 self.accept_bytes = 0L
100 self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
101 legacy_source_untouchable = {})
103 # Initialize the substitution template mapping global
104 Subst = self.Subst = {}
105 Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"]
106 Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"]
107 Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"]
108 Subst["__DAK_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
# Open the archive database and initialise the database helper module.
110 self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
111 database.init(Cnf, self.projectB)
113 ###########################################################################
def init_vars (self):
    """Reset the per-upload state carried on self.pkg.

    Empties each of the state dictionaries (changes, dsc, files,
    dsc_files, legacy_source_untouchable) in place and resets the
    .orig.tar.gz bookkeeping attributes to their pristine values.
    """
    # getattr() replaces the old `exec "self.pkg.%s.clear();" % (i)`
    # string trick: same effect (in-place clear of each dict), but no
    # dynamic code execution and it survives static analysis.
    for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
        getattr(self.pkg, i).clear()
    self.pkg.orig_tar_id = None
    self.pkg.orig_tar_location = ""
    self.pkg.orig_tar_gz = None
122 ###########################################################################
# Re-load previously dumped per-upload state from the companion
# "<changes>.dak" pickle file (written by dump_vars below): the five
# state dictionaries are update()d in place, then orig_tar_id and
# orig_tar_location are restored.  The unpickle order must match the
# dump order in dump_vars.
# NOTE(review): the tail of this method (original lines 132-133,
# presumably dump_file.close()) is missing from this listing.
124 def update_vars (self):
# The .dak file sits next to the .changes file ([:-8] strips ".changes").
125 dump_filename = self.pkg.changes_file[:-8]+".dak"
126 dump_file = utils.open_file(dump_filename)
127 p = cPickle.Unpickler(dump_file)
128 for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
129 exec "self.pkg.%s.update(p.load());" % (i)
130 for i in [ "orig_tar_id", "orig_tar_location" ]:
131 exec "self.pkg.%s = p.load();" % (i)
134 ###########################################################################
136 # This could just dump the dictionaries as is, but I'd like to
137 # avoid this so there's some idea of what process-accepted &
138 # process-new use from process-unchecked
# Serialise the in-memory package state to "<changes>.dak" in dest_dir
# so process-accepted / process-new can re-load it (see update_vars).
# Only a whitelisted subset of the files / changes / dsc / dsc_files
# keys is written, deliberately, so the readers know what to expect.
# NOTE(review): this listing has gaps (original lines 146, 148,
# 153-155, 158-159, 161, 169, 181, 184-186, 196 and 199-201 are
# missing -- presumably the try/except around chmod, the d_* dict
# initialisations, the dsc whitelist body and the final p.dump() loop
# body).  Do not assume the code shown here is complete.
140 def dump_vars(self, dest_dir):
# Bind local aliases (changes, dsc, files, ...) to self.pkg attributes.
141 for i in [ "changes", "dsc", "files", "dsc_files",
142 "legacy_source_untouchable", "orig_tar_id", "orig_tar_location" ]:
143 exec "%s = self.pkg.%s;" % (i,i)
144 dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".dak")
145 dump_file = utils.open_file(dump_filename, 'w')
# Make the dump group-readable; a failed chmod is tolerated unless the
# file ended up world readable.
147 os.chmod(dump_filename, 0660)
149 if errno.errorcode[e.errno] == 'EPERM':
150 perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE])
151 if perms & stat.S_IROTH:
152 utils.fubar("%s is world readable and chmod failed." % (dump_filename))
# Binary pickle protocol 1.
156 p = cPickle.Pickler(dump_file, 1)
157 for i in [ "d_changes", "d_dsc", "d_files", "d_dsc_files" ]:
160 for file in files.keys():
# Whitelist of per-file keys worth persisting.
162 for i in [ "package", "version", "architecture", "type", "size",
163 "md5sum", "component", "location id", "source package",
164 "source version", "maintainer", "dbtype", "files id",
165 "new", "section", "priority", "othercomponents",
166 "pool name", "original component" ]:
167 if files[file].has_key(i):
168 d_files[file][i] = files[file][i]
170 # Mandatory changes fields
171 for i in [ "distribution", "source", "architecture", "version",
172 "maintainer", "urgency", "fingerprint", "changedby822",
173 "changedby2047", "changedbyname", "maintainer822",
174 "maintainer2047", "maintainername", "maintaineremail",
175 "closes", "changes" ]:
176 d_changes[i] = changes[i]
177 # Optional changes fields
178 for i in [ "changed-by", "filecontents", "format", "process-new note", "adv id", "distribution-version" ]:
179 if changes.has_key(i):
180 d_changes[i] = changes[i]
182 for i in [ "source", "version", "maintainer", "fingerprint",
183 "uploaders", "bts changelog" ]:
187 for file in dsc_files.keys():
188 d_dsc_files[file] = {}
189 # Mandatory dsc_files fields
190 for i in [ "size", "md5sum" ]:
191 d_dsc_files[file][i] = dsc_files[file][i]
192 # Optional dsc_files fields
193 for i in [ "files id" ]:
194 if dsc_files[file].has_key(i):
195 d_dsc_files[file][i] = dsc_files[file][i]
# Dump order here must match the load order in update_vars().
197 for i in [ d_changes, d_dsc, d_files, d_dsc_files,
198 legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
202 ###########################################################################
204 # Set up the per-package template substitution mappings
# Refresh the per-package entries of the template substitution map
# (self.Subst) from the current .changes data: architecture, filename,
# maintainer From/To addresses, reject message, source and version.
# NOTE(review): original lines 207, 215, 219, 226 (presumably the
# `Subst = self.Subst` alias, blanks and the `else:` of the
# Changed-By/Maintainer branch), 232, 237 and 241 are missing from this
# listing -- the branch structure must be confirmed against full source.
206 def update_subst (self, reject_message = ""):
208 changes = self.pkg.changes
209 # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
210 if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
211 changes["architecture"] = { "Unknown" : "" }
212 # and maintainer2047 may not exist.
213 if not changes.has_key("maintainer2047"):
214 changes["maintainer2047"] = self.Cnf["Dinstall::MyEmailAddress"]
216 Subst["__ARCHITECTURE__"] = " ".join(changes["architecture"].keys())
217 Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
218 Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "")
220 # For source uploads the Changed-By field wins; otherwise Maintainer wins.
221 if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
222 Subst["__MAINTAINER_FROM__"] = changes["changedby2047"]
223 Subst["__MAINTAINER_TO__"] = "%s, %s" % (changes["changedby2047"],
224 changes["maintainer2047"])
225 Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown")
227 Subst["__MAINTAINER_FROM__"] = changes["maintainer2047"]
228 Subst["__MAINTAINER_TO__"] = changes["maintainer2047"]
229 Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown")
# Bcc the package-tracking server on sourceful uploads when configured.
230 if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
231 Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])
233 # Apply any global override of the Maintainer field
234 if self.Cnf.get("Dinstall::OverrideMaintainer"):
235 Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"]
236 Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"]
238 Subst["__REJECT_MESSAGE__"] = reject_message
239 Subst["__SOURCE__"] = changes.get("source", "Unknown")
240 Subst["__VERSION__"] = changes.get("version", "Unknown")
242 ###########################################################################
# Build the human-readable summaries of this upload: a short summary
# (per-file lines, byhand/new markers, .deb descriptions, pool
# destinations) and a long summary that additionally carries the
# changelog text, override entries and any announcement notes.
# Returns (summary, short_summary).
# NOTE(review): this listing has gaps (original lines 247, 249, 254,
# 257, 260, 263, 270-271, 281, 283, 286-287, 289, 291, 293 are
# missing), so the byhand/new bookkeeping and the conditions guarding
# the trailing summary additions are not fully visible here.
244 def build_summaries(self):
245 changes = self.pkg.changes
246 files = self.pkg.files
248 byhand = summary = new = ""
250 # changes["distribution"] may not exist in corner cases
251 # (e.g. unreadable changes files)
252 if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
253 changes["distribution"] = {}
255 override_summary ="";
256 file_keys = files.keys()
258 for file in file_keys:
259 if files[file].has_key("byhand"):
261 summary += file + " byhand\n"
262 elif files[file].has_key("new"):
264 summary += "(new) %s %s %s\n" % (file, files[file]["priority"], files[file]["section"])
265 if files[file].has_key("othercomponents"):
266 summary += "WARNING: Already present in %s distribution.\n" % (files[file]["othercomponents"])
# For new .debs, include the control file's Description in the summary.
267 if files[file]["type"] == "deb":
268 deb_fh = utils.open_file(file)
269 summary += apt_pkg.ParseSection(apt_inst.debExtractControl(deb_fh))["Description"] + '\n'
272 files[file]["pool name"] = utils.poolify (changes.get("source",""), files[file]["component"])
273 destination = self.Cnf["Dir::PoolRoot"] + files[file]["pool name"] + file
274 summary += file + "\n to " + destination + "\n"
275 if files[file]["type"] in ["deb", "udeb", "dsc"]:
276 # (queue/unchecked), there we have override entries already, use them
277 # (process-new), there we don't have override entries, use the newly generated ones.
278 override_prio = files[file].get("override priority", files[file]["priority"])
279 override_sect = files[file].get("override section", files[file]["section"])
280 override_summary += "%s - %s %s\n" % (file, override_prio, override_sect)
282 short_summary = summary
284 # This is for direport's benefit...
285 f = re_fdnic.sub("\n .\n", changes.get("changes",""))
288 summary += "Changes: " + f
290 summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
292 summary += self.announce(short_summary, 0)
294 return (summary, short_summary)
296 ###########################################################################
# Process the bugs this upload Closes:.  Non-NMU uploads get their bugs
# closed (with a stability warning for non-stable distributions);
# uploads to experimental and NMUs instead get the bugs tagged
# fixed-in-experimental / fixed via a control@ message.  When 'action'
# is true the mails are actually sent and logged.  Returns the
# augmented summary string.
# NOTE(review): many original lines are missing from this listing
# (300-308, 313-314, 321, 323-325, 327, 329, 337-338, 342, 344-345,
# 347-348, 355, 357-359) -- the early-return guards, the per-bug loop
# headers and the final return are not visible; treat this as a partial
# view.  Lines 332-336 below are the interior of a triple-quoted string
# (its closing quotes fall in a missing line), so no annotations are
# inserted in that region.
298 def close_bugs (self, summary, action):
299 changes = self.pkg.changes
303 bugs = changes["closes"].keys()
309 if not self.nmu.is_an_nmu(self.pkg):
310 if changes["distribution"].has_key("experimental"):
311 # tag bugs as fixed-in-experimental for uploads to experimental
312 summary += "Setting bugs to severity fixed: "
315 summary += "%s " % (bug)
316 control_message += "tag %s + fixed-in-experimental\n" % (bug)
317 if action and control_message != "":
318 Subst["__CONTROL_MESSAGE__"] = control_message
319 mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-experimental-fixed")
320 utils.send_mail (mail_message)
322 self.Logger.log(["setting bugs to fixed"]+bugs)
326 summary += "Closing bugs: "
328 summary += "%s " % (bug)
330 Subst["__BUG_NUMBER__"] = bug
331 if changes["distribution"].has_key("stable"):
332 Subst["__STABLE_WARNING__"] = """
333 Note that this package is not part of the released stable Debian
334 distribution. It may have dependencies on other unreleased software,
335 or other instabilities. Please take care if you wish to install it.
336 The update will eventually make its way into the next released Debian
339 Subst["__STABLE_WARNING__"] = ""
340 mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-close")
341 utils.send_mail (mail_message)
343 self.Logger.log(["closing bugs"]+bugs)
346 summary += "Setting bugs to severity fixed: "
349 summary += "%s " % (bug)
350 control_message += "tag %s + fixed\n" % (bug)
351 if action and control_message != "":
352 Subst["__CONTROL_MESSAGE__"] = control_message
353 mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-nmu-fixed")
354 utils.send_mail (mail_message)
356 self.Logger.log(["setting bugs to fixed"]+bugs)
360 ###########################################################################
# Send the upload announcement to each suite's configured Announce
# list (deduplicated via lists_done), Bcc'ing the tracking server when
# configured, then optionally hand off to close_bugs.  Returns the
# augmented summary string.
# NOTE(review): original lines 363-364, 366, 369-372, 374, 378-379,
# 381-382, 388 and 391-393 are missing (guards, the lists_done
# initialisation, the action check around sending and the return) --
# partial view only.
362 def announce (self, short_summary, action):
365 changes = self.pkg.changes
367 # Only do announcements for source uploads with a recent dpkg-dev installed
368 if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):
373 Subst["__SHORT_SUMMARY__"] = short_summary
375 for dist in changes["distribution"].keys():
376 list = Cnf.Find("Suite::%s::Announce" % (dist))
# Skip suites with no announce list, and lists we already mailed.
377 if list == "" or lists_done.has_key(list):
380 summary += "Announcing to %s\n" % (list)
383 Subst["__ANNOUNCE_LIST_ADDRESS__"] = list
384 if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
385 Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"])
386 mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.announce")
387 utils.send_mail (mail_message)
389 if Cnf.FindB("Dinstall::CloseBugs"):
390 summary = self.close_bugs(summary, action)
394 ###########################################################################
# Accept an upload: dump the state file, move the .changes and all its
# files into Dir::Queue::Accepted, update accept accounting, send the
# accepted mail / announcement, write the DebBugs version-tracking
# side files (.versions and .debinfo), and register the files for the
# auto-build queue.
# NOTE(review): this listing has gaps (original lines 397-398,
# 402-404, 406, 408, 416, 425-426, 431-432, 436, 445, 451, 457 and 461
# are missing -- among them the local aliases for Cnf/Subst/dsc, the
# chmod/permission handling and the `f = files[file]` binding used at
# line 452 below).  Partial view; verify against full source.
396 def accept (self, summary, short_summary):
399 files = self.pkg.files
400 changes = self.pkg.changes
401 changes_file = self.pkg.changes_file
405 self.Logger.log(["Accepting changes",changes_file])
407 self.dump_vars(Cnf["Dir::Queue::Accepted"])
409 # Move all the files into the accepted directory
410 utils.move(changes_file, Cnf["Dir::Queue::Accepted"])
411 file_keys = files.keys()
412 for file in file_keys:
413 utils.move(file, Cnf["Dir::Queue::Accepted"])
414 self.accept_bytes += float(files[file]["size"])
415 self.accept_count += 1
417 # Send accept mail, announce to lists, close bugs and check for
418 # override disparities
419 if not Cnf["Dinstall::Options::No-Mail"]:
420 Subst["__SUITE__"] = ""
421 Subst["__SUMMARY__"] = summary
422 mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
423 utils.send_mail(mail_message)
424 self.announce(short_summary, 1)
427 ## Helper stuff for DebBugs Version Tracking
428 if Cnf.Find("Dir::Queue::BTSVersionTrack"):
429 # ??? once queue/* is cleared on *.d.o and/or reprocessed
430 # the conditionalization on dsc["bts changelog"] should be
433 # Write out the version history from the changelog
434 if changes["architecture"].has_key("source") and \
435 dsc.has_key("bts changelog"):
# Write to a dot-prefixed temp file first, then rename into place so
# readers never see a partially-written .versions file.
437 temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
438 dotprefix=1, perms=0644)
439 version_history = utils.open_file(temp_filename, 'w')
440 version_history.write(dsc["bts changelog"])
441 version_history.close()
442 filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
443 changes_file[:-8]+".versions")
444 os.rename(temp_filename, filename)
446 # Write out the binary -> source mapping.
447 temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
448 dotprefix=1, perms=0644)
449 debinfo = utils.open_file(temp_filename, 'w')
450 for file in file_keys:
452 if f["type"] == "deb":
453 line = " ".join([f["package"], f["version"],
454 f["architecture"], f["source package"],
455 f["source version"]])
456 debinfo.write(line+"\n")
458 filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
459 changes_file[:-8]+".debinfo")
460 os.rename(temp_filename, filename)
462 self.queue_build("accepted", Cnf["Dir::Queue::Accepted"])
464 ###########################################################################
# Register this upload's files for clean auto-building: for every
# target suite listed in Dinstall::QueueBuildSuites, copy (security
# archive) or symlink (normal archive) each file into Dir::QueueBuild
# and record it in the queue_build table; also ensure the pooled
# .orig.tar.gz is symlinked and kept alive.
# NOTE(review): original lines 467-468, 472, 474, 477, 481, 492, 503,
# 509-510 and 516-519 are missing (among them the Cnf alias, the
# `continue` for unhandled suites, the `else:` branches, the
# `filename = ...` binding used at line 504, and the ql result fetch
# checked at line 511).  Partial view; SQL is built by string
# interpolation -- values come from the archive DB/config, not user
# input, but verify before reuse.
466 def queue_build (self, queue, path):
469 files = self.pkg.files
470 changes = self.pkg.changes
471 changes_file = self.pkg.changes_file
473 file_keys = files.keys()
475 ## Special support to enable clean auto-building of queued packages
476 queue_id = database.get_or_set_queue_id(queue)
478 self.projectB.query("BEGIN WORK")
479 for suite in changes["distribution"].keys():
480 if suite not in Cnf.ValueList("Dinstall::QueueBuildSuites"):
482 suite_id = database.get_suite_id(suite)
483 dest_dir = Cnf["Dir::QueueBuild"]
484 if Cnf.FindB("Dinstall::SecurityQueueBuild"):
485 dest_dir = os.path.join(dest_dir, suite)
486 for file in file_keys:
487 src = os.path.join(path, file)
488 dest = os.path.join(dest_dir, file)
489 if Cnf.FindB("Dinstall::SecurityQueueBuild"):
490 # Copy it since the original won't be readable by www-data
491 utils.copy(src, dest)
493 # Create a symlink to it
494 os.symlink(src, dest)
495 # Add it to the list of packages for later processing by apt-ftparchive
496 self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
497 # If the .orig.tar.gz is in the pool, create a symlink to
498 # it (if one doesn't already exist)
499 if self.pkg.orig_tar_id:
500 # Determine the .orig.tar.gz file name
501 for dsc_file in self.pkg.dsc_files.keys():
502 if dsc_file.endswith(".orig.tar.gz"):
504 dest = os.path.join(dest_dir, filename)
505 # If it doesn't exist, create a symlink
506 if not os.path.exists(dest):
507 # Find the .orig.tar.gz in the pool
508 q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id))
511 utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id))
512 src = os.path.join(ql[0][0], ql[0][1])
513 os.symlink(src, dest)
514 # Add it to the list of packages for later processing by apt-ftparchive
515 self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
516 # if it does, update things to ensure it's not removed prematurely
518 self.projectB.query("UPDATE queue_build SET in_queue = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id))
520 self.projectB.query("COMMIT WORK")
522 ###########################################################################
# Compare the section/priority declared by each binary package against
# the archive override entries and mail the maintainer about any
# disparities.  Skipped entirely for binary-only uploads, when the
# disparity check is disabled, or when mail is suppressed.
# NOTE(review): original lines 525, 528-529, 537-539, 541, 549 and
# 555-558 are missing (the summary initialisation, the early return,
# the `continue` after the non-us special case, and the guard/Subst
# setup before the mail is built).  Partial view.
524 def check_override (self):
526 changes = self.pkg.changes
527 files = self.pkg.files
530 # Abandon the check if:
531 # a) it's a non-sourceful upload
532 # b) override disparity checks have been disabled
533 # c) we're not sending mail
534 if not changes.has_key("architecture") or \
535 not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
536 Cnf["Dinstall::Options::No-Mail"]:
540 file_keys = files.keys()
542 for file in file_keys:
# Only already-known (non-NEW) binary packages have override entries
# worth comparing against.
543 if not files[file].has_key("new") and files[file]["type"] == "deb":
544 section = files[file]["section"]
545 override_section = files[file]["override section"]
546 if section.lower() != override_section.lower() and section != "-":
547 # Ignore this; it's a common mistake and not worth whining about
548 if section.lower() == "non-us/main" and override_section.lower() == "non-us":
550 summary += "%s: package says section is %s, override says %s.\n" % (file, section, override_section)
551 priority = files[file]["priority"]
552 override_priority = files[file]["override priority"]
553 if priority != override_priority and priority != "-":
554 summary += "%s: package says priority is %s, override says %s.\n" % (file, priority, override_priority)
559 Subst["__SUMMARY__"] = summary
560 mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/process-unchecked.override-disparity")
561 utils.send_mail(mail_message)
563 ###########################################################################
# Forcefully move the given files into Dir::Queue::Reject, claiming
# each destination with an exclusive O_CREAT|O_EXCL open first; an
# existing reject-dir file is shunted to the morgue to make way.
# NOTE(review): the visible docstring below is truncated (its closing
# quotes and original lines 569-573 are missing, as are 576, 578, 580,
# 584, 590, 592, 594-595 and 597-599 -- the for-file loop header, the
# `continue`s, the try:/except OSError wrappers and the fd close).
# Partial view; no annotations are inserted inside the docstring.
565 def force_reject (self, files):
566 """Forcefully move files from the current directory to the
567 reject directory. If any file already exists in the reject
568 directory it will be moved to the morgue to make way for
574 # Skip any files which don't exist or which we don't have permission to copy.
575 if os.access(file,os.R_OK) == 0:
577 dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file)
# O_EXCL ensures we only proceed if we created (and thus own) the file.
579 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
581 # File exists? Let's try and move it to the morgue
582 if errno.errorcode[e.errno] == 'EEXIST':
583 morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file)
585 morgue_file = utils.find_next_free(morgue_file)
586 except utils.tried_too_hard_exc:
587 # Something's either gone badly Pete Tong, or
588 # someone is trying to exploit us.
589 utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file))
591 utils.move(dest_file, morgue_file, perms=0660)
# Retry the exclusive claim after clearing the old file out of the way.
593 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
596 utils.warn("**WARNING** failed to claim %s in the reject directory." % (file))
600 # If we got here, we own the destination file, so we can
601 # safely overwrite it.
602 utils.move(file, dest_file, 1, perms=0660)
605 ###########################################################################
# Reject an upload: optionally collect a manual rejection message via
# $EDITOR (with a Reject/Edit/Abandon/Quit prompt loop), move the
# .changes and its files to the reject directory via force_reject,
# write a "<changes>.reason" file containing the rejection mail, and
# send that mail unless No-Mail is set.
# NOTE(review): this listing has a large gap -- original lines 613-614,
# 618, 622, 626-627 and the whole 630-640 range are missing (the
# Abandon/Quit handling, the local pkg/Cnf/Subst aliases and the
# manual-vs-automatic branch header at ~653-654 are also not visible).
# The if/else structure around lines 655-667 therefore cannot be
# confirmed from this view.
607 def do_reject (self, manual = 0, reject_message = ""):
608 # If we weren't given a manual rejection message, spawn an
609 # editor so the user can add one in...
610 if manual and not reject_message:
611 temp_filename = utils.temp_filename()
612 editor = os.environ.get("EDITOR","vi")
615 os.system("%s %s" % (editor, temp_filename))
616 temp_fh = utils.open_file(temp_filename)
617 reject_message = "".join(temp_fh.readlines())
619 print "Reject message:"
620 print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
621 prompt = "[R]eject, Edit, Abandon, Quit ?"
623 while prompt.find(answer) == -1:
624 answer = utils.our_raw_input(prompt)
625 m = re_default_answer.search(prompt)
628 answer = answer[:1].upper()
629 os.unlink(temp_filename)
641 reason_filename = pkg.changes_file[:-8] + ".reason"
642 reason_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename
644 # Move all the files into the reject directory
645 reject_files = pkg.files.keys() + [pkg.changes_file]
646 self.force_reject(reject_files)
648 # If we fail here someone is probably trying to exploit the race
649 # so let's just raise an exception ...
650 if os.path.exists(reason_filename):
651 os.unlink(reason_filename)
652 reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
# Automatic rejection: canned headers, message written verbatim.
655 Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
656 Subst["__MANUAL_REJECT_MESSAGE__"] = ""
657 Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
658 os.write(reason_fd, reject_message)
659 reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
661 # Build up the rejection email
662 user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"])
664 Subst["__REJECTOR_ADDRESS__"] = user_email_address
665 Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
666 Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
667 reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
668 # Write the rejection email out as the <foo>.reason file
669 os.write(reason_fd, reject_mail_message)
673 # Send the rejection mail if appropriate
674 if not Cnf["Dinstall::Options::No-Mail"]:
675 utils.send_mail(reject_mail_message)
677 self.Logger.log(["rejected", pkg.changes_file])
680 ################################################################################
682 # Ensure that source exists somewhere in the archive for the binary
683 # upload being processed.
685 # (1) exact match => 1.0-3
686 # (2) Bin-only NMU => 1.0-3+b1 , 1.0-3.1+b1
# Check that source for a binary upload exists in the archive:
# (1) an exact version match, or (2) the version with the binary-only
# NMU suffix ("+bN") stripped.  Suite names are expanded through the
# SuiteMappings config (map / silent-map) before querying.
# NOTE(review): `suites = ["any"]` is a mutable default argument --
# safe only if the list is never mutated in the missing lines; verify.
# Original lines 689-691, 693-694, 699, 703-704, 706-707, 710,
# 713-714, 716-718 and 721-727 are missing (the "any" fast path, the
# suite-expansion loop body and the return statements).  Partial view.
688 def source_exists (self, package, source_version, suites = ["any"]):
692 que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
695 # source must exist in suite X, or in some other suite that's
696 # mapped to X, recursively... silent-maps are counted too,
697 # unreleased-maps aren't.
698 maps = self.Cnf.ValueList("SuiteMappings")[:]
700 maps = [ m.split() for m in maps ]
701 maps = [ (x[1], x[2]) for x in maps
702 if x[0] == "map" or x[0] == "silent-map" ]
705 if x[1] in s and x[0] not in s:
708 que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, " OR ".join(["su.suite_name = '%s'" % a for a in s]))
709 q = self.projectB.query(que)
711 # Reduce the query results to a list of version numbers
712 ql = [ i[0] for i in q.getresult() ]
715 if source_version in ql:
# Try (2) -- a binary-only NMU of an existing source version.
719 orig_source_version = re_bin_only_nmu.sub('', source_version)
720 if orig_source_version in ql:
728 ################################################################################
# Look up the override entry (section, priority) for 'package' in the
# given suite/component, falling back from the source ("dsc") override
# type to the binary types when nothing is found.  On success the
# override section/priority are cached on files[file] for the later
# disparity check.
# NOTE(review): original lines 732, 734-737, 741, 744-745, 748, 752,
# 763, 765 and 768-770 are missing (the `type` binding derived from
# binary_type, the unknown-suite guard and the return statements).
# Queries are built by string interpolation; inputs come from the
# archive itself, but verify before reusing this pattern.
730 def in_override_p (self, package, component, suite, binary_type, file):
731 files = self.pkg.files
733 if binary_type == "": # must be source
738 # Override suite name; used for example with proposed-updates
739 if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
740 suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)]
742 # Avoid <undef> on unknown distributions
743 suite_id = database.get_suite_id(suite)
746 component_id = database.get_component_id(component)
747 type_id = database.get_override_type_id(type)
749 # FIXME: nasty non-US specific hack
750 if component.lower().startswith("non-us/"):
751 component = component[7:]
753 q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
754 % (package, suite_id, component_id, type_id))
755 result = q.getresult()
756 # If checking for a source package fall back on the binary override type
757 if type == "dsc" and not result:
758 deb_type_id = database.get_override_type_id("deb")
759 udeb_type_id = database.get_override_type_id("udeb")
760 q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
761 % (package, suite_id, component_id, deb_type_id, udeb_type_id))
762 result = q.getresult()
764 # Remember the section and priority so we can check them later if appropriate
766 files[file]["override section"] = result[0][0]
767 files[file]["override priority"] = result[0][1]
771 ################################################################################
def reject (self, str, prefix="Rejected: "):
    """Queue one rejection line onto self.reject_message.

    A newline separator is prepended only when a message is already
    queued, so the accumulated value never carries a trailing newline
    back up to the caller.
    """
    if not self.reject_message:
        self.reject_message = prefix + str
    else:
        self.reject_message += "\n" + prefix + str
781 ################################################################################
# From (version, suite) query rows, pick the highest version found in
# 'suite' or any suite that Enhances it (per VersionChecks::Enhances
# config), using apt's Debian version comparison.
# NOTE(review): original lines 784 and 789-791 are missing -- the
# `anyversion` initialisation, its assignment inside the loop and the
# final return are not visible in this listing.
783 def get_anyversion(self, query_result, suite):
785 anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
786 for (v, s) in query_result:
787 if s in [ x.lower() for x in anysuite ]:
# Keep the maximum version seen so far (Debian version ordering).
788 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
792 ################################################################################
# NOTE(review): original lines 798, 808, 815, 819, 822-823, 828, 831,
# 836, 840, 845, 848 and 852-854 are missing from this listing (the
# `suite = entry[1].lower()` binding used below, several else/elif
# headers and blanks).  Partial view.
# NOTE(review): `i.lower` at lines 801-802 is not called (missing
# parentheses) -- the lists hold bound methods, not strings, so the
# `suite in must_be_newer_than` / `must_be_older_than` membership tests
# can only ever match the literal target_suite appended at line 805.
# This looks like a real bug neutering the MustBeNewerThan /
# MustBeOlderThan config checks; confirm against upstream history
# before relying on them.
794 def cross_suite_version_check(self, query_result, file, new_version):
795 """Ensure versions are newer than existing packages in target
796 suites and that cross-suite version checking rules as
797 set out in the conf file are satisfied."""
799 # Check versions for each target suite
800 for target_suite in self.pkg.changes["distribution"].keys():
801 must_be_newer_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
802 must_be_older_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
803 # Enforce "must be newer than target suite" even if conffile omits it
804 if target_suite not in must_be_newer_than:
805 must_be_newer_than.append(target_suite)
806 for entry in query_result:
807 existent_version = entry[0]
809 if suite in must_be_newer_than and \
810 apt_pkg.VersionCompare(new_version, existent_version) < 1:
811 self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
812 if suite in must_be_older_than and \
813 apt_pkg.VersionCompare(new_version, existent_version) > -1:
814 ch = self.pkg.changes
# A configured distribution-version mapping lets us propagate the
# upload to another suite instead of rejecting outright.
816 if ch.get('distribution-version', {}).has_key(suite):
817 # we really use the other suite, ignoring the conflicting one ...
818 addsuite = ch["distribution-version"][suite]
820 add_version = self.get_anyversion(query_result, addsuite)
821 target_version = self.get_anyversion(query_result, target_suite)
824 # not add_version can only happen if we map to a suite
825 # that doesn't enhance the suite we're propup'ing from.
826 # so "propup-ver x a b c; map a d" is a problem only if
827 # d doesn't enhance a.
829 # i think we could always propagate in this case, rather
830 # than complaining. either way, this isn't a REJECT issue
832 # And - we really should complain to the dorks who configured dak
833 self.reject("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite), "Warning: ")
834 self.pkg.changes.setdefault("propdistribution", {})
835 self.pkg.changes["propdistribution"][addsuite] = 1
837 elif not target_version:
838 # not targets_version is true when the package is NEW
839 # we could just stick with the "...old version..." REJECT
841 self.reject("Won't propogate NEW packages.")
842 elif apt_pkg.VersionCompare(new_version, add_version) < 0:
843 # propagation would be redundant. no need to reject though.
844 self.reject("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite), "Warning: ")
846 elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
847 apt_pkg.VersionCompare(add_version, target_version) >= 0:
849 self.reject("Propogating upload to %s" % (addsuite), "Warning: ")
850 self.pkg.changes.setdefault("propdistribution", {})
851 self.pkg.changes["propdistribution"][addsuite] = 1
855 self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
857 ################################################################################
# Validate one binary file against the archive database: run the
# cross-suite version checks for its (package, architecture), then
# reject if an identical (package, version, architecture) binary
# already exists.  Returns the accumulated reject_message string
# ("" when clean).
# NOTE(review): original lines 862, 866 (the FROM-clause continuation
# naming `architecture a`), 872 and 881 (the guard on the second
# query's result) are missing from this listing.  Lines inside the
# triple-quoted SQL strings are left strictly unannotated.
859 def check_binary_against_db(self, file):
860 self.reject_message = ""
861 files = self.pkg.files
863 # Ensure version is sane
864 q = self.projectB.query("""
865 SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
867 WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
868 AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
869 % (files[file]["package"],
870 files[file]["architecture"]))
871 self.cross_suite_version_check(q.getresult(), file, files[file]["version"])
873 # Check for any existing copies of the file
874 q = self.projectB.query("""
875 SELECT b.id FROM binaries b, architecture a
876 WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
877 AND a.id = b.architecture"""
878 % (files[file]["package"],
879 files[file]["version"],
880 files[file]["architecture"]))
881 self.reject("%s: can not overwrite existing copy already in the archive." % (file))
884 return self.reject_message
886 ################################################################################
# Validate the source package against the archive database by running
# the cross-suite version checks over every (version, suite) row for
# this source name.  Returns the accumulated reject_message ("" when
# clean).
# NOTE(review): original lines 890-891 are missing (presumably the
# `dsc = self.pkg.dsc` alias used below).  Lines inside the
# triple-quoted SQL string are left strictly unannotated.
888 def check_source_against_db(self, file):
889 self.reject_message = ""
892 # Ensure version is sane
893 q = self.projectB.query("""
894 SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
895 WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")))
896 self.cross_suite_version_check(q.getresult(), file, dsc.get("version"))
898 return self.reject_message
900 ################################################################################
903 # NB: this function can remove entries from the 'files' index [if
904 # the .orig.tar.gz is a duplicate of the one in the archive]; if
905 # you're iterating over 'files' and call this function as part of
906 # the loop, be sure to add a check to the top of the loop to
907 # ensure you haven't just tried to dereference the deleted entry.
910 def check_dsc_against_db(self, file):
# Cross-check every file listed in the .dsc (self.pkg.dsc_files)
# against this upload, the archive pool, and the queue directories,
# locating the .orig.tar.gz when it is not part of the upload.
# Returns a tuple (reject_message, orig_tar_gz_path), where the path
# element may be None.
# NOTE(review): this excerpt is missing several original source lines
# (loop headers, query-result fetches, some if/else branches); the
# comments below describe only what is visible here.
911 self.reject_message = ""
912 files = self.pkg.files
913 dsc_files = self.pkg.dsc_files
914 legacy_source_untouchable = self.pkg.legacy_source_untouchable
# Reset; set below once an .orig.tar.gz is located (or -1 on failure).
915 self.pkg.orig_tar_gz = None
917 # Try and find all files mentioned in the .dsc. This has
918 # to work harder to cope with the multiple possible
919 # locations of an .orig.tar.gz.
920 for dsc_file in dsc_files.keys():
# Case 1: the .dsc-listed file is included in this upload.
922 if files.has_key(dsc_file):
923 actual_md5 = files[dsc_file]["md5sum"]
924 actual_size = int(files[dsc_file]["size"])
925 found = "%s in incoming" % (dsc_file)
926 # Check the file does not already exist in the archive
# The LIKE '%<name>%' pattern over-matches; hits that are not exactly
# <name> or */<name> are filtered out just below.
927 q = self.projectB.query("SELECT f.size, f.md5sum, l.path, f.filename FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
929 # Strip out anything that isn't '%s' or '/%s$'
931 if i[3] != dsc_file and i[3][-(len(dsc_file)+1):] != '/'+dsc_file:
934 # "[dak] has not broken them. [dak] has fixed a
935 # brokenness. Your crappy hack exploited a bug in
938 # "(Come on! I thought it was always obvious that
939 # one just doesn't release different files with
940 # the same name and version.)"
941 # -- ajk@ on d-devel@l.d.o
944 # Ignore exact matches for .orig.tar.gz
# A byte-identical .orig.tar.gz already in the archive is tolerated:
# warn ("Warning: " prefix, not a hard reject) and reuse the pool copy.
946 if dsc_file.endswith(".orig.tar.gz"):
948 if files.has_key(dsc_file) and \
949 int(files[dsc_file]["size"]) == int(i[0]) and \
950 files[dsc_file]["md5sum"] == i[1]:
951 self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ")
953 self.pkg.orig_tar_gz = i[2] + i[3]
# Any other archive hit with the same filename is fatal.
957 self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file))
# Case 2: an .orig.tar.gz not included in the upload; search the pool.
958 elif dsc_file.endswith(".orig.tar.gz"):
960 q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
962 # Strip out anything that isn't '%s' or '/%s$'
964 if i[1] != dsc_file and i[1][-(len(dsc_file)+1):] != '/'+dsc_file:
968 # Unfortunately, we may get more than one match here if,
969 # for example, the package was in potato but had an -sa
970 # upload in woody. So we need to choose the right one.
972 x = ql[0]; # default to something sane in case we don't match any or have only one
# With multiple candidates, pick the one whose on-disk md5sum/size
# match the .dsc entry; the also-rans are recorded in
# legacy_source_untouchable so later stages leave them alone.
976 old_file = i[0] + i[1]
977 old_file_fh = utils.open_file(old_file)
978 actual_md5 = apt_pkg.md5sum(old_file_fh)
980 actual_size = os.stat(old_file)[stat.ST_SIZE]
981 if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
984 legacy_source_untouchable[i[3]] = ""
# Re-read md5sum/size from the chosen candidate 'x'.
986 old_file = x[0] + x[1]
987 old_file_fh = utils.open_file(old_file)
988 actual_md5 = apt_pkg.md5sum(old_file_fh)
990 actual_size = os.stat(old_file)[stat.ST_SIZE]
993 dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
994 # See install() in process-accepted...
995 self.pkg.orig_tar_id = x[3]
996 self.pkg.orig_tar_gz = old_file
# presumably 'suite_type' is set from the query-result processing that
# is missing from this excerpt -- TODO confirm against the full source.
997 if suite_type == "legacy" or suite_type == "legacy-mixed":
998 self.pkg.orig_tar_location = "legacy"
1000 self.pkg.orig_tar_location = x[4]
1002 # Not there? Check the queue directories...
1004 in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file)
1005 # See process_it() in 'dak process-unchecked' for explanation of this
# An .orig.tar.gz still sitting in Unchecked short-circuits the rest of
# the checks: the caller gets its path back immediately.
1006 if os.path.exists(in_unchecked):
1007 return (self.reject_message, in_unchecked)
1009 for dir in [ "Accepted", "New", "Byhand" ]:
1010 in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (dir)],dsc_file)
1011 if os.path.exists(in_otherdir):
1012 in_otherdir_fh = utils.open_file(in_otherdir)
1013 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
1014 in_otherdir_fh.close()
1015 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
1017 self.pkg.orig_tar_gz = in_otherdir
# Nowhere to be found: reject. orig_tar_gz = -1 marks "searched but
# missing", as distinct from None ("not applicable").
1020 self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file))
1021 self.pkg.orig_tar_gz = -1
# Case 3: a non-.orig file listed in the .dsc but absent from the upload.
1024 self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file))
# Finally, verify whatever copy was found against the .dsc's own
# md5sum/size for this file.
1026 if actual_md5 != dsc_files[dsc_file]["md5sum"]:
1027 self.reject("md5sum for %s doesn't match %s." % (found, file))
1028 if actual_size != int(dsc_files[dsc_file]["size"]):
1029 self.reject("size for %s doesn't match %s." % (found, file))
1031 return (self.reject_message, None)
1033 def do_query(self, q):
# Debugging wrapper around projectB.query(): logs the query text and
# the wall-clock time it took to stderr.
# NOTE(review): the method appears to continue past this excerpt
# (presumably returning 'r'); only the visible lines are documented.
1034 sys.stderr.write("query: \"%s\" ... " % (q))
1035 before = time.time()
1036 r = self.projectB.query(q)
1037 time_diff = time.time()-before
1038 sys.stderr.write("took %.3f seconds.\n" % (time_diff))