3 # Queue utility functions for dak
4 # Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 2 of the License, or
9 # (at your option) any later version.
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software
18 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
20 ###############################################################################
22 import cPickle, errno, os, pg, re, stat, sys, time
23 import apt_inst, apt_pkg
24 import utils, database
28 ###############################################################################
# Pre-compiled regular expressions used throughout this module.
re_isanum = re.compile (r"^\d+$")            # a string consisting solely of digits (e.g. a bug number)
re_default_answer = re.compile(r"\[(.*)\]")  # the bracketed default answer embedded in a prompt string
re_fdnic = re.compile(r"\n\n")               # blank-line separator (used to reformat changelog text)
re_bin_only_nmu = re.compile(r"\+b\d+$")     # trailing "+bN" of a binary-only NMU version number
35 ###############################################################################
# Convenience wrapper to carry around all the package information in
def __init__(self, **kwds):
    """Initialise the wrapper, storing every keyword argument as an attribute."""
    for attr_name, attr_value in kwds.items():
        setattr(self, attr_name, attr_value)
def update(self, **kwds):
    """Merge the given keyword arguments into this object's attributes."""
    for attr_name, attr_value in kwds.items():
        setattr(self, attr_name, attr_value)
46 ###############################################################################
# Read in the group maintainer override file
def __init__ (self, Cnf):
    # NOTE(review): lines are elided from this view; the original
    # presumably stores Cnf and initialises self.group_maint = {}
    # before the loop below, and closes the file afterwards -- confirm
    # against the full file.
    if Cnf.get("Dinstall::GroupOverrideFilename"):
        filename = Cnf["Dir::Override"] + Cnf["Dinstall::GroupOverrideFilename"]
        file = utils.open_file(filename)
        for line in file.readlines():
            # Strip comments, lower-case and trim each entry.
            line = utils.re_comments.sub('', line).lower().strip()
            # Record the address as belonging to a group-maintained package.
            self.group_maint[line] = 1
def is_an_nmu (self, pkg):
    # Decide whether the upload described by *pkg* is a Non-Maintainer
    # Upload, by comparing the Changed-By identity against the .dsc
    # Maintainer, the Uploaders list and the group-maintained set.
    # NOTE(review): local bindings (Cnf/changes/dsc), loop headers and
    # the return statements are elided from this view -- confirm
    # against the full file; nesting below is a best-effort guess.
    i = utils.fix_maintainer (dsc.get("maintainer",
                                      Cnf["Dinstall::MyEmailAddress"]).lower())
    (dsc_rfc822, dsc_rfc2047, dsc_name, dsc_email) = i
    # changes["changedbyname"] == dsc_name is probably never true, but better safe than sorry
    if dsc_name == changes["maintainername"].lower() and \
       (changes["changedby822"] == "" or changes["changedbyname"].lower() == dsc_name):
    # Anyone listed in Uploaders also counts as a non-NMU uploader.
    if dsc.has_key("uploaders"):
        uploaders = dsc["uploaders"].lower().split(",")
        (rfc822, rfc2047, name, email) = utils.fix_maintainer (i.strip())
        uploadernames[name] = ""
        if uploadernames.has_key(changes["changedbyname"].lower()):
    # Some group maintained packages (e.g. Debian QA) are never NMU's
    if self.group_maint.has_key(changes["maintaineremail"].lower()):
90 ###############################################################################
def __init__(self, Cnf):
    # Set up per-run upload state: byte counters, the Pkg holder, the
    # template substitution map and the project database connection.
    # NOTE(review): lines are elided from this view (e.g. storing Cnf,
    # accept_count initialisation, construction of the nmu helper).
    # Read in the group-maint override file
    self.accept_bytes = 0L
    # Holder object for all per-upload dictionaries.
    self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
                   legacy_source_untouchable = {})

    # Initialize the substitution template mapping global
    Subst = self.Subst = {}
    Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"]
    Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"]
    Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"]
    Subst["__DAK_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]

    # Open the PostgreSQL connection and initialise the database layer.
    self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
    database.init(Cnf, self.projectB)
113 ###########################################################################
def init_vars (self):
    """Reset the per-upload state in self.pkg to a pristine state.

    Each per-upload dictionary is cleared *in place* so that any
    existing references to the dict objects remain valid, then the
    .orig.tar.gz bookkeeping attributes are reset.
    """
    for name in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
        # getattr + .clear() replaces the old exec-based
        # 'exec "self.pkg.%s.clear()"' -- same effect, no dynamic code.
        getattr(self.pkg, name).clear()
    self.pkg.orig_tar_id = None
    self.pkg.orig_tar_location = ""
    self.pkg.orig_tar_gz = None
122 ###########################################################################
def update_vars (self):
    """Restore the per-upload state previously saved by dump_vars().

    Opens the pickled ".dak" side file corresponding to
    self.pkg.changes_file and merges its contents back into self.pkg.
    The load order must match the dump order used by dump_vars().
    """
    # "<foo>.changes" -> "<foo>.dak"
    dump_filename = self.pkg.changes_file[:-8]+".dak"
    dump_file = utils.open_file(dump_filename)
    try:
        p = cPickle.Unpickler(dump_file)
        # Dictionaries are merged in place so existing references stay valid.
        for name in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
            getattr(self.pkg, name).update(p.load())
        # Scalar attributes are simply reassigned.
        for name in [ "orig_tar_id", "orig_tar_location" ]:
            setattr(self.pkg, name, p.load())
    finally:
        # Always release the file handle, even if unpickling fails
        # (the visible original leaked it on error).
        dump_file.close()
134 ###########################################################################
# This could just dump the dictionaries as is, but I'd like to
# avoid this so there's some idea of what process-accepted &
# process-new use from process-unchecked
def dump_vars(self, dest_dir):
    # Persist a whitelisted subset of the per-upload state to a
    # "<foo>.dak" pickle next to the .changes file in *dest_dir*.
    # Bind each self.pkg.<name> to a same-named local via exec.
    for i in [ "changes", "dsc", "files", "dsc_files",
               "legacy_source_untouchable", "orig_tar_id", "orig_tar_location" ]:
        exec "%s = self.pkg.%s;" % (i,i)
    dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".dak")
    dump_file = utils.open_file(dump_filename, 'w')
    # NOTE(review): the try/except wrapper lines are elided from this
    # view; 'e' below is presumably the OSError caught from chmod.
    os.chmod(dump_filename, 0660)
    # EPERM means we may not own the file; refuse if it's world readable.
    if errno.errorcode[e.errno] == 'EPERM':
        perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE])
        if perms & stat.S_IROTH:
            utils.fubar("%s is world readable and chmod failed." % (dump_filename))
    # Pickle protocol 1 (binary) for compactness.
    p = cPickle.Pickler(dump_file, 1)
    # NOTE(review): the loop body creating the d_* dicts is elided here.
    for i in [ "d_changes", "d_dsc", "d_files", "d_dsc_files" ]:
    # Copy only a known whitelist of keys for each file entry.
    for file in files.keys():
        for i in [ "package", "version", "architecture", "type", "size",
                   "md5sum", "component", "location id", "source package",
                   "source version", "maintainer", "dbtype", "files id",
                   "new", "section", "priority", "othercomponents",
                   "pool name", "original component" ]:
            if files[file].has_key(i):
                d_files[file][i] = files[file][i]
    # Mandatory changes fields
    for i in [ "distribution", "source", "architecture", "version",
               "maintainer", "urgency", "fingerprint", "changedby822",
               "changedby2047", "changedbyname", "maintainer822",
               "maintainer2047", "maintainername", "maintaineremail",
               "closes", "changes" ]:
        d_changes[i] = changes[i]
    # Optional changes fields
    for i in [ "changed-by", "filecontents", "format", "process-new note", "adv id", "distribution-version" ]:
        if changes.has_key(i):
            d_changes[i] = changes[i]
    # NOTE(review): the body copying the .dsc whitelist is elided here.
    for i in [ "source", "version", "maintainer", "fingerprint",
               "uploaders", "bts changelog" ]:
    for file in dsc_files.keys():
        d_dsc_files[file] = {}
        # Mandatory dsc_files fields
        for i in [ "size", "md5sum" ]:
            d_dsc_files[file][i] = dsc_files[file][i]
        # Optional dsc_files fields
        for i in [ "files id" ]:
            if dsc_files[file].has_key(i):
                d_dsc_files[file][i] = dsc_files[file][i]
    # NOTE(review): the p.dump(i) body and file close are elided here.
    for i in [ d_changes, d_dsc, d_files, d_dsc_files,
               legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
202 ###########################################################################
# Set up the per-package template substitution mappings
def update_subst (self, reject_message = ""):
    # NOTE(review): the binding 'Subst = self.Subst' (and likely Cnf)
    # is elided from this view; Subst below refers to that mapping.
    changes = self.pkg.changes
    # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
    if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
        changes["architecture"] = { "Unknown" : "" }
    # and maintainer2047 may not exist.
    if not changes.has_key("maintainer2047"):
        changes["maintainer2047"] = self.Cnf["Dinstall::MyEmailAddress"]

    Subst["__ARCHITECTURE__"] = " ".join(changes["architecture"].keys())
    Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
    Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "")

    # For source uploads the Changed-By field wins; otherwise Maintainer wins.
    if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
        Subst["__MAINTAINER_FROM__"] = changes["changedby2047"]
        Subst["__MAINTAINER_TO__"] = "%s, %s" % (changes["changedby2047"],
                                                 changes["maintainer2047"])
        Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown")
        # NOTE(review): an 'else:' line is elided from this view; the
        # three assignments below belong to the else branch.
        Subst["__MAINTAINER_FROM__"] = changes["maintainer2047"]
        Subst["__MAINTAINER_TO__"] = changes["maintainer2047"]
        Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown")
    # Bcc the package-tracking server for sourceful uploads.
    if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
        Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])

    # Apply any global override of the Maintainer field
    if self.Cnf.get("Dinstall::OverrideMaintainer"):
        Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"]
        Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"]

    Subst["__REJECT_MESSAGE__"] = reject_message
    Subst["__SOURCE__"] = changes.get("source", "Unknown")
    Subst["__VERSION__"] = changes.get("version", "Unknown")
242 ###########################################################################
def build_summaries(self):
    # Build the human-readable (summary, short_summary) pair describing
    # this upload: per-file destinations, byhand/new markers, override
    # entries and the changelog text.
    changes = self.pkg.changes
    files = self.pkg.files

    byhand = summary = new = ""

    # changes["distribution"] may not exist in corner cases
    # (e.g. unreadable changes files)
    if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
        changes["distribution"] = {}

    override_summary ="";
    file_keys = files.keys()
    # NOTE(review): lines are elided inside this loop (e.g. the byhand/
    # new counters and an 'else:' before the poolify section) -- the
    # nesting below is a best-effort reconstruction.
    for file in file_keys:
        if files[file].has_key("byhand"):
            summary += file + " byhand\n"
        elif files[file].has_key("new"):
            summary += "(new) %s %s %s\n" % (file, files[file]["priority"], files[file]["section"])
            if files[file].has_key("othercomponents"):
                summary += "WARNING: Already present in %s distribution.\n" % (files[file]["othercomponents"])
            # For binary packages include the Description from the control file.
            if files[file]["type"] == "deb":
                deb_fh = utils.open_file(file)
                summary += apt_pkg.ParseSection(apt_inst.debExtractControl(deb_fh))["Description"] + '\n'
        files[file]["pool name"] = utils.poolify (changes.get("source",""), files[file]["component"])
        destination = self.Cnf["Dir::PoolRoot"] + files[file]["pool name"] + file
        summary += file + "\n  to " + destination + "\n"
        if not files[file].has_key("type"):
            files[file]["type"] = "unknown"
        if files[file]["type"] in ["deb", "udeb", "dsc"]:
            # (queue/unchecked), there we have override entries already, use them
            # (process-new), there we dont have override entries, use the newly generated ones.
            override_prio = files[file].get("override priority", files[file]["priority"])
            override_sect = files[file].get("override section", files[file]["section"])
            override_summary += "%s - %s %s\n" % (file, override_prio, override_sect)

    # The short summary omits the changelog/override details appended below.
    short_summary = summary

    # This is for direport's benefit...
    f = re_fdnic.sub("\n .\n", changes.get("changes",""))

    summary += "Changes: " + f

    summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"

    summary += self.announce(short_summary, 0)

    return (summary, short_summary)
298 ###########################################################################
def close_bugs (self, summary, action):
    # Close (non-NMU) or tag as fixed (NMU / experimental) the bugs
    # listed in the .changes Closes field, mailing the BTS when
    # *action* is true, and return the updated summary text.
    # NOTE(review): many lines are elided from this view -- local
    # bindings (Subst/Cnf), the per-bug 'for' loops, 'else:' branches
    # and returns; nesting below is a best-effort reconstruction.
    changes = self.pkg.changes
    bugs = changes["closes"].keys()
    if not self.nmu.is_an_nmu(self.pkg):
        if changes["distribution"].has_key("experimental"):
            # tag bugs as fixed-in-experimental for uploads to experimental
            summary += "Setting bugs to severity fixed: "
            summary += "%s " % (bug)
            control_message += "tag %s + fixed-in-experimental\n" % (bug)
            if action and control_message != "":
                Subst["__CONTROL_MESSAGE__"] = control_message
                mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-experimental-fixed")
                utils.send_mail (mail_message)
                self.Logger.log(["setting bugs to fixed"]+bugs)
        summary += "Closing bugs: "
        summary += "%s " % (bug)
        Subst["__BUG_NUMBER__"] = bug
        # NOTE(review): the closing triple-quote of the string below is
        # elided from this view (everything after it parses as string
        # content until the original closing quotes).
        if changes["distribution"].has_key("stable"):
            Subst["__STABLE_WARNING__"] = """
Note that this package is not part of the released stable Debian
distribution. It may have dependencies on other unreleased software,
or other instabilities. Please take care if you wish to install it.
The update will eventually make its way into the next released Debian
            Subst["__STABLE_WARNING__"] = ""
        mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-close")
        utils.send_mail (mail_message)
        self.Logger.log(["closing bugs"]+bugs)
    summary += "Setting bugs to severity fixed: "
    summary += "%s " % (bug)
    control_message += "tag %s + fixed\n" % (bug)
    if action and control_message != "":
        Subst["__CONTROL_MESSAGE__"] = control_message
        mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-nmu-fixed")
        utils.send_mail (mail_message)
        self.Logger.log(["setting bugs to fixed"]+bugs)
362 ###########################################################################
def announce (self, short_summary, action):
    # Mail the suite announcement list(s) about a sourceful upload,
    # then optionally close bugs; returns the accumulated summary text.
    # NOTE(review): lines are elided from this view (Subst/Cnf/summary/
    # lists_done bindings, early return, 'continue' in the loop and the
    # final return); nesting below is a best-effort reconstruction.
    changes = self.pkg.changes

    # Only do announcements for source uploads with a recent dpkg-dev installed
    if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):

    Subst["__SHORT_SUMMARY__"] = short_summary

    for dist in changes["distribution"].keys():
        list = Cnf.Find("Suite::%s::Announce" % (dist))
        # Skip suites without an announce list, or lists already mailed.
        if list == "" or lists_done.has_key(list):
        summary += "Announcing to %s\n" % (list)
        Subst["__ANNOUNCE_LIST_ADDRESS__"] = list
        # Bcc the package-tracking server as well.
        if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
            Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"])
        mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.announce")
        utils.send_mail (mail_message)

    if Cnf.FindB("Dinstall::CloseBugs"):
        summary = self.close_bugs(summary, action)
396 ###########################################################################
def accept (self, summary, short_summary):
    # Accept an upload: move its files into the accepted queue, mail
    # the uploader, announce, and emit DebBugs version-tracking files.
    # NOTE(review): lines are elided from this view (Cnf/Subst/dsc
    # bindings, 'f = files[file]' in the debinfo loop, file closes);
    # nesting below is a best-effort reconstruction.
    files = self.pkg.files
    changes = self.pkg.changes
    changes_file = self.pkg.changes_file

    self.Logger.log(["Accepting changes",changes_file])

    # Save state so process-accepted / process-new can pick it up later.
    self.dump_vars(Cnf["Dir::Queue::Accepted"])

    # Move all the files into the accepted directory
    utils.move(changes_file, Cnf["Dir::Queue::Accepted"])
    file_keys = files.keys()
    for file in file_keys:
        utils.move(file, Cnf["Dir::Queue::Accepted"])
        self.accept_bytes += float(files[file]["size"])
    self.accept_count += 1

    # Send accept mail, announce to lists, close bugs and check for
    # override disparities
    if not Cnf["Dinstall::Options::No-Mail"]:
        Subst["__SUITE__"] = ""
        Subst["__SUMMARY__"] = summary
        mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
        utils.send_mail(mail_message)
        self.announce(short_summary, 1)

    ## Helper stuff for DebBugs Version Tracking
    if Cnf.Find("Dir::Queue::BTSVersionTrack"):
        # ??? once queue/* is cleared on *.d.o and/or reprocessed
        # the conditionalization on dsc["bts changelog"] should be
        # Write out the version history from the changelog
        if changes["architecture"].has_key("source") and \
           dsc.has_key("bts changelog"):
            temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                                dotprefix=1, perms=0644)
            version_history = utils.open_file(temp_filename, 'w')
            version_history.write(dsc["bts changelog"])
            version_history.close()
            # "<foo>.changes" -> "<foo>.versions"
            filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                  changes_file[:-8]+".versions")
            os.rename(temp_filename, filename)

        # Write out the binary -> source mapping.
        temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                            dotprefix=1, perms=0644)
        debinfo = utils.open_file(temp_filename, 'w')
        for file in file_keys:
            if f["type"] == "deb":
                line = " ".join([f["package"], f["version"],
                                 f["architecture"], f["source package"],
                                 f["source version"]])
                debinfo.write(line+"\n")
        filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                              changes_file[:-8]+".debinfo")
        os.rename(temp_filename, filename)

    self.queue_build("accepted", Cnf["Dir::Queue::Accepted"])
466 ###########################################################################
def queue_build (self, queue, path):
    # Register the upload's files with the auto-build queue: copy or
    # symlink each file into the queue-build area and record it in the
    # queue_build table, one pass per target suite.
    # NOTE(review): lines are elided from this view ('continue' after
    # the suite filter, 'else:' branches, the 'filename' binding and
    # the 'ql = q.getresult()' / emptiness check); nesting below is a
    # best-effort reconstruction.
    files = self.pkg.files
    changes = self.pkg.changes
    changes_file = self.pkg.changes_file

    file_keys = files.keys()

    ## Special support to enable clean auto-building of queued packages
    queue_id = database.get_or_set_queue_id(queue)

    self.projectB.query("BEGIN WORK")
    for suite in changes["distribution"].keys():
        # Only suites listed in Dinstall::QueueBuildSuites participate.
        if suite not in Cnf.ValueList("Dinstall::QueueBuildSuites"):
        suite_id = database.get_suite_id(suite)
        dest_dir = Cnf["Dir::QueueBuild"]
        if Cnf.FindB("Dinstall::SecurityQueueBuild"):
            dest_dir = os.path.join(dest_dir, suite)
        for file in file_keys:
            src = os.path.join(path, file)
            dest = os.path.join(dest_dir, file)
            if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                # Copy it since the original won't be readable by www-data
                utils.copy(src, dest)
                # Create a symlink to it
                os.symlink(src, dest)
            # Add it to the list of packages for later processing by apt-ftparchive
            self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
        # If the .orig.tar.gz is in the pool, create a symlink to
        # it (if one doesn't already exist)
        if self.pkg.orig_tar_id:
            # Determine the .orig.tar.gz file name
            for dsc_file in self.pkg.dsc_files.keys():
                if dsc_file.endswith(".orig.tar.gz"):
            dest = os.path.join(dest_dir, filename)
            # If it doesn't exist, create a symlink
            if not os.path.exists(dest):
                # Find the .orig.tar.gz in the pool
                q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id))
                utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id))
                src = os.path.join(ql[0][0], ql[0][1])
                os.symlink(src, dest)
                # Add it to the list of packages for later processing by apt-ftparchive
                self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
            # if it does, update things to ensure it's not removed prematurely
                self.projectB.query("UPDATE queue_build SET in_queue = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id))

    self.projectB.query("COMMIT WORK")
524 ###########################################################################
def check_override (self):
    # Compare the section/priority claimed by the package against the
    # override database and mail the maintainer about any disparity.
    # NOTE(review): lines are elided from this view (the early return,
    # 'continue' statements, the summary initialisation and the
    # "no disparities" early exit); nesting is a best-effort guess.
    changes = self.pkg.changes
    files = self.pkg.files

    # Abandon the check if:
    #  a) it's a non-sourceful upload
    #  b) override disparity checks have been disabled
    #  c) we're not sending mail
    if not changes["architecture"].has_key("source") or \
       not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
       Cnf["Dinstall::Options::No-Mail"]:

    file_keys = files.keys()
    for file in file_keys:
        # Only binary packages with an existing override are compared.
        if not files[file].has_key("new") and files[file]["type"] == "deb":
            section = files[file]["section"]
            override_section = files[file]["override section"]
            if section.lower() != override_section.lower() and section != "-":
                # Ignore this; it's a common mistake and not worth whining about
                if section.lower() == "non-us/main" and override_section.lower() == "non-us":
                summary += "%s: package says section is %s, override says %s.\n" % (file, section, override_section)
            priority = files[file]["priority"]
            override_priority = files[file]["override priority"]
            if priority != override_priority and priority != "-":
                summary += "%s: package says priority is %s, override says %s.\n" % (file, priority, override_priority)

    Subst["__SUMMARY__"] = summary
    mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/process-unchecked.override-disparity")
    utils.send_mail(mail_message)
565 ###########################################################################
def force_reject (self, files):
    # NOTE(review): the closing triple-quote of the docstring below is
    # elided from this view, as are the enclosing 'for file in files:'
    # loop, try/except lines and 'continue' statements; the indentation
    # of the remainder is therefore a best-effort reconstruction.
    """Forcefully move files from the current directory to the
    reject directory.  If any file already exists in the reject
    directory it will be moved to the morgue to make way for
    # Skip any files which don't exist or which we don't have permission to copy.
    if os.access(file,os.R_OK) == 0:
    dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file)
    dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
    # File exists?  Let's try and move it to the morgue
    if errno.errorcode[e.errno] == 'EEXIST':
        morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file)
        morgue_file = utils.find_next_free(morgue_file)
        except utils.tried_too_hard_exc:
            # Something's either gone badly Pete Tong, or
            # someone is trying to exploit us.
            utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file))
        utils.move(dest_file, morgue_file, perms=0660)
        dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file))
    # If we got here, we own the destination file, so we can
    # safely overwrite it.
    utils.move(file, dest_file, 1, perms=0660)
607 ###########################################################################
def do_reject (self, manual = 0, reject_message = ""):
    # Reject an upload: optionally let the operator edit the rejection
    # message, move all files to the reject directory, write a
    # "<foo>.reason" file and mail the rejection.
    # NOTE(review): lines are elided from this view -- the edit/confirm
    # loop wrapper, 'answer' initialisation, pkg/Cnf/Subst bindings,
    # the if/else separating automatic from manual rejection, fd close
    # and the return; nesting below is a best-effort reconstruction.
    # If we weren't given a manual rejection message, spawn an
    # editor so the user can add one in...
    if manual and not reject_message:
        temp_filename = utils.temp_filename()
        editor = os.environ.get("EDITOR","vi")
        os.system("%s %s" % (editor, temp_filename))
        temp_fh = utils.open_file(temp_filename)
        reject_message = "".join(temp_fh.readlines())
        print "Reject message:"
        print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
        prompt = "[R]eject, Edit, Abandon, Quit ?"
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            m = re_default_answer.search(prompt)
            answer = answer[:1].upper()
        os.unlink(temp_filename)

    # "<foo>.changes" -> "<foo>.reason" in the reject directory.
    reason_filename = pkg.changes_file[:-8] + ".reason"
    reason_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename

    # Move all the files into the reject directory
    reject_files = pkg.files.keys() + [pkg.changes_file]
    self.force_reject(reject_files)

    # If we fail here someone is probably trying to exploit the race
    # so let's just raise an exception ...
    if os.path.exists(reason_filename):
        os.unlink(reason_filename)
    reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)

    # Automatic-rejection branch: dak itself is the rejector.
    Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
    Subst["__MANUAL_REJECT_MESSAGE__"] = ""
    Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
    os.write(reason_fd, reject_message)
    reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")

    # Build up the rejection email
    # (manual branch: the operator's own address and message are used)
    user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"])
    Subst["__REJECTOR_ADDRESS__"] = user_email_address
    Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
    Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
    reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
    # Write the rejection email out as the <foo>.reason file
    os.write(reason_fd, reject_mail_message)

    # Send the rejection mail if appropriate
    if not Cnf["Dinstall::Options::No-Mail"]:
        utils.send_mail(reject_mail_message)

    self.Logger.log(["rejected", pkg.changes_file])
682 ################################################################################
# Ensure that source exists somewhere in the archive for the binary
# upload being processed.
# (1) exact match                      => 1.0-3
# (2) Bin-only NMU                     => 1.0-3+b1 , 1.0-3.1+b1
def source_exists (self, package, source_version, suites = ["any"]):
    # NOTE(review): mutable default argument -- safe only while the
    # list is never mutated; prefer 'suites=None' in a refactor.
    # NOTE(review): lines are elided from this view (the continuation
    # of 'que', the per-suite loop header that binds 's', the loop
    # applying the suite mappings, and the return statements).
    que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
    # source must exist in suite X, or in some other suite that's
    # mapped to X, recursively... silent-maps are counted too,
    # unreleased-maps aren't.
    maps = self.Cnf.ValueList("SuiteMappings")[:]
    maps = [ m.split() for m in maps ]
    maps = [ (x[1], x[2]) for x in maps
             if x[0] == "map" or x[0] == "silent-map" ]
    # Expand the suite set with every suite mapped onto one already in it.
    if x[1] in s and x[0] not in s:
    que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, " OR ".join(["su.suite_name = '%s'" % a for a in s]))
    q = self.projectB.query(que)

    # Reduce the query results to a list of version numbers
    ql = [ i[0] for i in q.getresult() ]

    # (1) exact version match
    if source_version in ql:
    # (2) strip any "+bN" binary-NMU suffix and retry
    orig_source_version = re_bin_only_nmu.sub('', source_version)
    if orig_source_version in ql:
730 ################################################################################
def in_override_p (self, package, component, suite, binary_type, file):
    # Look up *package* in the override table for the given suite/
    # component/type, recording the override section and priority on
    # the file entry when found.
    # NOTE(review): lines are elided from this view (the binding of
    # 'type' from binary_type, the early return on unknown suite, and
    # the final return value); nesting is a best-effort reconstruction.
    files = self.pkg.files

    if binary_type == "": # must be source
    # Override suite name; used for example with proposed-updates
    if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
        suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)]

    # Avoid <undef> on unknown distributions
    suite_id = database.get_suite_id(suite)
    component_id = database.get_component_id(component)
    type_id = database.get_override_type_id(type)

    # FIXME: nasty non-US speficic hack
    if component.lower().startswith("non-us/"):
        component = component[7:]

    q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
                            % (package, suite_id, component_id, type_id))
    result = q.getresult()
    # If checking for a source package fall back on the binary override type
    if type == "dsc" and not result:
        deb_type_id = database.get_override_type_id("deb")
        udeb_type_id = database.get_override_type_id("udeb")
        q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
                                % (package, suite_id, component_id, deb_type_id, udeb_type_id))
        result = q.getresult()

    # Remember the section and priority so we can check them later if appropriate
    files[file]["override section"] = result[0][0]
    files[file]["override priority"] = result[0][1]
773 ################################################################################
def reject (self, str, prefix="Rejected: "):
    """Append a prefixed rejection line to self.reject_message.

    The newline separator goes *before* each appended entry (never
    after), so the accumulated message carries no trailing newline
    when it is passed back up to a caller.
    """
    separator = ""
    if self.reject_message:
        separator = "\n"
    self.reject_message = "%s%s%s%s" % (self.reject_message, separator, prefix, str)
783 ################################################################################
def get_anyversion(self, query_result, suite):
    # Return the highest version among the (version, suite) rows whose
    # suite is *suite* or one of the suites it Enhances.
    # NOTE(review): the initialisation of 'anyversion', the assignment
    # inside the innermost branch and the return are elided from this
    # view -- confirm against the full file.
    anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
    for (v, s) in query_result:
        if s in [ x.lower() for x in anysuite ]:
            # Keep the maximum by dpkg version-compare semantics.
            if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
794 ################################################################################
def cross_suite_version_check(self, query_result, file, new_version):
    """Ensure versions are newer than existing packages in target
    suites and that cross-suite version checking rules as
    set out in the conf file are satisfied."""
    # NOTE(review): lines are elided from this view (notably the
    # binding of 'suite' from entry[1] and several branch/return
    # lines); nesting below is a best-effort reconstruction.
    # Check versions for each target suite
    for target_suite in self.pkg.changes["distribution"].keys():
        # NOTE(review): 'i.lower' is not *called* here (no parentheses),
        # so these lists hold bound methods, not lowered strings --
        # looks like a bug ('i.lower()'); confirm before relying on it.
        must_be_newer_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
        must_be_older_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
        # Enforce "must be newer than target suite" even if conffile omits it
        if target_suite not in must_be_newer_than:
            must_be_newer_than.append(target_suite)
        for entry in query_result:
            existent_version = entry[0]
            if suite in must_be_newer_than and \
               apt_pkg.VersionCompare(new_version, existent_version) < 1:
                self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
            if suite in must_be_older_than and \
               apt_pkg.VersionCompare(new_version, existent_version) > -1:
                ch = self.pkg.changes
                # Propagate-up ("propup") handling via distribution-version.
                if ch.get('distribution-version', {}).has_key(suite):
                    # we really use the other suite, ignoring the conflicting one ...
                    addsuite = ch["distribution-version"][suite]

                    add_version = self.get_anyversion(query_result, addsuite)
                    target_version = self.get_anyversion(query_result, target_suite)

                    # not add_version can only happen if we map to a suite
                    # that doesn't enhance the suite we're propup'ing from.
                    # so "propup-ver x a b c; map a d" is a problem only if
                    # d doesn't enhance a.
                    #
                    # i think we could always propagate in this case, rather
                    # than complaining. either way, this isn't a REJECT issue
                    #
                    # And - we really should complain to the dorks who configured dak
                    self.reject("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite), "Warning: ")
                    self.pkg.changes.setdefault("propdistribution", {})
                    self.pkg.changes["propdistribution"][addsuite] = 1
                    elif not target_version:
                        # not targets_version is true when the package is NEW
                        # we could just stick with the "...old version..." REJECT
                        self.reject("Won't propogate NEW packages.")
                    elif apt_pkg.VersionCompare(new_version, add_version) < 0:
                        # propogation would be redundant. no need to reject though.
                        self.reject("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite), "Warning: ")
                    elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
                         apt_pkg.VersionCompare(add_version, target_version) >= 0:
                        # propup version is higher than both: propagate.
                        self.reject("Propogating upload to %s" % (addsuite), "Warning: ")
                        self.pkg.changes.setdefault("propdistribution", {})
                        self.pkg.changes["propdistribution"][addsuite] = 1
                self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
859 ################################################################################
def check_binary_against_db(self, file):
    # Run the cross-suite version checks for a binary package and
    # reject any attempt to overwrite a file already in the archive.
    # Returns the accumulated rejection message ("" when clean).
    # NOTE(review): the FROM clause below is missing a line in this
    # view (presumably "architecture a"), and the 'if q.getresult():'
    # guard before the overwrite reject is elided -- confirm.
    self.reject_message = ""
    files = self.pkg.files

    # Ensure version is sane
    q = self.projectB.query("""
SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
 WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
   AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
                            % (files[file]["package"],
                               files[file]["architecture"]))
    self.cross_suite_version_check(q.getresult(), file, files[file]["version"])

    # Check for any existing copies of the file
    q = self.projectB.query("""
SELECT b.id FROM binaries b, architecture a
 WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
   AND a.id = b.architecture"""
                            % (files[file]["package"],
                               files[file]["version"],
                               files[file]["architecture"]))
    self.reject("%s: can not overwrite existing copy already in the archive." % (file))

    return self.reject_message
888 ################################################################################
def check_source_against_db(self, file):
    # Run the cross-suite version checks for a source package.
    # Returns the accumulated rejection message ("" when clean).
    # NOTE(review): the binding of 'dsc' (presumably self.pkg.dsc) is
    # elided from this view -- confirm against the full file.
    self.reject_message = ""

    # Ensure version is sane
    q = self.projectB.query("""
SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
 WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")))
    self.cross_suite_version_check(q.getresult(), file, dsc.get("version"))

    return self.reject_message
902 ################################################################################
905 # NB: this function can remove entries from the 'files' index [if
906 # the .orig.tar.gz is a duplicate of the one in the archive]; if
907 # you're iterating over 'files' and call this function as part of
908 # the loop, be sure to add a check to the top of the loop to
# ensure you haven't just tried to dereference the deleted entry.
# Validate every file listed in a .dsc against the archive database and
# the queue directories, locating the .orig.tar.gz when it is not part
# of the upload itself.
#
# NOTE(review): this excerpt appears to have lost several structural
# lines -- 'ql', 'i' and 'suite_type' are referenced but never bound
# below, and some if/else bodies are clearly absent.  The comments
# describe only what the visible code shows; confirm against the full
# source before relying on them.
def check_dsc_against_db(self, file):
# Accumulates rejection reasons via self.reject(); returned to caller.
self.reject_message = ""
files = self.pkg.files
dsc_files = self.pkg.dsc_files
legacy_source_untouchable = self.pkg.legacy_source_untouchable
# Path of the located .orig.tar.gz; set to -1 further down when it
# cannot be found anywhere.
self.pkg.orig_tar_gz = None
# Try and find all files mentioned in the .dsc. This has
# to work harder to cope with the multiple possible
# locations of an .orig.tar.gz.
for dsc_file in dsc_files.keys():
# Case 1: the file was uploaded alongside the .dsc (it's in incoming).
if files.has_key(dsc_file):
actual_md5 = files[dsc_file]["md5sum"]
actual_size = int(files[dsc_file]["size"])
found = "%s in incoming" % (dsc_file)
# Check the file does not already exist in the archive
q = self.projectB.query("SELECT f.size, f.md5sum, l.path, f.filename FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
# Strip out anything that isn't '%s' or '/%s$'
# (the LIKE above matches any filename merely *containing* dsc_file,
# so keep only exact matches or '/<dsc_file>' suffix matches)
if i[3] != dsc_file and i[3][-(len(dsc_file)+1):] != '/'+dsc_file:
# "[dak] has not broken them. [dak] has fixed a
# brokenness. Your crappy hack exploited a bug in
# "(Come on! I thought it was always obvious that
# one just doesn't release different files with
# the same name and version.)"
# -- ajk@ on d-devel@l.d.o
# Ignore exact matches for .orig.tar.gz
if dsc_file.endswith(".orig.tar.gz"):
# Same size and md5sum as the archive copy: harmless duplicate,
# warn and remember the pool copy instead.
if files.has_key(dsc_file) and \
int(files[dsc_file]["size"]) == int(i[0]) and \
files[dsc_file]["md5sum"] == i[1]:
self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ")
# Pool path = location path (i[2]) + filename (i[3]).
self.pkg.orig_tar_gz = i[2] + i[3]
# Anything else that already exists in the archive is a hard reject.
self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file))
# Case 2: a .orig.tar.gz that is not in incoming -- look in the pool.
elif dsc_file.endswith(".orig.tar.gz"):
q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
# Strip out anything that isn't '%s' or '/%s$'
if i[1] != dsc_file and i[1][-(len(dsc_file)+1):] != '/'+dsc_file:
# Unfortunately, we may get more than one match here if,
# for example, the package was in potato but had an -sa
# upload in woody. So we need to choose the right one.
x = ql[0]; # default to something sane in case we don't match any or have only one
old_file = i[0] + i[1]
old_file_fh = utils.open_file(old_file)
actual_md5 = apt_pkg.md5sum(old_file_fh)
actual_size = os.stat(old_file)[stat.ST_SIZE]
# Prefer the candidate whose md5sum/size match what the .dsc claims.
if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
legacy_source_untouchable[i[3]] = ""
# Re-checksum the chosen candidate 'x'.
old_file = x[0] + x[1]
old_file_fh = utils.open_file(old_file)
actual_md5 = apt_pkg.md5sum(old_file_fh)
actual_size = os.stat(old_file)[stat.ST_SIZE]
dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
# See install() in process-accepted...
self.pkg.orig_tar_id = x[3]
self.pkg.orig_tar_gz = old_file
if suite_type == "legacy" or suite_type == "legacy-mixed":
self.pkg.orig_tar_location = "legacy"
self.pkg.orig_tar_location = x[4]
# Not there? Check the queue directories...
in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file)
# See process_it() in 'dak process-unchecked' for explanation of this
if os.path.exists(in_unchecked):
# Found in unchecked: return early with its path for the caller.
return (self.reject_message, in_unchecked)
for dir in [ "Accepted", "New", "Byhand" ]:
in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (dir)],dsc_file)
if os.path.exists(in_otherdir):
in_otherdir_fh = utils.open_file(in_otherdir)
actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
in_otherdir_fh.close()
actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
self.pkg.orig_tar_gz = in_otherdir
# Exhausted pool and queues: reject and mark orig_tar_gz as missing.
self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file))
self.pkg.orig_tar_gz = -1
# A non-orig file missing from incoming: only the queue could have it.
self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file))
# Finally, verify checksum and size against what the .dsc claims.
if actual_md5 != dsc_files[dsc_file]["md5sum"]:
self.reject("md5sum for %s doesn't match %s." % (found, file))
if actual_size != int(dsc_files[dsc_file]["size"]):
self.reject("size for %s doesn't match %s." % (found, file))
return (self.reject_message, None)
def do_query(self, q):
# Debugging wrapper around projectB.query(): logs the SQL and the
# wall-clock time it took to stderr.
# NOTE(review): 'r' is presumably returned just past this excerpt --
# confirm in the full source.
sys.stderr.write("query: \"%s\" ... " % (q))
before = time.time()
r = self.projectB.query(q)
time_diff = time.time()-before
sys.stderr.write("took %.3f seconds.\n" % (time_diff))