3 # Queue utility functions for dak
4 # Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 2 of the License, or
9 # (at your option) any later version.
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software
18 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
20 ###############################################################################
22 import cPickle, errno, os, pg, re, stat, sys, time
23 import apt_inst, apt_pkg
24 import utils, database
28 ###############################################################################
# Precompiled regular expressions shared by the queue-handling code.
# NOTE(review): every line of this dump carries a stray leading number (an
# old line number) and some lines are missing; code is kept verbatim.
# Matches a string consisting only of digits (e.g. a bug number).
30 re_isanum = re.compile (r"^\d+$")
# Extracts the "[default]" answer embedded in an interactive prompt.
31 re_default_answer = re.compile(r"\[(.*)\]")
# Matches a blank line (paragraph separator) in a changes description.
32 re_fdnic = re.compile(r"\n\n")
# Matches a binary-only NMU version suffix such as "+b1" at end of version.
33 re_bin_only_nmu = re.compile(r"\+b\d+$")
35 ###############################################################################
# Convenience wrapper to carry around all the package information in
# one attribute bag.
class Pkg:
    """Attribute bag for per-upload package state.

    Every keyword argument passed to the constructor becomes an instance
    attribute; update() merges further keyword arguments the same way.
    """

    def __init__(self, **kwds):
        # Bind each keyword argument as an attribute on this instance.
        for attr_name, attr_value in kwds.items():
            setattr(self, attr_name, attr_value)

    def update(self, **kwds):
        # Merge additional keyword arguments into the attribute set,
        # overwriting any attribute that already exists.
        for attr_name, attr_value in kwds.items():
            setattr(self, attr_name, attr_value)
46 ###############################################################################
49 # Read in the group maintainer override file
# Constructor: if Dinstall::GroupOverrideFilename is configured, read that
# file (relative to Dir::Override), strip comments, and record each
# lower-cased entry in self.group_maint so group-maintained packages are
# never classified as NMUs.
# NOTE(review): this dump has gaps — the lines that presumably initialise
# self.group_maint (and bind Cnf) are missing; confirm against upstream.
50 def __init__ (self, Cnf):
53 if Cnf.get("Dinstall::GroupOverrideFilename"):
54 filename = Cnf["Dir::Override"] + Cnf["Dinstall::GroupOverrideFilename"]
55 file = utils.open_file(filename)
56 for line in file.readlines():
# Drop trailing comments, normalise case and whitespace before storing.
57 line = utils.re_comments.sub('', line).lower().strip()
59 self.group_maint[line] = 1
62 def is_an_nmu (self, pkg):
# Decide whether the upload described by `pkg` is a non-maintainer upload.
# Visible logic: not an NMU when Changed-By matches the Maintainer (or an
# Uploaders entry), or when the maintainer address is group-maintained.
# NOTE(review): several lines are missing from this dump (the dsc/changes
# bindings, the uploaders loop header, and all return statements) — code
# kept verbatim; confirm full control flow against upstream.
67 i = utils.fix_maintainer (dsc.get("maintainer",
68 Cnf["Dinstall::MyEmailAddress"]).lower())
69 (dsc_rfc822, dsc_rfc2047, dsc_name, dsc_email) = i
70 # changes["changedbyname"] == dsc_name is probably never true, but better safe than sorry
71 if dsc_name == changes["maintainername"].lower() and \
72 (changes["changedby822"] == "" or changes["changedbyname"].lower() == dsc_name):
# Sourceful uploads may list co-maintainers in Uploaders; any of them
# uploading does not count as an NMU.
75 if dsc.has_key("uploaders"):
76 uploaders = dsc["uploaders"].lower().split(",")
79 (rfc822, rfc2047, name, email) = utils.fix_maintainer (i.strip())
80 uploadernames[name] = ""
81 if uploadernames.has_key(changes["changedbyname"].lower()):
84 # Some group maintained packages (e.g. Debian QA) are never NMU's
85 if self.group_maint.has_key(changes["maintaineremail"].lower()):
90 ###############################################################################
94 def __init__(self, Cnf):
# Upload constructor: set up byte/package counters, the Pkg state bag, the
# template-substitution map shared with the mail templates, and the
# projectB PostgreSQL connection (then prime database.py's caches).
# NOTE(review): lines are missing from this dump (e.g. the self.Cnf
# binding, accept_count initialisation, nmu_p construction) — verbatim.
96 # Read in the group-maint override file
99 self.accept_bytes = 0L
100 self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
101 legacy_source_untouchable = {})
103 # Initialize the substitution template mapping global
104 Subst = self.Subst = {}
105 Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"]
106 Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"]
107 Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"]
108 Subst["__DAK_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
# Open the database connection and initialise the helper module's caches.
110 self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
111 database.init(Cnf, self.projectB)
113 ###########################################################################
def init_vars (self):
    """Reset all per-package state on self.pkg before a new upload.

    Clears each tracking dictionary *in place* (so any external
    references to the dicts remain valid) and resets the .orig.tar.gz
    bookkeeping fields to their unset values.
    """
    # getattr() replaces the original string-building `exec`, which was
    # slower, unidiomatic, and an injection hazard as a pattern.
    for dict_name in ("changes", "dsc", "files", "dsc_files",
                      "legacy_source_untouchable"):
        getattr(self.pkg, dict_name).clear()
    self.pkg.orig_tar_id = None
    self.pkg.orig_tar_location = ""
    self.pkg.orig_tar_gz = None
122 ###########################################################################
124 def update_vars (self):
# Reload the pickled per-upload state written by dump_vars() from the
# companion ".dak" file next to the .changes file ([:-8] strips the
# ".changes" suffix).  Dictionaries are update()d in place; scalar fields
# are rebound via exec.
# NOTE(review): cPickle.Unpickler on a file dak itself wrote — never point
# this at untrusted input.  The trailing lines of this method (presumably
# the file close) are missing from this dump; code kept verbatim.
125 dump_filename = self.pkg.changes_file[:-8]+".dak"
126 dump_file = utils.open_file(dump_filename)
127 p = cPickle.Unpickler(dump_file)
128 for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
129 exec "self.pkg.%s.update(p.load());" % (i)
130 for i in [ "orig_tar_id", "orig_tar_location" ]:
131 exec "self.pkg.%s = p.load();" % (i)
134 ###########################################################################
136 # This could just dump the dictionaries as is, but I'd like to
137 # avoid this so there's some idea of what process-accepted &
138 # process-new use from process-unchecked
# Serialise a curated subset of self.pkg into <changes-basename>.dak in
# dest_dir using cPickle protocol 1, chmod'ed 0660.  Only whitelisted keys
# of files/changes/dsc/dsc_files are written so the downstream consumers'
# data dependencies stay explicit.
# NOTE(review): lines are missing from this dump (try/except around chmod,
# the d_* dict initialisations, loop headers, the final pickle loop body
# and file close) — code kept verbatim; confirm against upstream.
140 def dump_vars(self, dest_dir):
141 for i in [ "changes", "dsc", "files", "dsc_files",
142 "legacy_source_untouchable", "orig_tar_id", "orig_tar_location" ]:
# Bind each self.pkg attribute to a same-named local via exec.
143 exec "%s = self.pkg.%s;" % (i,i)
144 dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".dak")
145 dump_file = utils.open_file(dump_filename, 'w')
# Restrict the dump's permissions; a failed chmod is fatal only if the
# file ended up world-readable.
147 os.chmod(dump_filename, 0660)
149 if errno.errorcode[e.errno] == 'EPERM':
150 perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE])
151 if perms & stat.S_IROTH:
152 utils.fubar("%s is world readable and chmod failed." % (dump_filename))
# Protocol 1 = binary pickle format.
156 p = cPickle.Pickler(dump_file, 1)
157 for i in [ "d_changes", "d_dsc", "d_files", "d_dsc_files" ]:
# Copy only the whitelisted per-file keys into the dump.
160 for file in files.keys():
162 for i in [ "package", "version", "architecture", "type", "size",
163 "md5sum", "component", "location id", "source package",
164 "source version", "maintainer", "dbtype", "files id",
165 "new", "section", "priority", "othercomponents",
166 "pool name", "original component" ]:
167 if files[file].has_key(i):
168 d_files[file][i] = files[file][i]
170 # Mandatory changes fields
171 for i in [ "distribution", "source", "architecture", "version",
172 "maintainer", "urgency", "fingerprint", "changedby822",
173 "changedby2047", "changedbyname", "maintainer822",
174 "maintainer2047", "maintainername", "maintaineremail",
175 "closes", "changes" ]:
176 d_changes[i] = changes[i]
177 # Optional changes fields
178 for i in [ "changed-by", "filecontents", "format", "process-new note", "adv id", "distribution-version" ]:
179 if changes.has_key(i):
180 d_changes[i] = changes[i]
# Whitelisted dsc fields (copy loop body missing from this dump).
182 for i in [ "source", "version", "maintainer", "fingerprint",
183 "uploaders", "bts changelog" ]:
187 for file in dsc_files.keys():
188 d_dsc_files[file] = {}
189 # Mandatory dsc_files fields
190 for i in [ "size", "md5sum" ]:
191 d_dsc_files[file][i] = dsc_files[file][i]
192 # Optional dsc_files fields
193 for i in [ "files id" ]:
194 if dsc_files[file].has_key(i):
195 d_dsc_files[file][i] = dsc_files[file][i]
# Pickle each curated object in a fixed order matching update_vars().
197 for i in [ d_changes, d_dsc, d_files, d_dsc_files,
198 legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
202 ###########################################################################
204 # Set up the per-package template substitution mappings
# Populate self.Subst with the per-upload values used by the mail
# templates, self-healing a couple of fields a crashed process-unchecked
# run can leave malformed.
# NOTE(review): lines are missing from this dump (e.g. the local
# `Subst = self.Subst` binding and the `else:` of the Changed-By branch) —
# code kept verbatim; confirm against upstream.
206 def update_subst (self, reject_message = ""):
208 changes = self.pkg.changes
209 # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
210 if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
211 changes["architecture"] = { "Unknown" : "" }
212 # and maintainer2047 may not exist.
213 if not changes.has_key("maintainer2047"):
214 changes["maintainer2047"] = self.Cnf["Dinstall::MyEmailAddress"]
216 Subst["__ARCHITECTURE__"] = " ".join(changes["architecture"].keys())
217 Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
218 Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "")
220 # For source uploads the Changed-By field wins; otherwise Maintainer wins.
221 if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
222 Subst["__MAINTAINER_FROM__"] = changes["changedby2047"]
223 Subst["__MAINTAINER_TO__"] = "%s, %s" % (changes["changedby2047"],
224 changes["maintainer2047"])
225 Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown")
227 Subst["__MAINTAINER_FROM__"] = changes["maintainer2047"]
228 Subst["__MAINTAINER_TO__"] = changes["maintainer2047"]
229 Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown")
# Bcc the package-tracking server on sourceful uploads, if configured.
230 if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
231 Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])
233 # Apply any global override of the Maintainer field
234 if self.Cnf.get("Dinstall::OverrideMaintainer"):
235 Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"]
236 Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"]
238 Subst["__REJECT_MESSAGE__"] = reject_message
239 Subst["__SOURCE__"] = changes.get("source", "Unknown")
240 Subst["__VERSION__"] = changes.get("version", "Unknown")
242 ###########################################################################
244 def build_summaries(self):
# Build the (summary, short_summary) pair describing what this upload
# does: byhand/new annotations per file, destination pool paths, the
# reformatted Changes text, and any announcement lines (announce() is
# called with action=0, i.e. dry run).
# NOTE(review): lines are missing from this dump (byhand/new counters,
# the `else:` before the pool-name branch, blank separators) — verbatim.
245 changes = self.pkg.changes
246 files = self.pkg.files
248 byhand = summary = new = ""
250 # changes["distribution"] may not exist in corner cases
251 # (e.g. unreadable changes files)
252 if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
253 changes["distribution"] = {}
255 file_keys = files.keys()
257 for file in file_keys:
258 if files[file].has_key("byhand"):
260 summary += file + " byhand\n"
261 elif files[file].has_key("new"):
263 summary += "(new) %s %s %s\n" % (file, files[file]["priority"], files[file]["section"])
264 if files[file].has_key("othercomponents"):
265 summary += "WARNING: Already present in %s distribution.\n" % (files[file]["othercomponents"])
# For new .debs, include the package Description from the control file.
266 if files[file]["type"] == "deb":
267 deb_fh = utils.open_file(file)
268 summary += apt_pkg.ParseSection(apt_inst.debExtractControl(deb_fh))["Description"] + '\n'
271 files[file]["pool name"] = utils.poolify (changes.get("source",""), files[file]["component"])
272 destination = self.Cnf["Dir::PoolRoot"] + files[file]["pool name"] + file
273 summary += file + "\n to " + destination + "\n"
275 short_summary = summary
277 # This is for direport's benefit...
278 f = re_fdnic.sub("\n .\n", changes.get("changes",""))
281 summary += "Changes: " + f
283 summary += self.announce(short_summary, 0)
285 return (summary, short_summary)
287 ###########################################################################
289 def close_bugs (self, summary, action):
# Handle the bugs listed in the Closes field: genuinely close them for a
# maintainer upload, or (for NMUs / experimental uploads) send control@
# mails tagging them fixed / fixed-in-experimental instead.  Appends what
# it did to `summary` and returns it; `action` gates the actual mailing.
# NOTE(review): lines are missing from this dump (the early return when
# nothing closes, `for bug in bugs:` loop headers, `else:` branches,
# control_message initialisation) — code kept verbatim.
290 changes = self.pkg.changes
294 bugs = changes["closes"].keys()
300 if not self.nmu.is_an_nmu(self.pkg):
301 if changes["distribution"].has_key("experimental"):
302 # tag bugs as fixed-in-experimental for uploads to experimental
303 summary += "Setting bugs to severity fixed: "
306 summary += "%s " % (bug)
307 control_message += "tag %s + fixed-in-experimental\n" % (bug)
308 if action and control_message != "":
309 Subst["__CONTROL_MESSAGE__"] = control_message
310 mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-experimental-fixed")
311 utils.send_mail (mail_message)
313 self.Logger.log(["setting bugs to fixed"]+bugs)
# Non-experimental maintainer upload: close each bug outright.
317 summary += "Closing bugs: "
319 summary += "%s " % (bug)
321 Subst["__BUG_NUMBER__"] = bug
322 if changes["distribution"].has_key("stable"):
323 Subst["__STABLE_WARNING__"] = """
324 Note that this package is not part of the released stable Debian
325 distribution. It may have dependencies on other unreleased software,
326 or other instabilities. Please take care if you wish to install it.
327 The update will eventually make its way into the next released Debian
330 Subst["__STABLE_WARNING__"] = ""
331 mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-close")
332 utils.send_mail (mail_message)
334 self.Logger.log(["closing bugs"]+bugs)
# NMU path: tag the bugs fixed rather than closing them.
337 summary += "Setting bugs to severity fixed: "
340 summary += "%s " % (bug)
341 control_message += "tag %s + fixed\n" % (bug)
342 if action and control_message != "":
343 Subst["__CONTROL_MESSAGE__"] = control_message
344 mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-nmu-fixed")
345 utils.send_mail (mail_message)
347 self.Logger.log(["setting bugs to fixed"]+bugs)
351 ###########################################################################
353 def announce (self, short_summary, action):
# Send announcement mail to each target suite's Announce list (once per
# distinct list), then delegate bug closing if Dinstall::CloseBugs is
# set.  `action` gates actual sending; the summary text is returned
# (via lines missing from this dump).
# NOTE(review): missing lines include the Cnf/Subst bindings, the
# lists_done initialisation, `continue` statements, and the returns.
356 changes = self.pkg.changes
358 # Only do announcements for source uploads with a recent dpkg-dev installed
359 if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):
364 Subst["__SHORT_SUMMARY__"] = short_summary
366 for dist in changes["distribution"].keys():
367 list = Cnf.Find("Suite::%s::Announce" % (dist))
# Skip suites with no list configured or lists already announced to.
368 if list == "" or lists_done.has_key(list):
371 summary += "Announcing to %s\n" % (list)
374 Subst["__ANNOUNCE_LIST_ADDRESS__"] = list
375 if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
376 Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"])
377 mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.announce")
378 utils.send_mail (mail_message)
380 if Cnf.FindB("Dinstall::CloseBugs"):
381 summary = self.close_bugs(summary, action)
385 ###########################################################################
387 def accept (self, summary, short_summary):
# Accept the upload: dump state, move the .changes and all files into
# Dir::Queue::Accepted, update accept counters, send the accepted mail /
# announcements, emit the DebBugs version-tracking side files
# (.versions and .debinfo), and register the upload for auto-building.
# NOTE(review): lines are missing from this dump (Cnf/Subst/dsc bindings,
# the print statement, `f = files[file]` in the debinfo loop, chmods) —
# code kept verbatim; confirm against upstream.
390 files = self.pkg.files
391 changes = self.pkg.changes
392 changes_file = self.pkg.changes_file
396 self.Logger.log(["Accepting changes",changes_file])
398 self.dump_vars(Cnf["Dir::Queue::Accepted"])
400 # Move all the files into the accepted directory
401 utils.move(changes_file, Cnf["Dir::Queue::Accepted"])
402 file_keys = files.keys()
403 for file in file_keys:
404 utils.move(file, Cnf["Dir::Queue::Accepted"])
405 self.accept_bytes += float(files[file]["size"])
406 self.accept_count += 1
408 # Send accept mail, announce to lists, close bugs and check for
409 # override disparities
410 if not Cnf["Dinstall::Options::No-Mail"]:
411 Subst["__SUITE__"] = ""
412 Subst["__SUMMARY__"] = summary
413 mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
414 utils.send_mail(mail_message)
415 self.announce(short_summary, 1)
418 ## Helper stuff for DebBugs Version Tracking
419 if Cnf.Find("Dir::Queue::BTSVersionTrack"):
420 # ??? once queue/* is cleared on *.d.o and/or reprocessed
421 # the conditionalization on dsc["bts changelog"] should be
424 # Write out the version history from the changelog
425 if changes["architecture"].has_key("source") and \
426 dsc.has_key("bts changelog"):
# Write atomically: dot-prefixed temp file, then rename into place.
428 temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
429 dotprefix=1, perms=0644)
430 version_history = utils.open_file(temp_filename, 'w')
431 version_history.write(dsc["bts changelog"])
432 version_history.close()
433 filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
434 changes_file[:-8]+".versions")
435 os.rename(temp_filename, filename)
437 # Write out the binary -> source mapping.
438 temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
439 dotprefix=1, perms=0644)
440 debinfo = utils.open_file(temp_filename, 'w')
441 for file in file_keys:
443 if f["type"] == "deb":
444 line = " ".join([f["package"], f["version"],
445 f["architecture"], f["source package"],
446 f["source version"]])
447 debinfo.write(line+"\n")
449 filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
450 changes_file[:-8]+".debinfo")
451 os.rename(temp_filename, filename)
453 self.queue_build("accepted", Cnf["Dir::Queue::Accepted"])
455 ###########################################################################
457 def queue_build (self, queue, path):
# Register the upload's files for auto-building in each eligible suite:
# copy (security queues) or symlink them into Dir::QueueBuild and insert
# rows into queue_build inside one transaction.  Also symlinks the pool
# .orig.tar.gz when needed, or refreshes its in_queue flag.
# NOTE(review): lines are missing from this dump (Cnf binding, `continue`
# after the suite filter, `else:` branches, `filename = ...` before line
# 495, and the `ql = q.getresult()` / emptiness check near line 499).
# NOTE(review): SQL is built by %-interpolation with file names derived
# from the upload — should be parameterized; flagged, not changed here.
460 files = self.pkg.files
461 changes = self.pkg.changes
462 changes_file = self.pkg.changes_file
464 file_keys = files.keys()
466 ## Special support to enable clean auto-building of queued packages
467 queue_id = database.get_or_set_queue_id(queue)
469 self.projectB.query("BEGIN WORK")
470 for suite in changes["distribution"].keys():
471 if suite not in Cnf.ValueList("Dinstall::QueueBuildSuites"):
473 suite_id = database.get_suite_id(suite)
474 dest_dir = Cnf["Dir::QueueBuild"]
475 if Cnf.FindB("Dinstall::SecurityQueueBuild"):
476 dest_dir = os.path.join(dest_dir, suite)
477 for file in file_keys:
478 src = os.path.join(path, file)
479 dest = os.path.join(dest_dir, file)
480 if Cnf.FindB("Dinstall::SecurityQueueBuild"):
481 # Copy it since the original won't be readable by www-data
482 utils.copy(src, dest)
484 # Create a symlink to it
485 os.symlink(src, dest)
486 # Add it to the list of packages for later processing by apt-ftparchive
487 self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
488 # If the .orig.tar.gz is in the pool, create a symlink to
489 # it (if one doesn't already exist)
490 if self.pkg.orig_tar_id:
491 # Determine the .orig.tar.gz file name
492 for dsc_file in self.pkg.dsc_files.keys():
493 if dsc_file.endswith(".orig.tar.gz"):
495 dest = os.path.join(dest_dir, filename)
496 # If it doesn't exist, create a symlink
497 if not os.path.exists(dest):
498 # Find the .orig.tar.gz in the pool
499 q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id))
502 utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id))
503 src = os.path.join(ql[0][0], ql[0][1])
504 os.symlink(src, dest)
505 # Add it to the list of packages for later processing by apt-ftparchive
506 self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
507 # if it does, update things to ensure it's not removed prematurely
509 self.projectB.query("UPDATE queue_build SET in_queue = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id))
511 self.projectB.query("COMMIT WORK")
513 ###########################################################################
515 def check_override (self):
# Compare each non-new .deb's declared section/priority against the
# override table values recorded by in_override_p(); mail the maintainer
# an override-disparity notice when they differ ("-" means "defer to
# override" and is not reported).
# NOTE(review): lines are missing from this dump (the early return,
# summary initialisation, the `continue` for the non-us special case,
# the `if summary == "": return`, and Subst/Cnf bindings) — verbatim.
517 changes = self.pkg.changes
518 files = self.pkg.files
521 # Abandon the check if:
522 # a) it's a non-sourceful upload
523 # b) override disparity checks have been disabled
524 # c) we're not sending mail
525 if not changes["architecture"].has_key("source") or \
526 not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
527 Cnf["Dinstall::Options::No-Mail"]:
531 file_keys = files.keys()
533 for file in file_keys:
534 if not files[file].has_key("new") and files[file]["type"] == "deb":
535 section = files[file]["section"]
536 override_section = files[file]["override section"]
537 if section.lower() != override_section.lower() and section != "-":
538 # Ignore this; it's a common mistake and not worth whining about
539 if section.lower() == "non-us/main" and override_section.lower() == "non-us":
541 summary += "%s: package says section is %s, override says %s.\n" % (file, section, override_section)
542 priority = files[file]["priority"]
543 override_priority = files[file]["override priority"]
544 if priority != override_priority and priority != "-":
545 summary += "%s: package says priority is %s, override says %s.\n" % (file, priority, override_priority)
550 Subst["__SUMMARY__"] = summary
551 mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/process-unchecked.override-disparity")
552 utils.send_mail(mail_message)
554 ###########################################################################
# Forcefully move the given files into Dir::Queue::Reject, claiming each
# destination with O_CREAT|O_EXCL; an existing reject-dir file is first
# evacuated to the morgue.  Failures are warnings, not fatal.
# NOTE(review): lines are missing from this dump (the docstring's closing
# lines, the `for file in files:` loop header, try/except scaffolding,
# `continue` statements, and os.close of dest_fd) — code kept verbatim.
556 def force_reject (self, files):
557 """Forcefully move files from the current directory to the
558 reject directory. If any file already exists in the reject
559 directory it will be moved to the morgue to make way for
565 # Skip any files which don't exist or which we don't have permission to copy.
566 if os.access(file,os.R_OK) == 0:
568 dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file)
# O_EXCL ensures we only proceed if we created (i.e. own) the file.
570 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
572 # File exists? Let's try and move it to the morgue
573 if errno.errorcode[e.errno] == 'EEXIST':
574 morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file)
576 morgue_file = utils.find_next_free(morgue_file)
577 except utils.tried_too_hard_exc:
578 # Something's either gone badly Pete Tong, or
579 # someone is trying to exploit us.
580 utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file))
582 utils.move(dest_file, morgue_file, perms=0660)
# Retry the exclusive create now the old file is out of the way.
584 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
587 utils.warn("**WARNING** failed to claim %s in the reject directory." % (file))
591 # If we got here, we own the destination file, so we can
592 # safely overwrite it.
593 utils.move(file, dest_file, 1, perms=0660)
596 ###########################################################################
598 def do_reject (self, manual = 0, reject_message = ""):
# Reject the current upload: optionally let the operator compose/confirm
# the rejection text in $EDITOR, move everything into the reject queue
# via force_reject(), write a race-safe <changes>.reason file containing
# the rejection mail, and send that mail unless No-Mail is set.
# NOTE(review): lines are missing from this dump (the edit/confirm loop
# scaffolding, `pkg = self.pkg` and Cnf/Subst bindings, the manual/
# non-manual if/else, os.close(reason_fd), and the return) — verbatim.
599 # If we weren't given a manual rejection message, spawn an
600 # editor so the user can add one in...
601 if manual and not reject_message:
602 temp_filename = utils.temp_filename()
603 editor = os.environ.get("EDITOR","vi")
# Re-read the message after each editor invocation.
606 os.system("%s %s" % (editor, temp_filename))
607 temp_fh = utils.open_file(temp_filename)
608 reject_message = "".join(temp_fh.readlines())
610 print "Reject message:"
611 print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
612 prompt = "[R]eject, Edit, Abandon, Quit ?"
614 while prompt.find(answer) == -1:
615 answer = utils.our_raw_input(prompt)
616 m = re_default_answer.search(prompt)
619 answer = answer[:1].upper()
620 os.unlink(temp_filename)
# [:-8] strips ".changes"; the .reason file lives in the reject queue.
632 reason_filename = pkg.changes_file[:-8] + ".reason"
633 reason_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename
635 # Move all the files into the reject directory
636 reject_files = pkg.files.keys() + [pkg.changes_file]
637 self.force_reject(reject_files)
639 # If we fail here someone is probably trying to exploit the race
640 # so let's just raise an exception ...
641 if os.path.exists(reason_filename):
642 os.unlink(reason_filename)
643 reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
# Automatic rejection: canned headers, the message is the raw text.
646 Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
647 Subst["__MANUAL_REJECT_MESSAGE__"] = ""
648 Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
649 os.write(reason_fd, reject_message)
650 reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
# Manual rejection: attribute it to the operator and Cc the archive.
652 # Build up the rejection email
653 user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"])
655 Subst["__REJECTOR_ADDRESS__"] = user_email_address
656 Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
657 Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
658 reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
659 # Write the rejection email out as the <foo>.reason file
660 os.write(reason_fd, reject_mail_message)
664 # Send the rejection mail if appropriate
665 if not Cnf["Dinstall::Options::No-Mail"]:
666 utils.send_mail(reject_mail_message)
668 self.Logger.log(["rejected", pkg.changes_file])
671 ################################################################################
673 # Ensure that source exists somewhere in the archive for the binary
674 # upload being processed.
676 # (1) exact match => 1.0-3
677 # (2) Bin-only NMU => 1.0-3+b1 , 1.0-3.1+b1
# Query the source table (optionally restricted to the given suites plus
# everything recursively mapped onto them via SuiteMappings) and accept
# either an exact version match or a binary-only-NMU match after
# stripping the "+bN" suffix.
# NOTE(review): lines are missing from this dump (the "any"-suite branch,
# the fix-point loop over `maps`, and the return statements) — verbatim.
# NOTE(review): the mutable default `suites = ["any"]` is a Python
# anti-pattern (shared across calls); harmless only while never mutated.
# NOTE(review): SQL is %-interpolated — parameterization preferable.
679 def source_exists (self, package, source_version, suites = ["any"]):
683 que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
686 # source must exist in suite X, or in some other suite that's
687 # mapped to X, recursively... silent-maps are counted too,
688 # unreleased-maps aren't.
689 maps = self.Cnf.ValueList("SuiteMappings")[:]
691 maps = [ m.split() for m in maps ]
692 maps = [ (x[1], x[2]) for x in maps
693 if x[0] == "map" or x[0] == "silent-map" ]
696 if x[1] in s and x[0] not in s:
699 que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, " OR ".join(["su.suite_name = '%s'" % a for a in s]))
700 q = self.projectB.query(que)
702 # Reduce the query results to a list of version numbers
703 ql = [ i[0] for i in q.getresult() ]
# (1) exact version match.
706 if source_version in ql:
# (2) binary-only NMU: strip "+bN" and retry.
710 orig_source_version = re_bin_only_nmu.sub('', source_version)
711 if orig_source_version in ql:
719 ################################################################################
721 def in_override_p (self, package, component, suite, binary_type, file):
# Look up the override entry for (package, suite, component, type),
# honouring Suite::*::OverrideSuite redirection and falling back from
# "dsc" to the deb/udeb override types for source packages.  Records the
# override section/priority on files[file] for check_override(), and
# (via missing lines) returns the query result.
# NOTE(review): lines are missing from this dump (`type = ...` bindings,
# the unknown-suite bail-out, `if result:` guard, and the return) —
# code kept verbatim.  SQL is %-interpolated; flagged, not changed.
722 files = self.pkg.files
724 if binary_type == "": # must be source
729 # Override suite name; used for example with proposed-updates
730 if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
731 suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)]
733 # Avoid <undef> on unknown distributions
734 suite_id = database.get_suite_id(suite)
737 component_id = database.get_component_id(component)
738 type_id = database.get_override_type_id(type)
740 # FIXME: nasty non-US specific hack
741 if component.lower().startswith("non-us/"):
742 component = component[7:]
744 q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
745 % (package, suite_id, component_id, type_id))
746 result = q.getresult()
747 # If checking for a source package fall back on the binary override type
748 if type == "dsc" and not result:
749 deb_type_id = database.get_override_type_id("deb")
750 udeb_type_id = database.get_override_type_id("udeb")
751 q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
752 % (package, suite_id, component_id, deb_type_id, udeb_type_id))
753 result = q.getresult()
755 # Remember the section and priority so we can check them later if appropriate
757 files[file]["override section"] = result[0][0]
758 files[file]["override priority"] = result[0][1]
762 ################################################################################
def reject (self, str, prefix="Rejected: "):
    """Append one rejection line to self.reject_message.

    Empty messages are ignored.  The newline separator is inserted
    *before* the new entry (rather than after it) so the accumulated
    message never carries a trailing newline when handed back to the
    caller.

    NOTE: the parameter deliberately keeps its historical name `str`
    (shadowing the builtin) to preserve the keyword-call interface.
    """
    if str:
        # Unlike other rejects we add new lines first to avoid trailing
        # new lines when this message is passed back up to a caller.
        if self.reject_message:
            self.reject_message += "\n"
        self.reject_message += prefix + str
772 ################################################################################
774 def get_anyversion(self, query_result, suite):
# Return the highest version found in `query_result` (pairs of
# (version, suite_name)) among `suite` and the suites that enhance it
# per Suite::<suite>::VersionChecks::Enhances.
# NOTE(review): lines are missing from this dump (the `anyversion = None`
# initialisation, the assignment inside the inner if, and the return) —
# code kept verbatim; confirm against upstream.
776 anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
777 for (v, s) in query_result:
778 if s in [ x.lower() for x in anysuite ]:
# apt_pkg.VersionCompare: keep v when it sorts >= the current best.
779 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
783 ################################################################################
785 def cross_suite_version_check(self, query_result, file, new_version):
786 """Ensure versions are newer than existing packages in target
787 suites and that cross-suite version checking rules as
788 set out in the conf file are satisfied."""
# Rejections are accumulated via self.reject(); the propagation
# ("propup") machinery may instead schedule the upload for an extra
# suite via changes["propdistribution"].
# NOTE(review): lines are missing from this dump (the `suite = entry[1]`
# binding in the inner loop, blank separators, `else:` scaffolding) —
# code kept verbatim.
# NOTE(review): `i.lower` on the next two lines is missing its call
# parentheses — as written these lists hold bound methods, not
# lower-cased strings, so the membership tests can never match.
# Probable bug; flagged only (block incomplete, not safe to rewrite).
790 # Check versions for each target suite
791 for target_suite in self.pkg.changes["distribution"].keys():
792 must_be_newer_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
793 must_be_older_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
794 # Enforce "must be newer than target suite" even if conffile omits it
795 if target_suite not in must_be_newer_than:
796 must_be_newer_than.append(target_suite)
797 for entry in query_result:
798 existent_version = entry[0]
800 if suite in must_be_newer_than and \
801 apt_pkg.VersionCompare(new_version, existent_version) < 1:
802 self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
803 if suite in must_be_older_than and \
804 apt_pkg.VersionCompare(new_version, existent_version) > -1:
805 ch = self.pkg.changes
# distribution-version maps a conflicting suite to the one to use.
807 if ch.get('distribution-version', {}).has_key(suite):
808 # we really use the other suite, ignoring the conflicting one ...
809 addsuite = ch["distribution-version"][suite]
811 add_version = self.get_anyversion(query_result, addsuite)
812 target_version = self.get_anyversion(query_result, target_suite)
815 # not add_version can only happen if we map to a suite
816 # that doesn't enhance the suite we're propup'ing from.
817 # so "propup-ver x a b c; map a d" is a problem only if
818 # d doesn't enhance a.
820 # i think we could always propagate in this case, rather
821 # than complaining. either way, this isn't a REJECT issue
823 # And - we really should complain to the dorks who configured dak
824 self.reject("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite), "Warning: ")
825 self.pkg.changes.setdefault("propdistribution", {})
826 self.pkg.changes["propdistribution"][addsuite] = 1
828 elif not target_version:
829 # not targets_version is true when the package is NEW
830 # we could just stick with the "...old version..." REJECT
832 self.reject("Won't propogate NEW packages.")
833 elif apt_pkg.VersionCompare(new_version, add_version) < 0:
834 # propogation would be redundant. no need to reject though.
835 self.reject("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite), "Warning: ")
# Propagate: new version beats addsuite's and addsuite >= target.
837 elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
838 apt_pkg.VersionCompare(add_version, target_version) >= 0:
840 self.reject("Propogating upload to %s" % (addsuite), "Warning: ")
841 self.pkg.changes.setdefault("propdistribution", {})
842 self.pkg.changes["propdistribution"][addsuite] = 1
# Fallback: plain must-be-older violation with no remapping — reject.
846 self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
848 ################################################################################
850 def check_binary_against_db(self, file):
# Validate one binary upload file against the database: run the
# cross-suite version checks for its (package, architecture), then
# reject if an identical (package, version, architecture) binary is
# already archived.  Returns the accumulated reject message ("" = OK).
# NOTE(review): lines are missing from this dump (the "architecture a"
# table in the first FROM clause, and the `if q.getresult():` guard
# before the overwrite reject) — code kept verbatim.  SQL is
# %-interpolated; flagged, not changed.
851 self.reject_message = ""
852 files = self.pkg.files
854 # Ensure version is sane
855 q = self.projectB.query("""
856 SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
858 WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
859 AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
860 % (files[file]["package"],
861 files[file]["architecture"]))
862 self.cross_suite_version_check(q.getresult(), file, files[file]["version"])
864 # Check for any existing copies of the file
865 q = self.projectB.query("""
866 SELECT b.id FROM binaries b, architecture a
867 WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
868 AND a.id = b.architecture"""
869 % (files[file]["package"],
870 files[file]["version"],
871 files[file]["architecture"]))
873 self.reject("%s: can not overwrite existing copy already in the archive." % (file))
875 return self.reject_message
877 ################################################################################
879 def check_source_against_db(self, file):
# Validate the source upload against the database: fetch every archived
# (version, suite) pair for this source package and run the cross-suite
# version checks.  Returns the accumulated reject message ("" = OK).
# NOTE(review): the `dsc = self.pkg.dsc` binding is missing from this
# dump — code kept verbatim.  SQL is %-interpolated; flagged only.
880 self.reject_message = ""
883 # Ensure version is sane
884 q = self.projectB.query("""
885 SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
886 WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")))
887 self.cross_suite_version_check(q.getresult(), file, dsc.get("version"))
889 return self.reject_message
891 ################################################################################
894 # NB: this function can remove entries from the 'files' index [if
895 # the .orig.tar.gz is a duplicate of the one in the archive]; if
896 # you're iterating over 'files' and call this function as part of
897 # the loop, be sure to add a check to the top of the loop to
898 # ensure you haven't just tried to dereference the deleted entry.
def check_dsc_against_db(self, file):
    """Check the files listed in a .dsc against the archive and queues.

    For each file named in the .dsc: if it is part of this upload,
    reject any conflicting copy already in the archive (an exact
    .orig.tar.gz duplicate is dropped from 'files' and reused instead);
    if it is a missing .orig.tar.gz, locate it in the pool or in the
    queue directories; otherwise reject.  Also verifies md5sum and size
    of whatever copy was found against the .dsc.

    Side effects: may delete entries from self.pkg.files (see the
    warning comment above this function), and sets
    self.pkg.orig_tar_gz / orig_tar_id / orig_tar_location.

    Returns (reject_message, unchecked_path), where unchecked_path is
    non-None only when the .orig.tar.gz was found in Dir::Queue::Unchecked.
    """
    self.reject_message = ""
    files = self.pkg.files
    dsc_files = self.pkg.dsc_files
    legacy_source_untouchable = self.pkg.legacy_source_untouchable
    self.pkg.orig_tar_gz = None

    # Try and find all files mentioned in the .dsc.  This has
    # to work harder to cope with the multiple possible
    # locations of an .orig.tar.gz.
    for dsc_file in dsc_files.keys():
        found = None
        if dsc_file in files:
            actual_md5 = files[dsc_file]["md5sum"]
            actual_size = int(files[dsc_file]["size"])
            found = "%s in incoming" % (dsc_file)
            # Check the file does not already exist in the archive
            q = self.projectB.query("SELECT f.size, f.md5sum, l.path, f.filename FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
            # Strip out anything that isn't '%s' or '/%s$'.  (Rebuild
            # the list rather than remove()-ing while iterating, which
            # can skip entries.)
            ql = [i for i in q.getresult()
                  if i[3] == dsc_file or i[3][-(len(dsc_file)+1):] == '/'+dsc_file]

            # "[dak] has not broken them.  [dak] has fixed a
            # brokenness.  Your crappy hack exploited a bug in
            # the old dak.
            #
            # "(Come on!  I thought it was always obvious that
            # one just doesn't release different files with
            # the same name and version.)"
            #                        -- ajk@ on d-devel@l.d.o

            if ql:
                # Ignore exact matches for .orig.tar.gz
                match = 0
                if dsc_file.endswith(".orig.tar.gz"):
                    for i in ql:
                        if dsc_file in files and \
                           int(files[dsc_file]["size"]) == int(i[0]) and \
                           files[dsc_file]["md5sum"] == i[1]:
                            self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ")
                            del files[dsc_file]
                            self.pkg.orig_tar_gz = i[2] + i[3]
                            match = 1

                if not match:
                    self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file))
        elif dsc_file.endswith(".orig.tar.gz"):
            # Check in the pool
            q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
            # Strip out anything that isn't '%s' or '/%s$'
            ql = [i for i in q.getresult()
                  if i[1] == dsc_file or i[1][-(len(dsc_file)+1):] == '/'+dsc_file]

            if ql:
                # Unfortunately, we may get more than one match here if,
                # for example, the package was in potato but had an -sa
                # upload in woody.  So we need to choose the right one.

                x = ql[0]; # default to something sane in case we don't match any or have only one

                if len(ql) > 1:
                    for i in ql:
                        old_file = i[0] + i[1]
                        old_file_fh = utils.open_file(old_file)
                        actual_md5 = apt_pkg.md5sum(old_file_fh)
                        old_file_fh.close()  # was leaked before
                        actual_size = os.stat(old_file)[stat.ST_SIZE]
                        if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
                            x = i
                        else:
                            legacy_source_untouchable[i[3]] = ""

                old_file = x[0] + x[1]
                old_file_fh = utils.open_file(old_file)
                actual_md5 = apt_pkg.md5sum(old_file_fh)
                old_file_fh.close()  # was leaked before
                actual_size = os.stat(old_file)[stat.ST_SIZE]
                found = old_file
                suite_type = x[2]
                dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
                # See install() in process-accepted...
                self.pkg.orig_tar_id = x[3]
                self.pkg.orig_tar_gz = old_file
                if suite_type == "legacy" or suite_type == "legacy-mixed":
                    self.pkg.orig_tar_location = "legacy"
                else:
                    self.pkg.orig_tar_location = x[4]
            else:
                # Not there? Check the queue directories...

                in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file)
                # See process_it() in 'dak process-unchecked' for explanation of this
                if os.path.exists(in_unchecked):
                    return (self.reject_message, in_unchecked)
                else:
                    for dir in [ "Accepted", "New", "Byhand" ]:
                        in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (dir)],dsc_file)
                        if os.path.exists(in_otherdir):
                            in_otherdir_fh = utils.open_file(in_otherdir)
                            actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
                            in_otherdir_fh.close()
                            actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
                            found = in_otherdir
                            self.pkg.orig_tar_gz = in_otherdir

                if not found:
                    self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file))
                    self.pkg.orig_tar_gz = -1
                    continue
        else:
            self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file))
            continue

        if actual_md5 != dsc_files[dsc_file]["md5sum"]:
            self.reject("md5sum for %s doesn't match %s." % (found, file))
        if actual_size != int(dsc_files[dsc_file]["size"]):
            self.reject("size for %s doesn't match %s." % (found, file))

    return (self.reject_message, None)
1024 def do_query(self, q):
1025 sys.stderr.write("query: \"%s\" ... " % (q))
1026 before = time.time()
1027 r = self.projectB.query(q)
1028 time_diff = time.time()-before
1029 sys.stderr.write("took %.3f seconds.\n" % (time_diff))