# Queue utility functions for dak
# Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006  James Troup <james@nocrew.org>

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

###############################################################################
import cPickle, errno, os, pg, re, stat, sys, time
import apt_inst, apt_pkg
import utils, database
from types import *   # DictType, used by update_subst() and build_summaries()

###############################################################################
re_isanum = re.compile (r"^\d+$")
re_default_answer = re.compile(r"\[(.*)\]")
re_fdnic = re.compile(r"\n\n")
re_bin_only_nmu = re.compile(r"\+b\d+$")
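# re_bin_only_nmu matches the "+bN" suffix that a binary-only NMU appends
# to the source version, e.g. "1.0-3+b1" or "1.0-3.1+b2".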
###############################################################################
# Convenience wrapper to carry around all the package information in
# memory.

class Pkg:
    def __init__(self, **kwds):
        self.__dict__.update(kwds)

    def update(self, **kwds):
        self.__dict__.update(kwds)
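# Typical use (sketch, mirroring Upload.__init__ below):
#
#     pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {})
#     pkg.update(orig_tar_id = None)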
###############################################################################
class nmu_p:
    # Read in the group maintainer override file
    def __init__ (self, Cnf):
        self.group_maint = {}
        self.Cnf = Cnf
        if Cnf.get("Dinstall::GroupOverrideFilename"):
            filename = Cnf["Dir::Override"] + Cnf["Dinstall::GroupOverrideFilename"]
            file = utils.open_file(filename)
            for line in file.readlines():
                line = utils.re_comments.sub('', line).lower().strip()
                if line != "":
                    self.group_maint[line] = 1
            file.close()
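    # The override file is a plain-text list of maintainer e-mail
    # addresses, one per line ('#' comments are stripped); a package
    # whose Maintainer address is listed is treated as group-maintained.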
    def is_an_nmu (self, pkg):
        Cnf = self.Cnf
        changes = pkg.changes
        dsc = pkg.dsc

        i = utils.fix_maintainer (dsc.get("maintainer",
                                          Cnf["Dinstall::MyEmailAddress"]).lower())
        (dsc_rfc822, dsc_rfc2047, dsc_name, dsc_email) = i
        # changes["changedbyname"] == dsc_name is probably never true, but better safe than sorry
        if dsc_name == changes["maintainername"].lower() and \
           (changes["changedby822"] == "" or changes["changedbyname"].lower() == dsc_name):
            return 0

        if dsc.has_key("uploaders"):
            uploaders = dsc["uploaders"].lower().split(",")
            uploadernames = {}
            for i in uploaders:
                (rfc822, rfc2047, name, email) = utils.fix_maintainer (i.strip())
                uploadernames[name] = ""
            if uploadernames.has_key(changes["changedbyname"].lower()):
                return 0

        # Some group-maintained packages (e.g. Debian QA) are never NMUs
        if self.group_maint.has_key(changes["maintaineremail"].lower()):
            return 0

        return 1
###############################################################################

class Upload:

    def __init__(self, Cnf):
        self.Cnf = Cnf
        # Read in the group-maint override file
        self.nmu = nmu_p(Cnf)
        self.accept_count = 0
        self.accept_bytes = 0L
        self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
                       legacy_source_untouchable = {})

        # Initialize the substitution template mapping global
        Subst = self.Subst = {}
        Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"]
        Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"]
        Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"]
        Subst["__DAK_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]

        self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
        database.init(Cnf, self.projectB)
    ###########################################################################

    def init_vars (self):
        for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
            exec "self.pkg.%s.clear();" % (i)
        self.pkg.orig_tar_id = None
        self.pkg.orig_tar_location = ""
        self.pkg.orig_tar_gz = None
    ###########################################################################

    def update_vars (self):
        dump_filename = self.pkg.changes_file[:-8]+".dak"
        dump_file = utils.open_file(dump_filename)
        p = cPickle.Unpickler(dump_file)
        for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
            exec "self.pkg.%s.update(p.load());" % (i)
        for i in [ "orig_tar_id", "orig_tar_location" ]:
            exec "self.pkg.%s = p.load();" % (i)
        dump_file.close()
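        # The unpickle order above must mirror the pickle order in
        # dump_vars() below; "[:-8]" strips the ".changes" suffix, so
        # e.g. foo_1.0_i386.changes is restored from foo_1.0_i386.dak.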
    ###########################################################################

    # This could just dump the dictionaries as-is, but I'd like to
    # avoid that so there's some idea of what process-accepted &
    # process-new use from process-unchecked.

    def dump_vars(self, dest_dir):
        for i in [ "changes", "dsc", "files", "dsc_files",
                   "legacy_source_untouchable", "orig_tar_id", "orig_tar_location" ]:
            exec "%s = self.pkg.%s;" % (i,i)
        dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".dak")
        dump_file = utils.open_file(dump_filename, 'w')
        try:
            os.chmod(dump_filename, 0660)
        except OSError, e:
            if errno.errorcode[e.errno] == 'EPERM':
                perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE])
                if perms & stat.S_IROTH:
                    utils.fubar("%s is world readable and chmod failed." % (dump_filename))
            else:
                raise

        p = cPickle.Pickler(dump_file, 1)
        for i in [ "d_changes", "d_dsc", "d_files", "d_dsc_files" ]:
            exec "%s = {}" % (i)
        ## files
        for file in files.keys():
            d_files[file] = {}
            for i in [ "package", "version", "architecture", "type", "size",
                       "md5sum", "component", "location id", "source package",
                       "source version", "maintainer", "dbtype", "files id",
                       "new", "section", "priority", "othercomponents",
                       "pool name", "original component" ]:
                if files[file].has_key(i):
                    d_files[file][i] = files[file][i]
        ## changes
        # Mandatory changes fields
        for i in [ "distribution", "source", "architecture", "version",
                   "maintainer", "urgency", "fingerprint", "changedby822",
                   "changedby2047", "changedbyname", "maintainer822",
                   "maintainer2047", "maintainername", "maintaineremail",
                   "closes", "changes" ]:
            d_changes[i] = changes[i]
        # Optional changes fields
        for i in [ "changed-by", "filecontents", "format", "process-new note", "distribution-version" ]:
            if changes.has_key(i):
                d_changes[i] = changes[i]
        ## dsc
        for i in [ "source", "version", "maintainer", "fingerprint",
                   "uploaders", "bts changelog" ]:
            if dsc.has_key(i):
                d_dsc[i] = dsc[i]
        ## dsc_files
        for file in dsc_files.keys():
            d_dsc_files[file] = {}
            # Mandatory dsc_files fields
            for i in [ "size", "md5sum" ]:
                d_dsc_files[file][i] = dsc_files[file][i]
            # Optional dsc_files fields
            for i in [ "files id" ]:
                if dsc_files[file].has_key(i):
                    d_dsc_files[file][i] = dsc_files[file][i]

        for i in [ d_changes, d_dsc, d_files, d_dsc_files,
                   legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
            p.dump(i)
        dump_file.close()
    ###########################################################################

    # Set up the per-package template substitution mappings

    def update_subst (self, reject_message = ""):
        Subst = self.Subst
        changes = self.pkg.changes
        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
            changes["architecture"] = { "Unknown" : "" }
        # and maintainer2047 may not exist.
        if not changes.has_key("maintainer2047"):
            changes["maintainer2047"] = self.Cnf["Dinstall::MyEmailAddress"]

        Subst["__ARCHITECTURE__"] = " ".join(changes["architecture"].keys())
        Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
            Subst["__MAINTAINER_FROM__"] = changes["changedby2047"]
            Subst["__MAINTAINER_TO__"] = "%s, %s" % (changes["changedby2047"],
                                                     changes["maintainer2047"])
            Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown")
        else:
            Subst["__MAINTAINER_FROM__"] = changes["maintainer2047"]
            Subst["__MAINTAINER_TO__"] = changes["maintainer2047"]
            Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown")
        if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
            Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if self.Cnf.get("Dinstall::OverrideMaintainer"):
            Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"]
            Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"]

        Subst["__REJECT_MESSAGE__"] = reject_message
        Subst["__SOURCE__"] = changes.get("source", "Unknown")
        Subst["__VERSION__"] = changes.get("version", "Unknown")
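        # Illustrative result (values made up): after this runs, self.Subst
        # might map __SOURCE__ -> "hello", __VERSION__ -> "1.0-1" and
        # __MAINTAINER_TO__ -> the RFC2047-encoded Changed-By plus
        # Maintainer addresses, ready for utils.TemplateSubst() to expand
        # the mail templates.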
    ###########################################################################

    def build_summaries(self):
        changes = self.pkg.changes
        files = self.pkg.files

        byhand = summary = new = ""

        # changes["distribution"] may not exist in corner cases
        # (e.g. unreadable changes files)
        if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
            changes["distribution"] = {}

        file_keys = files.keys()
        file_keys.sort()
        for file in file_keys:
            if files[file].has_key("byhand"):
                byhand = 1
                summary += file + " byhand\n"
            elif files[file].has_key("new"):
                new = 1
                summary += "(new) %s %s %s\n" % (file, files[file]["priority"], files[file]["section"])
                if files[file].has_key("othercomponents"):
                    summary += "WARNING: Already present in %s distribution.\n" % (files[file]["othercomponents"])
                if files[file]["type"] == "deb":
                    deb_fh = utils.open_file(file)
                    summary += apt_pkg.ParseSection(apt_inst.debExtractControl(deb_fh))["Description"] + '\n'
                    deb_fh.close()
            else:
                files[file]["pool name"] = utils.poolify (changes.get("source",""), files[file]["component"])
                destination = self.Cnf["Dir::PoolRoot"] + files[file]["pool name"] + file
                summary += file + "\n  to " + destination + "\n"

        short_summary = summary

        # This is for direport's benefit...
        f = re_fdnic.sub("\n .\n", changes.get("changes",""))

        if byhand or new:
            summary += "Changes: " + f

        summary += self.announce(short_summary, 0)

        return (summary, short_summary)
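    # Illustrative output (values made up): for a NEW .deb the summary
    # gains a line like
    #     (new) foo_1.0-1_i386.deb optional devel
    # followed by the package's Description: field, while already-known
    # files just list their pool destination.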
    ###########################################################################

    def close_bugs (self, summary, action):
        changes = self.pkg.changes
        Subst = self.Subst
        Cnf = self.Cnf

        bugs = changes["closes"].keys()

        if not bugs:
            return summary

        bugs.sort()
        if not self.nmu.is_an_nmu(self.pkg):
            if changes["distribution"].has_key("experimental"):
                # tag bugs as fixed-in-experimental for uploads to experimental
                summary += "Setting bugs to severity fixed: "
                control_message = ""
                for bug in bugs:
                    summary += "%s " % (bug)
                    control_message += "tag %s + fixed-in-experimental\n" % (bug)
                if action and control_message != "":
                    Subst["__CONTROL_MESSAGE__"] = control_message
                    mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-experimental-fixed")
                    utils.send_mail (mail_message)
                if action:
                    self.Logger.log(["setting bugs to fixed"]+bugs)

            else:
                summary += "Closing bugs: "
                for bug in bugs:
                    summary += "%s " % (bug)
                    if action:
                        Subst["__BUG_NUMBER__"] = bug
                        if changes["distribution"].has_key("stable"):
                            Subst["__STABLE_WARNING__"] = """
Note that this package is not part of the released stable Debian
distribution.  It may have dependencies on other unreleased software,
or other instabilities.  Please take care if you wish to install it.
The update will eventually make its way into the next released Debian
distribution."""
                        else:
                            Subst["__STABLE_WARNING__"] = ""
                        mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-close")
                        utils.send_mail (mail_message)
                if action:
                    self.Logger.log(["closing bugs"]+bugs)

        else:                  # NMU
            summary += "Setting bugs to severity fixed: "
            control_message = ""
            for bug in bugs:
                summary += "%s " % (bug)
                control_message += "tag %s + fixed\n" % (bug)
            if action and control_message != "":
                Subst["__CONTROL_MESSAGE__"] = control_message
                mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-nmu-fixed")
                utils.send_mail (mail_message)
            if action:
                self.Logger.log(["setting bugs to fixed"]+bugs)
        summary += "\n"

        return summary

    ###########################################################################
    def announce (self, short_summary, action):
        Subst = self.Subst
        Cnf = self.Cnf
        changes = self.pkg.changes

        # Only do announcements for source uploads with a recent dpkg-dev installed
        if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):
            return ""

        lists_done = {}
        summary = ""
        Subst["__SHORT_SUMMARY__"] = short_summary

        for dist in changes["distribution"].keys():
            list = Cnf.Find("Suite::%s::Announce" % (dist))
            if list == "" or lists_done.has_key(list):
                continue
            lists_done[list] = 1
            summary += "Announcing to %s\n" % (list)

            if action:
                Subst["__ANNOUNCE_LIST_ADDRESS__"] = list
                if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
                    Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"])
                mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.announce")
                utils.send_mail (mail_message)

        if Cnf.FindB("Dinstall::CloseBugs"):
            summary = self.close_bugs(summary, action)

        return summary
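    # A hypothetical configuration sketch for the lookup above: each
    # suite names the mailing list its accepted uploads are announced to,
    # e.g.
    #     Suite::unstable::Announce "debian-devel-changes@lists.debian.org";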
    ###########################################################################
    def accept (self, summary, short_summary):
        Cnf = self.Cnf
        Subst = self.Subst
        files = self.pkg.files
        changes = self.pkg.changes
        changes_file = self.pkg.changes_file
        dsc = self.pkg.dsc

        print "Accepting."
        self.Logger.log(["Accepting changes",changes_file])

        self.dump_vars(Cnf["Dir::Queue::Accepted"])

        # Move all the files into the accepted directory
        utils.move(changes_file, Cnf["Dir::Queue::Accepted"])
        file_keys = files.keys()
        for file in file_keys:
            utils.move(file, Cnf["Dir::Queue::Accepted"])
            self.accept_bytes += float(files[file]["size"])
        self.accept_count += 1

        # Send accept mail, announce to lists, close bugs and check for
        # override disparities
        if not Cnf["Dinstall::Options::No-Mail"]:
            Subst["__SUITE__"] = ""
            Subst["__SUMMARY__"] = summary
            mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
            utils.send_mail(mail_message)
            self.announce(short_summary, 1)

        ## Helper stuff for DebBugs Version Tracking
        if Cnf.Find("Dir::Queue::BTSVersionTrack"):
            # ??? once queue/* is cleared on *.d.o and/or reprocessed
            # the conditionalization on dsc["bts changelog"] should be
            # removed.

            # Write out the version history from the changelog
            if changes["architecture"].has_key("source") and \
               dsc.has_key("bts changelog"):

                temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                                    dotprefix=1, perms=0644)
                version_history = utils.open_file(temp_filename, 'w')
                version_history.write(dsc["bts changelog"])
                version_history.close()
                filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                      changes_file[:-8]+".versions")
                os.rename(temp_filename, filename)

            # Write out the binary -> source mapping.
            temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                                dotprefix=1, perms=0644)
            debinfo = utils.open_file(temp_filename, 'w')
            for file in file_keys:
                f = files[file]
                if f["type"] == "deb":
                    line = " ".join([f["package"], f["version"],
                                     f["architecture"], f["source package"],
                                     f["source version"]])
                    debinfo.write(line+"\n")
            debinfo.close()
            filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                  changes_file[:-8]+".debinfo")
            os.rename(temp_filename, filename)
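            # The resulting files feed DebBugs version tracking:
            # <changes>.versions holds the changelog version history and
            # <changes>.debinfo has one line per .deb of the form
            # "package version architecture source-package source-version".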
        self.queue_build("accepted", Cnf["Dir::Queue::Accepted"])

    ###########################################################################
    def queue_build (self, queue, path):
        Cnf = self.Cnf
        Subst = self.Subst
        files = self.pkg.files
        changes = self.pkg.changes
        changes_file = self.pkg.changes_file

        file_keys = files.keys()

        ## Special support to enable clean auto-building of queued packages
        queue_id = database.get_or_set_queue_id(queue)

        self.projectB.query("BEGIN WORK")
        for suite in changes["distribution"].keys():
            if suite not in Cnf.ValueList("Dinstall::QueueBuildSuites"):
                continue
            suite_id = database.get_suite_id(suite)
            dest_dir = Cnf["Dir::QueueBuild"]
            if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                dest_dir = os.path.join(dest_dir, suite)
            for file in file_keys:
                src = os.path.join(path, file)
                dest = os.path.join(dest_dir, file)
                if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                    # Copy it since the original won't be readable by www-data
                    utils.copy(src, dest)
                else:
                    # Create a symlink to it
                    os.symlink(src, dest)
                # Add it to the list of packages for later processing by apt-ftparchive
                self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
            # If the .orig.tar.gz is in the pool, create a symlink to
            # it (if one doesn't already exist)
            if self.pkg.orig_tar_id:
                # Determine the .orig.tar.gz file name
                for dsc_file in self.pkg.dsc_files.keys():
                    if dsc_file.endswith(".orig.tar.gz"):
                        filename = dsc_file
                dest = os.path.join(dest_dir, filename)
                # If it doesn't exist, create a symlink
                if not os.path.exists(dest):
                    # Find the .orig.tar.gz in the pool
                    q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id))
                    ql = q.getresult()
                    if not ql:
                        utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id))
                    src = os.path.join(ql[0][0], ql[0][1])
                    os.symlink(src, dest)
                    # Add it to the list of packages for later processing by apt-ftparchive
                    self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
                # if it does, update things to ensure it's not removed prematurely
                else:
                    self.projectB.query("UPDATE queue_build SET in_queue = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id))

        self.projectB.query("COMMIT WORK")
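    # queue_build rows with in_queue = 't' mark files the auto-builders
    # may still need; resetting last_used to NULL above keeps an existing
    # entry from being cleaned up prematurely while the file is back in
    # the queue.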
    ###########################################################################

    def check_override (self):
        Subst = self.Subst
        changes = self.pkg.changes
        files = self.pkg.files
        Cnf = self.Cnf

        # Abandon the check if:
        #  a) it's a non-sourceful upload
        #  b) override disparity checks have been disabled
        #  c) we're not sending mail
        if not changes["architecture"].has_key("source") or \
           not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
           Cnf["Dinstall::Options::No-Mail"]:
            return

        summary = ""
        file_keys = files.keys()
        file_keys.sort()
        for file in file_keys:
            if not files[file].has_key("new") and files[file]["type"] == "deb":
                section = files[file]["section"]
                override_section = files[file]["override section"]
                if section.lower() != override_section.lower() and section != "-":
                    # Ignore this; it's a common mistake and not worth whining about
                    if section.lower() == "non-us/main" and override_section.lower() == "non-us":
                        continue
                    summary += "%s: package says section is %s, override says %s.\n" % (file, section, override_section)
                priority = files[file]["priority"]
                override_priority = files[file]["override priority"]
                if priority != override_priority and priority != "-":
                    summary += "%s: package says priority is %s, override says %s.\n" % (file, priority, override_priority)

        if summary == "":
            return

        Subst["__SUMMARY__"] = summary
        mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/process-unchecked.override-disparity")
        utils.send_mail(mail_message)
    ###########################################################################

    def force_reject (self, files):
        """Forcefully move files from the current directory to the
           reject directory.  If any file already exists in the reject
           directory it will be moved to the morgue to make way for
           the new file."""

        Cnf = self.Cnf

        for file in files:
            # Skip any files which don't exist or which we don't have permission to copy.
            if os.access(file,os.R_OK) == 0:
                continue
            dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file)
            try:
                dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
            except OSError, e:
                # File exists?  Let's try and move it to the morgue
                if errno.errorcode[e.errno] == 'EEXIST':
                    morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file)
                    try:
                        morgue_file = utils.find_next_free(morgue_file)
                    except utils.tried_too_hard_exc:
                        # Something's either gone badly Pete Tong, or
                        # someone is trying to exploit us.
                        utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file))
                        return
                    utils.move(dest_file, morgue_file, perms=0660)
                    try:
                        dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
                    except OSError, e:
                        # Likewise
                        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file))
                        return
                else:
                    raise
            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file, dest_file, 1, perms=0660)
            os.close(dest_fd)
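    # Opening with os.O_EXCL (never O_TRUNC) means we only ever write to
    # a reject file we created ourselves; a pre-existing file (possibly
    # planted to exploit a race) is shunted to the morgue instead of
    # being overwritten.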
    ###########################################################################

    def do_reject (self, manual = 0, reject_message = ""):
        # If we weren't given a manual rejection message, spawn an
        # editor so the user can add one in...
        if manual and not reject_message:
            temp_filename = utils.temp_filename()
            editor = os.environ.get("EDITOR","vi")
            answer = 'E'
            while answer == 'E':
                os.system("%s %s" % (editor, temp_filename))
                temp_fh = utils.open_file(temp_filename)
                reject_message = "".join(temp_fh.readlines())
                temp_fh.close()
                print "Reject message:"
                print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
                prompt = "[R]eject, Edit, Abandon, Quit ?"
                answer = "XXX"
                while prompt.find(answer) == -1:
                    answer = utils.our_raw_input(prompt)
                    m = re_default_answer.search(prompt)
                    if answer == "":
                        answer = m.group(1)
                    answer = answer[:1].upper()
            os.unlink(temp_filename)
            if answer == 'A':
                return 1
            elif answer == 'Q':
                sys.exit(0)

        print "Rejecting.\n"

        Cnf = self.Cnf
        Subst = self.Subst
        pkg = self.pkg

        reason_filename = pkg.changes_file[:-8] + ".reason"
        reason_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename

        # Move all the files into the reject directory
        reject_files = pkg.files.keys() + [pkg.changes_file]
        self.force_reject(reject_files)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reason_filename):
            os.unlink(reason_filename)
        reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)

        if not manual:
            Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
            Subst["__MANUAL_REJECT_MESSAGE__"] = ""
            Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
            os.write(reason_fd, reject_message)
            reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/dak.rejected")
        else:
            # Build up the rejection email
            user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"])

            Subst["__REJECTOR_ADDRESS__"] = user_email_address
            Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
            Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
            reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/dak.rejected")
            # Write the rejection email out as the <foo>.reason file
            os.write(reason_fd, reject_mail_message)

        os.close(reason_fd)

        # Send the rejection mail if appropriate
        if not Cnf["Dinstall::Options::No-Mail"]:
            utils.send_mail(reject_mail_message)

        self.Logger.log(["rejected", pkg.changes_file])
        return 0
    ################################################################################

    # Ensure that source exists somewhere in the archive for the binary
    # upload being processed.
    #
    # (1) exact match     => 1.0-3
    # (2) Bin-only NMU    => 1.0-3+b1 , 1.0-3.1+b1
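    # e.g. a hypothetical binary upload of foo 1.0-3+b1 is accepted if
    # source foo 1.0-3 is present; re_bin_only_nmu strips the "+b1" to
    # recover the source version for check (2).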
    def source_exists (self, package, source_version, suites = ["any"]):
        okay = 1
        for suite in suites:
            if suite == "any":
                que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
                      (package)
            else:
                # source must exist in suite X, or in some other suite that's
                # mapped to X, recursively... silent-maps are counted too,
                # unreleased-maps aren't.
                maps = self.Cnf.ValueList("SuiteMappings")[:]
                maps.reverse()
                maps = [ m.split() for m in maps ]
                maps = [ (x[1], x[2]) for x in maps
                         if x[0] == "map" or x[0] == "silent-map" ]
                s = [suite]
                for x in maps:
                    if x[1] in s and x[0] not in s:
                        s.append(x[0])

                que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, " OR ".join(["su.suite_name = '%s'" % a for a in s]))
            q = self.projectB.query(que)

            # Reduce the query results to a list of version numbers
            ql = [ i[0] for i in q.getresult() ]

            # Try (1)
            if source_version in ql:
                continue

            # Try (2)
            orig_source_version = re_bin_only_nmu.sub('', source_version)
            if orig_source_version in ql:
                continue

            # No source found...
            okay = 0
            break
        return okay

    ################################################################################
    def in_override_p (self, package, component, suite, binary_type, file):
        files = self.pkg.files

        if binary_type == "": # must be source
            type = "dsc"
        else:
            type = binary_type

        # Override suite name; used for example with proposed-updates
        if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
            suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)]

        # Avoid <undef> on unknown distributions
        suite_id = database.get_suite_id(suite)
        if suite_id == -1:
            return None
        component_id = database.get_component_id(component)
        type_id = database.get_override_type_id(type)

        # FIXME: nasty non-US specific hack
        if component.lower().startswith("non-us/"):
            component = component[7:]

        q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
                                % (package, suite_id, component_id, type_id))
        result = q.getresult()
        # If checking for a source package fall back on the binary override type
        if type == "dsc" and not result:
            deb_type_id = database.get_override_type_id("deb")
            udeb_type_id = database.get_override_type_id("udeb")
            q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
                                    % (package, suite_id, component_id, deb_type_id, udeb_type_id))
            result = q.getresult()

        # Remember the section and priority so we can check them later if appropriate
        if result:
            files[file]["override section"] = result[0][0]
            files[file]["override priority"] = result[0][1]

        return result

    ################################################################################
    def reject (self, str, prefix="Rejected: "):
        if str:
            # Unlike other rejects we add new lines first to avoid trailing
            # new lines when this message is passed back up to a caller.
            if self.reject_message:
                self.reject_message += "\n"
            self.reject_message += prefix + str

    ################################################################################
    def get_anyversion(self, query_result, suite):
        anyversion = None
        anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
        for (v, s) in query_result:
            if s in [ x.lower() for x in anysuite ]:
                if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
                    anyversion = v
        return anyversion

    ################################################################################
    def cross_suite_version_check(self, query_result, file, new_version):
        """Ensure versions are newer than existing packages in target
        suites and that cross-suite version checking rules as
        set out in the conf file are satisfied."""
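        # A hypothetical conf sketch of the rules enforced below, in
        # apt.conf syntax:
        #
        #     Suite::testing::VersionChecks::MustBeNewerThan { stable; };
        #     Suite::testing::VersionChecks::MustBeOlderThan { experimental; };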
        # Check versions for each target suite
        for target_suite in self.pkg.changes["distribution"].keys():
            must_be_newer_than = [ i.lower() for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
            must_be_older_than = [ i.lower() for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
            # Enforce "must be newer than target suite" even if conffile omits it
            if target_suite not in must_be_newer_than:
                must_be_newer_than.append(target_suite)
            for entry in query_result:
                existent_version = entry[0]
                suite = entry[1]
                if suite in must_be_newer_than and \
                   apt_pkg.VersionCompare(new_version, existent_version) < 1:
                    self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
                if suite in must_be_older_than and \
                   apt_pkg.VersionCompare(new_version, existent_version) > -1:
                    ch = self.pkg.changes
                    cansave = 0
                    if ch.get('distribution-version', {}).has_key(suite):
                        # we really use the other suite, ignoring the conflicting one ...
                        addsuite = ch["distribution-version"][suite]

                        add_version = self.get_anyversion(query_result, addsuite)
                        target_version = self.get_anyversion(query_result, target_suite)

                        if not add_version:
                            # not add_version can only happen if we map to a suite
                            # that doesn't enhance the suite we're propup'ing from.
                            # so "propup-ver x a b c; map a d" is a problem only if
                            # d doesn't enhance a.
                            #
                            # i think we could always propagate in this case, rather
                            # than complaining. either way, this isn't a REJECT issue
                            #
                            # And - we really should complain to the dorks who configured dak
                            self.reject("%s is mapped to, but not enhanced by %s - adding anyway" % (suite, addsuite), "Warning: ")
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            cansave = 1
                        elif not target_version:
                            # not target_version is true when the package is NEW;
                            # we could just stick with the "...old version..." REJECT
                            # but override it with the new info
                            self.reject("Won't propagate NEW packages.")
                        elif apt_pkg.VersionCompare(new_version, add_version) < 0:
                            # propagation would be redundant. no need to reject though.
                            self.reject("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite), "Warning: ")
                            cansave = 1
                        elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
                             apt_pkg.VersionCompare(add_version, target_version) >= 0:
                            # propagate!
                            self.reject("Propagating upload to %s" % (addsuite), "Warning: ")
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            cansave = 1

                    if not cansave:
                        self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
    ################################################################################

    def check_binary_against_db(self, file):
        self.reject_message = ""
        files = self.pkg.files

        # Ensure version is sane
        q = self.projectB.query("""
SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
       architecture a
 WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
   AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
                                % (files[file]["package"],
                                   files[file]["architecture"]))
        self.cross_suite_version_check(q.getresult(), file, files[file]["version"])

        # Check for any existing copies of the file
        q = self.projectB.query("""
SELECT b.id FROM binaries b, architecture a
 WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
   AND a.id = b.architecture"""
                                % (files[file]["package"],
                                   files[file]["version"],
                                   files[file]["architecture"]))
        if q.getresult():
            self.reject("%s: can not overwrite existing copy already in the archive." % (file))

        return self.reject_message
    ################################################################################

    def check_source_against_db(self, file):
        self.reject_message = ""
        dsc = self.pkg.dsc

        # Ensure version is sane
        q = self.projectB.query("""
SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
 WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")))
        self.cross_suite_version_check(q.getresult(), file, dsc.get("version"))

        return self.reject_message
    ################################################################################

    # NB: this function can remove entries from the 'files' index [if
    # the .orig.tar.gz is a duplicate of the one in the archive]; if
    # you're iterating over 'files' and call this function as part of
    # the loop, be sure to add a check to the top of the loop to
    # ensure you haven't just tried to dereference the deleted entry.
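    # A sketch of the guard that note asks for:
    #
    #     for f in files.keys():
    #         if not files.has_key(f):   # entry deleted by check_dsc_against_db()
    #             continue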
    def check_dsc_against_db(self, file):
        self.reject_message = ""
        files = self.pkg.files
        dsc_files = self.pkg.dsc_files
        legacy_source_untouchable = self.pkg.legacy_source_untouchable
        self.pkg.orig_tar_gz = None

        # Try and find all files mentioned in the .dsc.  This has
        # to work harder to cope with the multiple possible
        # locations of an .orig.tar.gz.
        for dsc_file in dsc_files.keys():
            found = None
            if files.has_key(dsc_file):
                actual_md5 = files[dsc_file]["md5sum"]
                actual_size = int(files[dsc_file]["size"])
                found = "%s in incoming" % (dsc_file)
                # Check the file does not already exist in the archive
                q = self.projectB.query("SELECT f.size, f.md5sum, l.path, f.filename FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
                ql = q.getresult()
                # Strip out anything that isn't '%s' or '/%s$'
                for i in ql:
                    if i[3] != dsc_file and i[3][-(len(dsc_file)+1):] != '/'+dsc_file:
                        ql.remove(i)

                # "[dak] has not broken them.  [dak] has fixed a
                # brokenness.  Your crappy hack exploited a bug in
                # the old dinstall.
                #
                # "(Come on!  I thought it was always obvious that
                # one just doesn't release different files with
                # the same name and version.)"
                #                        -- ajk@ on d-devel@l.d.o

                if ql:
                    # Ignore exact matches for .orig.tar.gz
                    match = 0
                    if dsc_file.endswith(".orig.tar.gz"):
                        for i in ql:
                            if files.has_key(dsc_file) and \
                               int(files[dsc_file]["size"]) == int(i[0]) and \
                               files[dsc_file]["md5sum"] == i[1]:
                                self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ")
                                del files[dsc_file]
                                self.pkg.orig_tar_gz = i[2] + i[3]
                                match = 1

                    if not match:
                        self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file))
            elif dsc_file.endswith(".orig.tar.gz"):
                # Check in the pool
                q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
                ql = q.getresult()
                # Strip out anything that isn't '%s' or '/%s$'
                for i in ql:
                    if i[1] != dsc_file and i[1][-(len(dsc_file)+1):] != '/'+dsc_file:
                        ql.remove(i)

                if ql:
                    # Unfortunately, we may get more than one match here if,
                    # for example, the package was in potato but had an -sa
                    # upload in woody.  So we need to choose the right one.

                    x = ql[0]; # default to something sane in case we don't match any or have only one

                    if len(ql) > 1:
                        for i in ql:
                            old_file = i[0] + i[1]
                            old_file_fh = utils.open_file(old_file)
                            actual_md5 = apt_pkg.md5sum(old_file_fh)
                            old_file_fh.close()
                            actual_size = os.stat(old_file)[stat.ST_SIZE]
                            if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
                                x = i
                            else:
                                legacy_source_untouchable[i[3]] = ""

                    old_file = x[0] + x[1]
                    old_file_fh = utils.open_file(old_file)
                    actual_md5 = apt_pkg.md5sum(old_file_fh)
                    old_file_fh.close()
                    actual_size = os.stat(old_file)[stat.ST_SIZE]
                    found = old_file
                    suite_type = x[2]
                    dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
                    # See install() in process-accepted...
                    self.pkg.orig_tar_id = x[3]
                    self.pkg.orig_tar_gz = old_file
                    if suite_type == "legacy" or suite_type == "legacy-mixed":
                        self.pkg.orig_tar_location = "legacy"
                    else:
                        self.pkg.orig_tar_location = x[4]
                else:
                    # Not there? Check the queue directories...

                    in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file)
                    # See process_it() in 'dak process-unchecked' for explanation of this
                    if os.path.exists(in_unchecked):
                        return (self.reject_message, in_unchecked)
                    else:
                        for dir in [ "Accepted", "New", "Byhand" ]:
                            in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (dir)],dsc_file)
                            if os.path.exists(in_otherdir):
                                in_otherdir_fh = utils.open_file(in_otherdir)
                                actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
                                in_otherdir_fh.close()
                                actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
                                found = in_otherdir
                                self.pkg.orig_tar_gz = in_otherdir

                    if not found:
                        self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file))
                        self.pkg.orig_tar_gz = -1
                        continue
            else:
                self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file))
                continue

            if actual_md5 != dsc_files[dsc_file]["md5sum"]:
                self.reject("md5sum for %s doesn't match %s." % (found, file))
            if actual_size != int(dsc_files[dsc_file]["size"]):
                self.reject("size for %s doesn't match %s." % (found, file))

        return (self.reject_message, None)
    def do_query(self, q):
        sys.stderr.write("query: \"%s\" ... " % (q))
        before = time.time()
        r = self.projectB.query(q)
        time_diff = time.time()-before
        sys.stderr.write("took %.3f seconds.\n" % (time_diff))
        return r
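    # do_query is a drop-in replacement for self.projectB.query() that
    # also logs the SQL and its wall-clock time to stderr, e.g. (sketch):
    #
    #     r = self.do_query("SELECT id FROM suite")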