3 # Utility functions for katie
4 # Copyright (C) 2001, 2002, 2003, 2004, 2005 James Troup <james@nocrew.org>
5 # $Id: katie.py,v 1.57 2005-12-05 03:45:12 ajt Exp $
7 # This program is free software; you can redistribute it and/or modify
8 # it under the terms of the GNU General Public License as published by
9 # the Free Software Foundation; either version 2 of the License, or
10 # (at your option) any later version.
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU General Public License for more details.
17 # You should have received a copy of the GNU General Public License
18 # along with this program; if not, write to the Free Software
19 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 ###############################################################################
23 import cPickle, errno, os, pg, re, stat, string, sys, time;
24 import utils, db_access;
25 import apt_inst, apt_pkg;
29 ###############################################################################
# Pre-compiled regular expressions used throughout this module.
re_isanum = re.compile (r"^\d+$");            # string made up entirely of digits
re_default_answer = re.compile(r"\[(.*)\]");  # the "[X]" default answer in a prompt string
re_fdnic = re.compile(r"\n\n");               # blank-line separator (reformatted for direport)
re_bin_only_nmu = re.compile(r"\+b\d+$");     # "+bN" suffix of a binary-only NMU version
36 ###############################################################################
38 # Convenience wrapper to carry around all the package information in
41 def __init__(self, **kwds):
42 self.__dict__.update(kwds);
44 def update(self, **kwds):
45 self.__dict__.update(kwds);
47 ###############################################################################
    # Read in the group maintainer override file
    def __init__ (self, Cnf):
        """Load the set of group-maintained maintainer addresses from
        the Dinstall::GroupOverrideFilename file (if configured)."""
        # Maps lower-cased maintainer e-mail -> 1; membership marks a
        # package as group-maintained (never considered an NMU).
        self.group_maint = {};
        if Cnf.get("Dinstall::GroupOverrideFilename"):
            filename = Cnf["Dir::Override"] + Cnf["Dinstall::GroupOverrideFilename"];
            file = utils.open_file(filename);
            for line in file.readlines():
                # Strip comments, normalise case and surrounding whitespace.
                line = utils.re_comments.sub('', line).lower().strip();
                # NOTE(review): a guard against empty lines appears to be
                # missing from this excerpt -- confirm against full source.
                self.group_maint[line] = 1;
    def is_an_nmu (self, pkg):
        """Return whether the upload in 'pkg' looks like an NMU, i.e. was
        not made by the package's maintainer or one of its Uploaders.
        NOTE(review): several lines (the 'dsc'/'uploadernames' bindings,
        loop headers and the return statements) are not visible in this
        excerpt; do not rely on the control flow shown below."""
        changes = pkg.changes;
        # Normalise the .dsc Maintainer field (falling back to our own
        # address) into (rfc822, rfc2047, name, email) parts.
        i = utils.fix_maintainer (dsc.get("maintainer",
                                          Cnf["Dinstall::MyEmailAddress"]).lower());
        (dsc_rfc822, dsc_rfc2047, dsc_name, dsc_email) = i;
        # changes["changedbyname"] == dsc_name is probably never true, but better safe than sorry
        if dsc_name == changes["maintainername"].lower() and \
           (changes["changedby822"] == "" or changes["changedbyname"].lower() == dsc_name):
        # Listed Uploaders also count as non-NMU uploaders.
        if dsc.has_key("uploaders"):
            uploaders = dsc["uploaders"].lower().split(",");
            (rfc822, rfc2047, name, email) = utils.fix_maintainer (i.strip());
            uploadernames[name] = "";
        if uploadernames.has_key(changes["changedbyname"].lower()):
        # Some group maintained packages (e.g. Debian QA) are never NMU's
        if self.group_maint.has_key(changes["maintaineremail"].lower()):
91 ###############################################################################
    def __init__(self, Cnf):
        """Set up shared state: the NMU detector, accept counters, the
        per-upload Pkg container, the global template substitution map
        and the projectB database connection.
        NOTE(review): a 'self.Cnf = Cnf' binding is presumably among the
        lines not visible in this excerpt (later methods use self.Cnf)."""
        # Read in the group-maint override file
        self.nmu = nmu_p(Cnf);
        # Running totals for accepted uploads in this session.
        self.accept_count = 0;
        self.accept_bytes = 0L;
        # All per-upload state lives on this dumb attribute container.
        self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
                       legacy_source_untouchable = {});

        # Initialize the substitution template mapping global
        Subst = self.Subst = {};
        Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"];
        Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"];
        Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"];
        Subst["__KATIE_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"];

        # Open the projectB database connection.
        self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]));
        db_access.init(Cnf, self.projectB);
114 ###########################################################################
116 def init_vars (self):
117 for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
118 exec "self.pkg.%s.clear();" % (i);
119 self.pkg.orig_tar_id = None;
120 self.pkg.orig_tar_location = "";
121 self.pkg.orig_tar_gz = None;
123 ###########################################################################
125 def update_vars (self):
126 dump_filename = self.pkg.changes_file[:-8]+".katie";
127 dump_file = utils.open_file(dump_filename);
128 p = cPickle.Unpickler(dump_file);
129 for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
130 exec "self.pkg.%s.update(p.load());" % (i);
131 for i in [ "orig_tar_id", "orig_tar_location" ]:
132 exec "self.pkg.%s = p.load();" % (i);
135 ###########################################################################
    # This could just dump the dictionaries as is, but I'd like to avoid
    # this so there's some idea of what katie & lisa use from jennifer
    def dump_vars(self, dest_dir):
        """Pickle the interesting parts of self.pkg into
        <changesfile-base>.katie in dest_dir for later reloading by
        update_vars().  NOTE(review): several lines are not visible in
        this excerpt (the try/except around chmod, the d_* dict
        initialisation, some loop bodies and the final dump/close
        calls); comments below describe only the visible code."""
        # Bind locals (changes, dsc, files, ...) from self.pkg via exec.
        for i in [ "changes", "dsc", "files", "dsc_files",
                   "legacy_source_untouchable", "orig_tar_id", "orig_tar_location" ]:
            exec "%s = self.pkg.%s;" % (i,i);
        dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".katie");
        dump_file = utils.open_file(dump_filename, 'w');
        # Restrict the dump's permissions; a world-readable dump is fatal.
        os.chmod(dump_filename, 0660);
        if errno.errorcode[e.errno] == 'EPERM':
            perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE]);
            if perms & stat.S_IROTH:
                utils.fubar("%s is world readable and chmod failed." % (dump_filename));

        p = cPickle.Pickler(dump_file, 1);
        for i in [ "d_changes", "d_dsc", "d_files", "d_dsc_files" ]:
        # Copy only the fields the downstream tools care about rather
        # than dumping the dictionaries wholesale.
        for file in files.keys():
            for i in [ "package", "version", "architecture", "type", "size",
                       "md5sum", "component", "location id", "source package",
                       "source version", "maintainer", "dbtype", "files id",
                       "new", "section", "priority", "othercomponents",
                       "pool name", "original component" ]:
                if files[file].has_key(i):
                    d_files[file][i] = files[file][i];
        # Mandatory changes fields
        for i in [ "distribution", "source", "architecture", "version",
                   "maintainer", "urgency", "fingerprint", "changedby822",
                   "changedby2047", "changedbyname", "maintainer822",
                   "maintainer2047", "maintainername", "maintaineremail",
                   "closes", "changes" ]:
            d_changes[i] = changes[i];
        # Optional changes fields
        for i in [ "changed-by", "filecontents", "format", "lisa note", "distribution-version" ]:
            if changes.has_key(i):
                d_changes[i] = changes[i];
        for i in [ "source", "version", "maintainer", "fingerprint",
                   "uploaders", "bts changelog" ]:
        for file in dsc_files.keys():
            d_dsc_files[file] = {};
            # Mandatory dsc_files fields
            for i in [ "size", "md5sum" ]:
                d_dsc_files[file][i] = dsc_files[file][i];
            # Optional dsc_files fields
            for i in [ "files id" ]:
                if dsc_files[file].has_key(i):
                    d_dsc_files[file][i] = dsc_files[file][i];
        for i in [ d_changes, d_dsc, d_files, d_dsc_files,
                   legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
202 ###########################################################################
    # Set up the per-package template substitution mappings
    def update_subst (self, reject_message = ""):
        """Refresh the Subst template map with this upload's details.
        NOTE(review): the 'Subst' local binding (presumably
        Subst = self.Subst) and the 'else:' header before the
        maintainer-only branch are not visible in this excerpt."""
        changes = self.pkg.changes;
        # If jennifer crashed out in the right place, architecture may still be a string.
        if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
            changes["architecture"] = { "Unknown" : "" };
        # and maintainer2047 may not exist.
        if not changes.has_key("maintainer2047"):
            changes["maintainer2047"] = self.Cnf["Dinstall::MyEmailAddress"];

        Subst["__ARCHITECTURE__"] = " ".join(changes["architecture"].keys());
        Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file);
        Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "");

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
            Subst["__MAINTAINER_FROM__"] = changes["changedby2047"];
            Subst["__MAINTAINER_TO__"] = "%s, %s" % (changes["changedby2047"],
                                                     changes["maintainer2047"]);
            Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown");
            Subst["__MAINTAINER_FROM__"] = changes["maintainer2047"];
            Subst["__MAINTAINER_TO__"] = changes["maintainer2047"];
            Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown");
        # Bcc the version-tracking server about every sourceful upload.
        if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
            Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if self.Cnf.get("Dinstall::OverrideMaintainer"):
            Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"];
            Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"];

        Subst["__REJECT_MESSAGE__"] = reject_message;
        Subst["__SOURCE__"] = changes.get("source", "Unknown");
        Subst["__VERSION__"] = changes.get("version", "Unknown");
242 ###########################################################################
    def build_summaries(self):
        """Build the human-readable summaries of this upload (byhand/new
        markers, pool destinations, changelog excerpt) and return the
        tuple (summary, short_summary).
        NOTE(review): a few structural lines (byhand/new flag setting,
        the else-branch for known files) are not visible in this
        excerpt."""
        changes = self.pkg.changes;
        files = self.pkg.files;

        byhand = summary = new = "";

        # changes["distribution"] may not exist in corner cases
        # (e.g. unreadable changes files)
        if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
            changes["distribution"] = {};

        file_keys = files.keys();

        for file in file_keys:
            if files[file].has_key("byhand"):
                summary += file + " byhand\n"
            elif files[file].has_key("new"):
                summary += "(new) %s %s %s\n" % (file, files[file]["priority"], files[file]["section"])
                if files[file].has_key("othercomponents"):
                    summary += "WARNING: Already present in %s distribution.\n" % (files[file]["othercomponents"])
                if files[file]["type"] == "deb":
                    # Pull the package description out of the .deb's control file.
                    deb_fh = utils.open_file(file)
                    summary += apt_pkg.ParseSection(apt_inst.debExtractControl(deb_fh))["Description"] + '\n';
            files[file]["pool name"] = utils.poolify (changes.get("source",""), files[file]["component"])
            destination = self.Cnf["Dir::PoolRoot"] + files[file]["pool name"] + file
            summary += file + "\n to " + destination + "\n"

        short_summary = summary;

        # This is for direport's benefit...
        f = re_fdnic.sub("\n .\n", changes.get("changes",""));

        summary += "Changes: " + f;

        summary += self.announce(short_summary, 0)

        return (summary, short_summary);
287 ###########################################################################
    def close_bugs (self, summary, action):
        """Close or tag the bugs listed in the changes file's Closes
        field, appending a description of what was done to 'summary';
        mails are only sent when 'action' is true.
        NOTE(review): the 'for bug in bugs:' loop headers, several
        'if action:'/'else:' lines, the Subst/Cnf bindings and the
        return are not visible in this excerpt."""
        changes = self.pkg.changes;
        bugs = changes["closes"].keys();
        # Maintainer uploads close bugs outright; NMUs and uploads to
        # experimental only tag them as fixed.
        if not self.nmu.is_an_nmu(self.pkg):
            if changes["distribution"].has_key("experimental"):
                # tag bugs as fixed-in-experimental for uploads to experimental
                summary += "Setting bugs to severity fixed: ";
                control_message = "";
                summary += "%s " % (bug);
                control_message += "tag %s + fixed-in-experimental\n" % (bug);
                if action and control_message != "":
                    Subst["__CONTROL_MESSAGE__"] = control_message;
                    mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.bug-experimental-fixed");
                    utils.send_mail (mail_message);
                    self.Logger.log(["setting bugs to fixed"]+bugs);
            summary += "Closing bugs: ";
            summary += "%s " % (bug);
            Subst["__BUG_NUMBER__"] = bug;
            # Warn bug submitters when the fix isn't in released stable.
            if changes["distribution"].has_key("stable"):
                Subst["__STABLE_WARNING__"] = """
Note that this package is not part of the released stable Debian
distribution. It may have dependencies on other unreleased software,
or other instabilities. Please take care if you wish to install it.
The update will eventually make its way into the next released Debian
                Subst["__STABLE_WARNING__"] = "";
            mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.bug-close");
            utils.send_mail (mail_message);
            self.Logger.log(["closing bugs"]+bugs);
        summary += "Setting bugs to severity fixed: ";
        control_message = "";
        summary += "%s " % (bug);
        control_message += "tag %s + fixed\n" % (bug);
        if action and control_message != "":
            Subst["__CONTROL_MESSAGE__"] = control_message;
            mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.bug-nmu-fixed");
            utils.send_mail (mail_message);
            self.Logger.log(["setting bugs to fixed"]+bugs);
351 ###########################################################################
    def announce (self, short_summary, action):
        """Announce a sourceful upload to each distinct per-suite
        announce list, then optionally close bugs; returns the updated
        summary text.  NOTE(review): the 'summary'/'lists_done'/Subst/Cnf
        bindings, the early return and the final return are not visible
        in this excerpt."""
        changes = self.pkg.changes;

        # Only do announcements for source uploads with a recent dpkg-dev installed
        if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):

        Subst["__SHORT_SUMMARY__"] = short_summary;

        for dist in changes["distribution"].keys():
            list = Cnf.Find("Suite::%s::Announce" % (dist));
            # Announce to each list at most once per upload.
            if list == "" or lists_done.has_key(list):
            lists_done[list] = 1;
            summary += "Announcing to %s\n" % (list);

            Subst["__ANNOUNCE_LIST_ADDRESS__"] = list;
            # Bcc the version-tracking server about sourceful uploads.
            if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
                Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"]);
            mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.announce");
            utils.send_mail (mail_message);

        if Cnf.FindB("Dinstall::CloseBugs"):
            summary = self.close_bugs(summary, action);
385 ###########################################################################
    def accept (self, summary, short_summary):
        """Accept an upload: move its files into the accepted queue,
        send the accepted mail, announce, write the DebBugs
        version-tracking data and feed the queue-build machinery.
        NOTE(review): the Cnf/Subst/dsc local bindings and a few
        structural lines are not visible in this excerpt."""
        files = self.pkg.files;
        changes = self.pkg.changes;
        changes_file = self.pkg.changes_file;

        self.Logger.log(["Accepting changes",changes_file]);

        # Dump our state alongside the upload so it can be reloaded later.
        self.dump_vars(Cnf["Dir::Queue::Accepted"]);

        # Move all the files into the accepted directory
        utils.move(changes_file, Cnf["Dir::Queue::Accepted"]);
        file_keys = files.keys();
        for file in file_keys:
            utils.move(file, Cnf["Dir::Queue::Accepted"]);
            self.accept_bytes += float(files[file]["size"])
        self.accept_count += 1;

        # Send accept mail, announce to lists, close bugs and check for
        # override disparities
        if not Cnf["Dinstall::Options::No-Mail"]:
            Subst["__SUITE__"] = "";
            Subst["__SUMMARY__"] = summary;
            mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.accepted");
            utils.send_mail(mail_message)
            self.announce(short_summary, 1)

        ## Helper stuff for DebBugs Version Tracking
        if Cnf.Find("Dir::Queue::BTSVersionTrack"):
            # ??? once queue/* is cleared on *.d.o and/or reprocessed
            # the conditionalization on dsc["bts changelog"] should be

            # Write out the version history from the changelog
            if changes["architecture"].has_key("source") and \
               dsc.has_key("bts changelog"):

                temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                                    dotprefix=1, perms=0644);
                version_history = utils.open_file(temp_filename, 'w');
                version_history.write(dsc["bts changelog"]);
                version_history.close();
                filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                      changes_file[:-8]+".versions");
                # Publish atomically via rename.
                os.rename(temp_filename, filename);

            # Write out the binary -> source mapping.
            temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                                dotprefix=1, perms=0644);
            debinfo = utils.open_file(temp_filename, 'w');
            for file in file_keys:
                # NOTE(review): the 'f = files[file]' binding is not
                # visible in this excerpt.
                if f["type"] == "deb":
                    line = " ".join([f["package"], f["version"],
                                     f["architecture"], f["source package"],
                                     f["source version"]]);
                    debinfo.write(line+"\n");
            filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                  changes_file[:-8]+".debinfo");
            os.rename(temp_filename, filename);

        self.queue_build("accepted", Cnf["Dir::Queue::Accepted"])
455 ###########################################################################
    def queue_build (self, queue, path):
        """Register the upload's files in the queue_build table and
        copy/symlink them into the queue-build area so they can be
        auto-built before installation.
        NOTE(review): the 'changes'/'file_keys' bindings, 'continue'
        statements, 'else:' branches and the ql/getresult lines are not
        visible in this excerpt."""
        ## Special support to enable clean auto-building of queued packages
        queue_id = get_or_set_queue_id(queue)

        self.projectB.query("BEGIN WORK");
        for suite in changes["distribution"].keys():
            if suite not in Cnf.ValueList("Dinstall::QueueBuildSuites"):
            suite_id = db_access.get_suite_id(suite);
            dest_dir = Cnf["Dir::QueueBuild"];
            # Security queues get per-suite subdirectories.
            if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                dest_dir = os.path.join(dest_dir, suite);
            for file in file_keys:
                src = os.path.join(path, file);
                dest = os.path.join(dest_dir, file);
                if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                    # Copy it since the original won't be readable by www-data
                    utils.copy(src, dest);
                # Create a symlink to it
                os.symlink(src, dest);
                # Add it to the list of packages for later processing by apt-ftparchive
                # NOTE(review): 'queue_id' appears literally inside this
                # SQL instead of being %s-substituted -- looks like a
                # missing placeholder; confirm against full source.
                self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, queue_id, '%s', 't')" % (suite_id, dest));
            # If the .orig.tar.gz is in the pool, create a symlink to
            # it (if one doesn't already exist)
            if self.pkg.orig_tar_id:
                # Determine the .orig.tar.gz file name
                for dsc_file in self.pkg.dsc_files.keys():
                    if dsc_file.endswith(".orig.tar.gz"):
                dest = os.path.join(dest_dir, filename);
                # If it doesn't exist, create a symlink
                if not os.path.exists(dest):
                    # Find the .orig.tar.gz in the pool
                    q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id));
                    utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id));
                    src = os.path.join(ql[0][0], ql[0][1]);
                    os.symlink(src, dest);
                    # Add it to the list of packages for later processing by apt-ftparchive
                    self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, queue_id, '%s', 't')" % (suite_id, dest));
                # if it does, update things to ensure it's not removed prematurely
                self.projectB.query("UPDATE queue_build SET in_queue = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id));

        self.projectB.query("COMMIT WORK");
505 ###########################################################################
    def check_override (self):
        """Mail the maintainer when the .changes' section/priority
        disagree with the override database (sourceful uploads only).
        NOTE(review): the early return, 'continue' for the non-US case,
        the 'summary' initialisation and the Cnf/Subst bindings are not
        visible in this excerpt."""
        changes = self.pkg.changes;
        files = self.pkg.files;

        # Abandon the check if:
        #  a) it's a non-sourceful upload
        #  b) override disparity checks have been disabled
        #  c) we're not sending mail
        if not changes["architecture"].has_key("source") or \
           not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
           Cnf["Dinstall::Options::No-Mail"]:

        file_keys = files.keys();

        for file in file_keys:
            # Only known (non-NEW) binary packages have overrides to compare.
            if not files[file].has_key("new") and files[file]["type"] == "deb":
                section = files[file]["section"];
                override_section = files[file]["override section"];
                if section.lower() != override_section.lower() and section != "-":
                    # Ignore this; it's a common mistake and not worth whining about
                    if section.lower() == "non-us/main" and override_section.lower() == "non-us":
                    summary += "%s: package says section is %s, override says %s.\n" % (file, section, override_section);
                priority = files[file]["priority"];
                override_priority = files[file]["override priority"];
                if priority != override_priority and priority != "-":
                    summary += "%s: package says priority is %s, override says %s.\n" % (file, priority, override_priority);

        Subst["__SUMMARY__"] = summary;
        mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/jennifer.override-disparity");
        utils.send_mail(mail_message);
546 ###########################################################################
    def force_reject (self, files):
        """Forcefully move files from the current directory to the
        reject directory. If any file already exists in the reject
        directory it will be moved to the morgue to make way for
        # NOTE(review): the end of this docstring, the 'for file in
        # files:' loop header and the try:/except lines are not visible
        # in this excerpt.
            # Skip any files which don't exist or which we don't have permission to copy.
            if os.access(file,os.R_OK) == 0:
            dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file);
            # O_EXCL: fail rather than silently clobber an existing reject file.
            dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);
            # File exists? Let's try and move it to the morgue
            if errno.errorcode[e.errno] == 'EEXIST':
                morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file);
                morgue_file = utils.find_next_free(morgue_file);
                except utils.tried_too_hard_exc:
                    # Something's either gone badly Pete Tong, or
                    # someone is trying to exploit us.
                    utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file));
                utils.move(dest_file, morgue_file, perms=0660);
                # Retry claiming the reject slot now the old file is gone.
                dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);
                utils.warn("**WARNING** failed to claim %s in the reject directory." % (file));
            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file, dest_file, 1, perms=0660);
588 ###########################################################################
    def do_reject (self, manual = 0, reject_message = ""):
        """Reject an upload: optionally let the operator compose/edit
        the rejection message, move the files to the reject directory,
        write the <foo>.reason file and (optionally) send the mail.
        NOTE(review): the 'pkg'/Subst/Cnf bindings, the prompt-answer
        handling and the manual/automatic branch headers are not visible
        in this excerpt."""
        # If we weren't given a manual rejection message, spawn an
        # editor so the user can add one in...
        if manual and not reject_message:
            temp_filename = utils.temp_filename();
            editor = os.environ.get("EDITOR","vi")
            os.system("%s %s" % (editor, temp_filename))
            temp_fh = utils.open_file(temp_filename);
            reject_message = "".join(temp_fh.readlines());
            print "Reject message:";
            print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1);
            prompt = "[R]eject, Edit, Abandon, Quit ?"
            while prompt.find(answer) == -1:
                answer = utils.our_raw_input(prompt);
                m = re_default_answer.search(prompt);
                answer = answer[:1].upper();
            os.unlink(temp_filename);

        reason_filename = pkg.changes_file[:-8] + ".reason";
        reason_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename;

        # Move all the files into the reject directory
        reject_files = pkg.files.keys() + [pkg.changes_file];
        self.force_reject(reject_files);

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reason_filename):
            os.unlink(reason_filename);
        reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);

        # Automatic rejection fields:
        Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"];
        Subst["__MANUAL_REJECT_MESSAGE__"] = "";
        Subst["__CC__"] = "X-Katie-Rejection: automatic (moo)";
        os.write(reason_fd, reject_message);
        reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/katie.rejected");

        # Build up the rejection email
        user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"]);

        Subst["__REJECTOR_ADDRESS__"] = user_email_address;
        Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message;
        Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"];
        reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/katie.rejected");
        # Write the rejection email out as the <foo>.reason file
        os.write(reason_fd, reject_mail_message);

        # Send the rejection mail if appropriate
        if not Cnf["Dinstall::Options::No-Mail"]:
            utils.send_mail(reject_mail_message);

        self.Logger.log(["rejected", pkg.changes_file]);
663 ################################################################################
    # Ensure that source exists somewhere in the archive for the binary
    # upload being processed.
    # (1) exact match => 1.0-3
    # (2) Bin-only NMU of an MU => 1.0-3.0.1
    # (3) Bin-only NMU of a sourceful-NMU => 1.0-3.1.1
    def source_exists (self, package, source_version, suites = ["any"]):
        """Check that source 'package' at 'source_version' (or the
        version it is a binary-only NMU of) exists in one of 'suites'.
        NOTE(review): 'suites' is a mutable default argument -- safe only
        while never mutated; confirm.  The per-suite loop, query
        execution for the "any" case and the return are not visible in
        this excerpt."""
        que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \

        # source must exist in suite X, or in some other suite that's
        # mapped to X, recursively... silent-maps are counted too,
        # unreleased-maps aren't.
        maps = self.Cnf.ValueList("SuiteMappings")[:]
        maps = [ m.split() for m in maps ]
        maps = [ (x[1], x[2]) for x in maps
                 if x[0] == "map" or x[0] == "silent-map" ]
        # Transitively expand the searched suite set through the mappings.
        if x[1] in s and x[0] not in s:

        que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, string.join(["su.suite_name = '%s'" % a for a in s], " OR "));
        q = self.projectB.query(que)

        # Reduce the query results to a list of version numbers
        ql = map(lambda x: x[0], q.getresult());

        # Case (1): exact version match.
        if source_version in ql:

        # Cases (2)/(3): strip any "+bN" binary-NMU suffix and retry.
        orig_source_version = re_bin_only_nmu.sub('', source_version)
        if orig_source_version in ql:
711 ################################################################################
    def in_override_p (self, package, component, suite, binary_type, file):
        """Look up 'package' in the override table for 'suite'; record
        the override section/priority on files[file] when found.
        NOTE(review): the 'type' assignments for the source/binary cases
        and the return statements are not visible in this excerpt."""
        files = self.pkg.files;

        if binary_type == "": # must be source

        # Override suite name; used for example with proposed-updates
        if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
            suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)];

        # Avoid <undef> on unknown distributions
        suite_id = db_access.get_suite_id(suite);
        component_id = db_access.get_component_id(component);
        type_id = db_access.get_override_type_id(type);

        # FIXME: nasty non-US speficic hack
        if component.lower().startswith("non-us/"):
            component = component[7:];

        q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
                                % (package, suite_id, component_id, type_id));
        result = q.getresult();
        # If checking for a source package fall back on the binary override type
        if type == "dsc" and not result:
            deb_type_id = db_access.get_override_type_id("deb");
            udeb_type_id = db_access.get_override_type_id("udeb");
            q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
                                    % (package, suite_id, component_id, deb_type_id, udeb_type_id));
            result = q.getresult();

        # Remember the section and priority so we can check them later if appropriate
        files[file]["override section"] = result[0][0];
        files[file]["override priority"] = result[0][1];
754 ################################################################################
756 def reject (self, str, prefix="Rejected: "):
758 # Unlike other rejects we add new lines first to avoid trailing
759 # new lines when this message is passed back up to a caller.
760 if self.reject_message:
761 self.reject_message += "\n";
762 self.reject_message += prefix + str;
764 ################################################################################
    def get_anyversion(self, query_result, suite):
        """Return the highest version in query_result whose suite is
        'suite' or one of the suites that Enhances it (per
        Suite::<suite>::VersionChecks::Enhances).
        NOTE(review): the initialisation and return of 'anyversion' are
        not visible in this excerpt."""
        anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
        for (v, s) in query_result:
            if s in [ string.lower(x) for x in anysuite ]:
                # Keep the maximum per dpkg version-comparison rules.
                if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
775 ################################################################################
    def cross_suite_version_check(self, query_result, file, new_version):
        """Ensure versions are newer than existing packages in target
        suites and that cross-suite version checking rules as
        set out in the conf file are satisfied.
        NOTE(review): the per-entry 'suite' binding (presumably
        entry[1]) and some branch headers are not visible in this
        excerpt."""
        # Check versions for each target suite
        for target_suite in self.pkg.changes["distribution"].keys():
            must_be_newer_than = map(string.lower, self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)));
            must_be_older_than = map(string.lower, self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)));
            # Enforce "must be newer than target suite" even if conffile omits it
            if target_suite not in must_be_newer_than:
                must_be_newer_than.append(target_suite);
            for entry in query_result:
                existent_version = entry[0];
                if suite in must_be_newer_than and \
                   apt_pkg.VersionCompare(new_version, existent_version) < 1:
                    self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite));
                if suite in must_be_older_than and \
                   apt_pkg.VersionCompare(new_version, existent_version) > -1:
                    ch = self.pkg.changes
                    # A distribution-version mapping lets us propagate the
                    # upload to another suite rather than rejecting it.
                    if ch.get('distribution-version', {}).has_key(suite):
                        # we really use the other suite, ignoring the conflicting one ...
                        addsuite = ch["distribution-version"][suite]
                        add_version = self.get_anyversion(query_result, addsuite)
                        target_version = self.get_anyversion(query_result, target_suite)
                        # not add_version can only happen if we map to a suite
                        # that doesn't enhance the suite we're propup'ing from.
                        # so "propup-ver x a b c; map a d" is a problem only if
                        # d doesn't enhance a.
                        #
                        # i think we could always propagate in this case, rather
                        # than complaining. either way, this isn't a REJECT issue
                        #
                        # And - we really should complain to the dorks who configured dak
                        self.reject("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite), "Warning: ")
                        self.pkg.changes.setdefault("propdistribution", {})
                        self.pkg.changes["propdistribution"][addsuite] = 1
                    elif not target_version:
                        # not targets_version is true when the package is NEW
                        # we could just stick with the "...old version..." REJECT
                        self.reject("Won't propogate NEW packages.")
                    elif apt_pkg.VersionCompare(new_version, add_version) < 0:
                        # propogation would be redundant. no need to reject though.
                        self.reject("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite), "Warning: ")
                    elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
                         apt_pkg.VersionCompare(add_version, target_version) >= 0:
                        # Propagate the upload to the mapped suite.
                        self.reject("Propogating upload to %s" % (addsuite), "Warning: ")
                        self.pkg.changes.setdefault("propdistribution", {})
                        self.pkg.changes["propdistribution"][addsuite] = 1
                    self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
840 ################################################################################
    def check_binary_against_db(self, file):
        """Run cross-suite version checks plus a duplicate-copy check
        for one binary; returns the accumulated reject message
        ("" means OK).  NOTE(review): parts of the first SQL statement
        and the 'if ql:' guard around the duplicate reject are not
        visible in this excerpt."""
        self.reject_message = "";
        files = self.pkg.files;

        # Ensure version is sane
        q = self.projectB.query("""
SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
 WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
   AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
                                % (files[file]["package"],
                                   files[file]["architecture"]));
        self.cross_suite_version_check(q.getresult(), file, files[file]["version"]);

        # Check for any existing copies of the file
        q = self.projectB.query("""
SELECT b.id FROM binaries b, architecture a
 WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
   AND a.id = b.architecture"""
                                % (files[file]["package"],
                                   files[file]["version"],
                                   files[file]["architecture"]))
        self.reject("%s: can not overwrite existing copy already in the archive." % (file));

        return self.reject_message;
869 ################################################################################
    def check_source_against_db(self, file):
        """Run cross-suite version checks for a source package; returns
        the accumulated reject message ("" means OK).
        NOTE(review): the 'dsc' binding (presumably dsc = self.pkg.dsc)
        is not visible in this excerpt."""
        self.reject_message = "";

        # Ensure version is sane
        q = self.projectB.query("""
SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
 WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")));
        self.cross_suite_version_check(q.getresult(), file, dsc.get("version"));

        return self.reject_message;
883 ################################################################################
886 # NB: this function can remove entries from the 'files' index [if
887 # the .orig.tar.gz is a duplicate of the one in the archive]; if
888 # you're iterating over 'files' and call this function as part of
889 # the loop, be sure to add a check to the top of the loop to
890 # ensure you haven't just tried to dereference the deleted entry.
893 def check_dsc_against_db(self, file):
# Check the files listed in an uploaded .dsc against the archive
# database and the incoming/queue directories.  Returns a tuple
# (reject_message, orig_tar_gz_path-or-None).  Side effects: may set
# self.pkg.orig_tar_gz / orig_tar_id / orig_tar_location, may record
# entries in legacy_source_untouchable, and (per the NB comment above
# this function in the original file) may DELETE the .orig.tar.gz
# entry from the 'files' index when it duplicates the archived copy.
# NOTE(review): this extract is missing many original lines (loop
# headers, guards, and else-branches, e.g. 904, 911-916, 925-939,
# 942-958, 965-975, 982-1008), so the control flow shown here is
# incomplete.
894 self.reject_message = "";
895 files = self.pkg.files;
896 dsc_files = self.pkg.dsc_files;
897 legacy_source_untouchable = self.pkg.legacy_source_untouchable;
898 self.pkg.orig_tar_gz = None;
900 # Try and find all files mentioned in the .dsc. This has
901 # to work harder to cope with the multiple possible
902 # locations of an .orig.tar.gz.
903 for dsc_file in dsc_files.keys():
# Case 1: the file was shipped in this upload itself.
905 if files.has_key(dsc_file):
906 actual_md5 = files[dsc_file]["md5sum"];
907 actual_size = int(files[dsc_file]["size"]);
908 found = "%s in incoming" % (dsc_file)
909 # Check the file does not already exist in the archive
# The LIKE '%name%' query over-matches; the filter below narrows it to
# exact basename matches (either the whole filename or '.../name').
910 q = self.projectB.query("SELECT f.size, f.md5sum, l.path, f.filename FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file));
912 # Strip out anything that isn't '%s' or '/%s$'
914 if i[3] != dsc_file and i[3][-(len(dsc_file)+1):] != '/'+dsc_file:
917 # "[katie] has not broken them. [katie] has fixed a
918 # brokenness. Your crappy hack exploited a bug in
921 # "(Come on! I thought it was always obvious that
922 # one just doesn't release different files with
923 # the same name and version.)"
924 # -- ajk@ on d-devel@l.d.o
927 # Ignore exact matches for .orig.tar.gz
929 if dsc_file.endswith(".orig.tar.gz"):
# A byte-identical .orig.tar.gz already in the archive is tolerated:
# warn, point orig_tar_gz at the archived copy, and (per the NB above
# the function) the duplicate incoming entry is dropped from 'files'
# on a line missing from this extract -- TODO confirm.
931 if files.has_key(dsc_file) and \
932 int(files[dsc_file]["size"]) == int(i[0]) and \
933 files[dsc_file]["md5sum"] == i[1]:
934 self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ");
936 self.pkg.orig_tar_gz = i[2] + i[3];
# Any other name/version collision with the archive is fatal.
940 self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file));
# Case 2: an .orig.tar.gz not in this upload -- look for it in the pool.
941 elif dsc_file.endswith(".orig.tar.gz"):
943 q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file));
945 # Strip out anything that isn't '%s' or '/%s$'
947 if i[1] != dsc_file and i[1][-(len(dsc_file)+1):] != '/'+dsc_file:
951 # Unfortunately, we may get more than one match here if,
952 # for example, the package was in potato but had an -sa
953 # upload in woody. So we need to choose the right one.
# 'ql' is presumably the filtered query result list -- its construction
# is on lines missing from this extract.
955 x = ql[0]; # default to something sane in case we don't match any or have only one
# When several pool copies match, checksum each candidate against the
# .dsc's md5sum/size to pick the right one; non-matching legacy copies
# are recorded as untouchable so they aren't disturbed later.
959 old_file = i[0] + i[1];
960 old_file_fh = utils.open_file(old_file)
961 actual_md5 = apt_pkg.md5sum(old_file_fh);
963 actual_size = os.stat(old_file)[stat.ST_SIZE];
964 if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
967 legacy_source_untouchable[i[3]] = "";
# Re-checksum the chosen candidate 'x' for the final md5/size
# verification at the bottom of the function.
969 old_file = x[0] + x[1];
970 old_file_fh = utils.open_file(old_file)
971 actual_md5 = apt_pkg.md5sum(old_file_fh);
973 actual_size = os.stat(old_file)[stat.ST_SIZE];
976 dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
977 # See install() in katie...
978 self.pkg.orig_tar_id = x[3];
979 self.pkg.orig_tar_gz = old_file;
# 'suite_type' (the location's type, x[2]) is assigned on a line
# missing from this extract -- TODO confirm.
980 if suite_type == "legacy" or suite_type == "legacy-mixed":
981 self.pkg.orig_tar_location = "legacy";
983 self.pkg.orig_tar_location = x[4];
985 # Not there? Check the queue directories...
987 in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file);
988 # See process_it() in jennifer for explanation of this
989 if os.path.exists(in_unchecked):
# Early return: the caller gets the unchecked-queue path explicitly.
990 return (self.reject_message, in_unchecked);
992 for dir in [ "Accepted", "New", "Byhand" ]:
993 in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (dir)],dsc_file);
994 if os.path.exists(in_otherdir):
995 in_otherdir_fh = utils.open_file(in_otherdir)
996 actual_md5 = apt_pkg.md5sum(in_otherdir_fh);
997 in_otherdir_fh.close()
998 actual_size = os.stat(in_otherdir)[stat.ST_SIZE];
1000 self.pkg.orig_tar_gz = in_otherdir;
# Nowhere to be found: reject, and mark orig_tar_gz with the sentinel
# -1 (distinct from None) to signal "referenced but missing".
1003 self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file));
1004 self.pkg.orig_tar_gz = -1;
# Case 3: a non-orig file missing from the upload -- queue-only search.
1007 self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file));
# Final verification: whichever copy was located ('found' describes it),
# its md5sum and size must match what the .dsc declares.
1009 if actual_md5 != dsc_files[dsc_file]["md5sum"]:
1010 self.reject("md5sum for %s doesn't match %s." % (found, file));
1011 if actual_size != int(dsc_files[dsc_file]["size"]):
1012 self.reject("size for %s doesn't match %s." % (found, file));
1014 return (self.reject_message, None);
1016 def do_query(self, q):
1017 sys.stderr.write("query: \"%s\" ... " % (q));
1018 before = time.time();
1019 r = self.projectB.query(q);
1020 time_diff = time.time()-before;
1021 sys.stderr.write("took %.3f seconds.\n" % (time_diff));