3 # Utility functions for katie
4 # Copyright (C) 2001, 2002, 2003, 2004, 2005 James Troup <james@nocrew.org>
5 # $Id: katie.py,v 1.53 2005-01-18 22:18:55 troup Exp $
7 # This program is free software; you can redistribute it and/or modify
8 # it under the terms of the GNU General Public License as published by
9 # the Free Software Foundation; either version 2 of the License, or
10 # (at your option) any later version.
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU General Public License for more details.
17 # You should have received a copy of the GNU General Public License
18 # along with this program; if not, write to the Free Software
19 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 ###############################################################################
23 import cPickle, errno, os, pg, re, stat, string, sys, time;
24 import utils, db_access;
25 import apt_inst, apt_pkg;
29 ###############################################################################
# A string consisting solely of digits (e.g. a bug number from Closes:).
re_isanum = re.compile (r"^\d+$");
# Extracts the bracketed default answer from a prompt like "[R]eject, Skip".
re_default_answer = re.compile(r"\[(.*)\]");
# A blank line (paragraph separator) inside a changes "Changes:" field.
re_fdnic = re.compile(r"\n\n");
# Version ending in ".N.M": binary-only NMU of a maintainer upload.
re_bin_only_nmu_of_mu = re.compile(r"\.\d+\.\d+$");
# Version ending in ".N": binary-only NMU of a sourceful NMU.
re_bin_only_nmu_of_nmu = re.compile(r"\.\d+$");
37 ###############################################################################
39 # Convenience wrapper to carry around all the package information in
def __init__(self, **kwds):
    # Seed this object's attributes directly from the keyword arguments.
    for key, value in kwds.items():
        setattr(self, key, value);
def update(self, **kwds):
    # Merge the supplied keyword arguments into this object's attributes.
    for key, value in kwds.items():
        setattr(self, key, value);
48 ###############################################################################
51 # Read in the group maintainer override file
def __init__ (self, Cnf):
    # Read in the group maintainer override file: a map of maintainer
    # addresses whose packages are never considered NMUs (e.g. Debian QA).
    self.group_maint = {};
    if Cnf.get("Dinstall::GroupOverrideFilename"):
        filename = Cnf["Dir::Override"] + Cnf["Dinstall::GroupOverrideFilename"];
        file = utils.open_file(filename);
        for line in file.readlines():
            # Strip comments and normalise to lower case before recording.
            line = utils.re_comments.sub('', line).lower().strip();
            # NOTE(review): a guard against empty lines appears elided in
            # this copy, so blank entries would be recorded too — confirm.
            self.group_maint[line] = 1;
def is_an_nmu (self, pkg):
    # Decide whether the upload described by 'pkg' is a Non-Maintainer
    # Upload.  NOTE(review): several lines appear elided in this copy —
    # the 'dsc' local binding, the early 'return' statements, the
    # 'uploadernames' initialisation and the 'for i in uploaders:' header.
    changes = pkg.changes;
    # Normalise the .dsc Maintainer field for comparison.
    i = utils.fix_maintainer (dsc.get("maintainer",
                              Cnf["Dinstall::MyEmailAddress"]).lower());
    (dsc_rfc822, dsc_rfc2047, dsc_name, dsc_email) = i;
    # changes["changedbyname"] == dsc_name is probably never true, but better safe than sorry
    if dsc_name == changes["maintainername"].lower() and \
       (changes["changedby822"] == "" or changes["changedbyname"].lower() == dsc_name):
    # An upload by a listed co-maintainer (Uploaders field) is not an NMU.
    if dsc.has_key("uploaders"):
        uploaders = dsc["uploaders"].lower().split(",");
        (rfc822, rfc2047, name, email) = utils.fix_maintainer (i.strip());
        uploadernames[name] = "";
    if uploadernames.has_key(changes["changedbyname"].lower()):
    # Some group maintained packages (e.g. Debian QA) are never NMU's
    if self.group_maint.has_key(changes["maintaineremail"].lower()):
92 ###############################################################################
96 def __init__(self, Cnf):
98 # Read in the group-maint override file
99 self.nmu = nmu_p(Cnf);
100 self.accept_count = 0;
101 self.accept_bytes = 0L;
102 self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
103 legacy_source_untouchable = {});
105 # Initialize the substitution template mapping global
106 Subst = self.Subst = {};
107 Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"];
108 Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"];
109 Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"];
110 Subst["__KATIE_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"];
112 self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]));
113 db_access.init(Cnf, self.projectB);
115 ###########################################################################
def init_vars (self):
    """Reset all per-upload state held in self.pkg.

    The dictionaries are cleared in place (so any references held
    elsewhere remain valid) and the .orig.tar.gz bookkeeping fields
    are reset to their pristine values."""
    for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
        # getattr() replaces the original string-building 'exec', which
        # was slower and needlessly hard to audit.
        getattr(self.pkg, i).clear();
    self.pkg.orig_tar_id = None;
    self.pkg.orig_tar_location = "";
    self.pkg.orig_tar_gz = None;
124 ###########################################################################
def update_vars (self):
    """Reload the pickled state written next to the .changes file.

    The companion "<stem>.katie" file (written by dump_vars) is
    unpickled: the dictionary fields are merged into the live dicts in
    place, then the scalar fields are assigned in the same order they
    were dumped."""
    dump_filename = self.pkg.changes_file[:-8]+".katie";
    dump_file = utils.open_file(dump_filename);
    try:
        p = cPickle.Unpickler(dump_file);
        # Merge each pickled dictionary into the live one; getattr()
        # replaces the original string-building 'exec'.
        for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
            getattr(self.pkg, i).update(p.load());
        for i in [ "orig_tar_id", "orig_tar_location" ]:
            setattr(self.pkg, i, p.load());
    finally:
        # Always release the file handle; the original leaked it.
        dump_file.close();
136 ###########################################################################
138 # This could just dump the dictionaries as is, but I'd like to avoid
139 # this so there's some idea of what katie & lisa use from jennifer
def dump_vars(self, dest_dir):
    # Dump the per-upload state to "<changes-stem>.katie" in dest_dir as
    # a sequence of pickles, filtered down to the keys that katie & lisa
    # actually consume.  NOTE(review): several lines appear elided in
    # this copy — the d_* dict initialisations, the 'try/except OSError'
    # around chmod, 'd_files[file] = {}', the d_dsc copy loop body and
    # the final 'p.dump(i)' call.
    for i in [ "changes", "dsc", "files", "dsc_files",
               "legacy_source_untouchable", "orig_tar_id", "orig_tar_location" ]:
        # Bind each self.pkg attribute to a same-named local via exec.
        exec "%s = self.pkg.%s;" % (i,i);
    dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".katie");
    dump_file = utils.open_file(dump_filename, 'w');
    # Keep the dump group-accessible but not world-readable.
    os.chmod(dump_filename, 0660);
    # NOTE(review): this looks like the body of an elided 'except OSError'
    # handler — tolerate EPERM unless the file is world readable.
    if errno.errorcode[e.errno] == 'EPERM':
        perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE]);
        if perms & stat.S_IROTH:
            utils.fubar("%s is world readable and chmod failed." % (dump_filename));
    # Protocol 1 (binary) pickler.
    p = cPickle.Pickler(dump_file, 1);
    for i in [ "d_changes", "d_dsc", "d_files", "d_dsc_files" ]:
    # Copy only the known-interesting per-file keys.
    for file in files.keys():
        for i in [ "package", "version", "architecture", "type", "size",
                   "md5sum", "component", "location id", "source package",
                   "source version", "maintainer", "dbtype", "files id",
                   "new", "section", "priority", "othercomponents",
                   "pool name", "original component" ]:
            if files[file].has_key(i):
                d_files[file][i] = files[file][i];
    # Mandatory changes fields
    for i in [ "distribution", "source", "architecture", "version",
               "maintainer", "urgency", "fingerprint", "changedby822",
               "changedby2047", "changedbyname", "maintainer822",
               "maintainer2047", "maintainername", "maintaineremail",
               "closes", "changes" ]:
        d_changes[i] = changes[i];
    # Optional changes fields
    for i in [ "changed-by", "filecontents", "format", "lisa note", "distribution-version" ]:
        if changes.has_key(i):
            d_changes[i] = changes[i];
    # Copy the .dsc fields into d_dsc (loop body elided in this copy).
    for i in [ "source", "version", "maintainer", "fingerprint",
               "uploaders", "bts changelog" ]:
    for file in dsc_files.keys():
        d_dsc_files[file] = {};
        # Mandatory dsc_files fields
        for i in [ "size", "md5sum" ]:
            d_dsc_files[file][i] = dsc_files[file][i];
        # Optional dsc_files fields
        for i in [ "files id" ]:
            if dsc_files[file].has_key(i):
                d_dsc_files[file][i] = dsc_files[file][i];
    # Pickle each filtered structure in a fixed, documented order.
    for i in [ d_changes, d_dsc, d_files, d_dsc_files,
               legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
203 ###########################################################################
205 # Set up the per-package template substitution mappings
def update_subst (self, reject_message = ""):
    # Refresh the template substitution map (self.Subst) from the
    # current package's changes data.  NOTE(review): the 'Subst' and
    # 'Cnf' local bindings and an 'else:' line appear elided in this
    # copy — the three assignments after the maintainer branch look
    # like its else-branch.
    changes = self.pkg.changes;
    # If jennifer crashed out in the right place, architecture may still be a string.
    if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
        changes["architecture"] = { "Unknown" : "" };
    # and maintainer2047 may not exist.
    if not changes.has_key("maintainer2047"):
        changes["maintainer2047"] = self.Cnf["Dinstall::MyEmailAddress"];
    Subst["__ARCHITECTURE__"] = " ".join(changes["architecture"].keys());
    Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file);
    Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "");
    # For source uploads the Changed-By field wins; otherwise Maintainer wins.
    if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
        Subst["__MAINTAINER_FROM__"] = changes["changedby2047"];
        Subst["__MAINTAINER_TO__"] = "%s, %s" % (changes["changedby2047"],
                                                 changes["maintainer2047"]);
        Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown");
    # NOTE(review): presumably the 'else' branch of the test above.
    Subst["__MAINTAINER_FROM__"] = changes["maintainer2047"];
    Subst["__MAINTAINER_TO__"] = changes["maintainer2047"];
    Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown");
    # Bcc the package-tracking server for sourceful uploads.
    if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
        Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])
    # Apply any global override of the Maintainer field
    if self.Cnf.get("Dinstall::OverrideMaintainer"):
        Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"];
        Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"];
    Subst["__REJECT_MESSAGE__"] = reject_message;
    Subst["__SOURCE__"] = changes.get("source", "Unknown");
    Subst["__VERSION__"] = changes.get("version", "Unknown");
243 ###########################################################################
def build_summaries(self):
    # Build the human-readable (summary, short_summary) pair describing
    # the upload, for console output and mails.  NOTE(review): several
    # lines are elided in this copy — the byhand/new accumulators and
    # the 'else:' header in front of the pool-move branch.
    changes = self.pkg.changes;
    files = self.pkg.files;
    byhand = summary = new = "";
    # changes["distribution"] may not exist in corner cases
    # (e.g. unreadable changes files)
    if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
        changes["distribution"] = {};
    file_keys = files.keys();
    for file in file_keys:
        if files[file].has_key("byhand"):
            summary += file + " byhand\n"
        elif files[file].has_key("new"):
            summary += "(new) %s %s %s\n" % (file, files[file]["priority"], files[file]["section"])
            if files[file].has_key("othercomponents"):
                summary += "WARNING: Already present in %s distribution.\n" % (files[file]["othercomponents"])
            if files[file]["type"] == "deb":
                deb_fh = utils.open_file(file)
                # Pull the package Description out of the .deb's control data.
                summary += apt_pkg.ParseSection(apt_inst.debExtractControl(deb_fh))["Description"] + '\n';
        # Presumably the else-branch: the file moves into the pool.
        files[file]["pool name"] = utils.poolify (changes.get("source",""), files[file]["component"])
        destination = self.Cnf["Dir::PoolRoot"] + files[file]["pool name"] + file
        summary += file + "\n  to " + destination + "\n"
    short_summary = summary;
    # This is for direport's benefit...
    f = re_fdnic.sub("\n .\n", changes.get("changes",""));
    summary += "Changes: " + f;
    summary += self.announce(short_summary, 0)
    return (summary, short_summary);
288 ###########################################################################
def close_bugs (self, summary, action):
    # Close (or tag) the bugs from the changes file's Closes field,
    # appending what was done to 'summary'; mails go out only when
    # 'action' is true.  NOTE(review): heavily elided in this copy —
    # the 'for bug in bugs:' loop headers, the Subst/Cnf locals, the
    # early return and the maintainer/NMU 'else:' split are missing.
    changes = self.pkg.changes;
    bugs = changes["closes"].keys();
    if not self.nmu.is_an_nmu(self.pkg):
        if changes["distribution"].has_key("experimental"):
            # tag bugs as fixed-in-experimental for uploads to experimental
            summary += "Setting bugs to severity fixed: ";
            control_message = "";
            summary += "%s " % (bug);
            control_message += "tag %s + fixed-in-experimental\n" % (bug);
            if action and control_message != "":
                Subst["__CONTROL_MESSAGE__"] = control_message;
                mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.bug-experimental-fixed");
                utils.send_mail (mail_message);
                self.Logger.log(["setting bugs to fixed"]+bugs);
        # Maintainer upload: actually close each bug by mail.
        summary += "Closing bugs: ";
        summary += "%s " % (bug);
        Subst["__BUG_NUMBER__"] = bug;
        if changes["distribution"].has_key("stable"):
            # NOTE(review): the tail of this warning text (and the closing
            # quotes) was elided in this copy.
            Subst["__STABLE_WARNING__"] = """
Note that this package is not part of the released stable Debian
distribution.  It may have dependencies on other unreleased software,
or other instabilities.  Please take care if you wish to install it.
The update will eventually make its way into the next released Debian
""";
        Subst["__STABLE_WARNING__"] = "";
        mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.bug-close");
        utils.send_mail (mail_message);
        self.Logger.log(["closing bugs"]+bugs);
    # NMU: tag the bugs fixed rather than closing them.
    summary += "Setting bugs to severity fixed: ";
    control_message = "";
    summary += "%s " % (bug);
    control_message += "tag %s + fixed\n" % (bug);
    if action and control_message != "":
        Subst["__CONTROL_MESSAGE__"] = control_message;
        mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.bug-nmu-fixed");
        utils.send_mail (mail_message);
        self.Logger.log(["setting bugs to fixed"]+bugs);
352 ###########################################################################
def announce (self, short_summary, action):
    # Announce the upload to the per-suite announce lists and, when
    # configured, trigger bug closing; mails go out only when 'action'
    # is true.  NOTE(review): the Subst/Cnf locals, the 'lists_done'
    # initialisation, 'summary' init, early return/continue lines and
    # the final return are elided in this copy.
    changes = self.pkg.changes;
    # Only do announcements for source uploads with a recent dpkg-dev installed
    if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):
    Subst["__SHORT_SUMMARY__"] = short_summary;
    for dist in changes["distribution"].keys():
        list = Cnf.Find("Suite::%s::Announce" % (dist));
        # Announce to each list at most once.
        if list == "" or lists_done.has_key(list):
        lists_done[list] = 1;
        summary += "Announcing to %s\n" % (list);
        Subst["__ANNOUNCE_LIST_ADDRESS__"] = list;
        # Bcc the package-tracking server for sourceful uploads.
        if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
            Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"]);
        mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.announce");
        utils.send_mail (mail_message);
    if Cnf.FindB("Dinstall::CloseBugs"):
        summary = self.close_bugs(summary, action);
386 ###########################################################################
def accept (self, summary, short_summary):
    # Accept an upload: dump state, move everything into the accepted
    # queue, update counters, send the accepted mail / announcements,
    # record DebBugs version-tracking data and maintain the accepted
    # auto-build area.  NOTE(review): several lines are elided in this
    # copy — the Cnf/Subst/dsc locals, 'continue' statements,
    # 'f = files[file]', the symlink 'else:' branches and the
    # try/except around ql/getresult.
    files = self.pkg.files;
    changes = self.pkg.changes;
    changes_file = self.pkg.changes_file;
    self.Logger.log(["Accepting changes",changes_file]);
    self.dump_vars(Cnf["Dir::Queue::Accepted"]);
    # Move all the files into the accepted directory
    utils.move(changes_file, Cnf["Dir::Queue::Accepted"]);
    file_keys = files.keys();
    for file in file_keys:
        utils.move(file, Cnf["Dir::Queue::Accepted"]);
        self.accept_bytes += float(files[file]["size"])
    self.accept_count += 1;
    # Send accept mail, announce to lists, close bugs and check for
    # override disparities
    if not Cnf["Dinstall::Options::No-Mail"]:
        Subst["__SUITE__"] = "";
        Subst["__SUMMARY__"] = summary;
        mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.accepted");
        utils.send_mail(mail_message)
        self.announce(short_summary, 1)
    ## Helper stuff for DebBugs Version Tracking
    if Cnf.Find("Dir::Queue::BTSVersionTrack"):
        # ??? once queue/* is cleared on *.d.o and/or reprocessed
        # the conditionalization on dsc["bts changelog"] should be
        # Write out the version history from the changelog
        if changes["architecture"].has_key("source") and \
           dsc.has_key("bts changelog"):
            temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                                dotprefix=1, perms=0644);
            version_history = utils.open_file(temp_filename, 'w');
            version_history.write(dsc["bts changelog"]);
            version_history.close();
            filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                  changes_file[:-8]+".versions");
            os.rename(temp_filename, filename);
        # Write out the binary -> source mapping.
        temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                            dotprefix=1, perms=0644);
        debinfo = utils.open_file(temp_filename, 'w');
        for file in file_keys:
            # NOTE(review): presumably 'f = files[file]' was here (elided).
            if f["type"] == "deb":
                line = " ".join([f["package"], f["version"],
                                 f["architecture"], f["source package"],
                                 f["source version"]]);
                debinfo.write(line+"\n");
        filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                              changes_file[:-8]+".debinfo");
        os.rename(temp_filename, filename);
    ## Special support to enable clean auto-building of accepted packages
    self.projectB.query("BEGIN WORK");
    for suite in changes["distribution"].keys():
        if suite not in Cnf.ValueList("Dinstall::AcceptedAutoBuildSuites"):
        suite_id = db_access.get_suite_id(suite);
        dest_dir = Cnf["Dir::AcceptedAutoBuild"];
        if Cnf.FindB("Dinstall::SecurityAcceptedAutoBuild"):
            dest_dir = os.path.join(dest_dir, suite);
        for file in file_keys:
            src = os.path.join(Cnf["Dir::Queue::Accepted"], file);
            dest = os.path.join(dest_dir, file);
            if Cnf.FindB("Dinstall::SecurityAcceptedAutoBuild"):
                # Copy it since the original won't be readable by www-data
                utils.copy(src, dest);
            # NOTE(review): presumably the 'else' branch (elided):
            # Create a symlink to it
            os.symlink(src, dest);
            # Add it to the list of packages for later processing by apt-ftparchive
            self.projectB.query("INSERT INTO accepted_autobuild (suite, filename, in_accepted) VALUES (%s, '%s', 't')" % (suite_id, dest));
        # If the .orig.tar.gz is in the pool, create a symlink to
        # it (if one doesn't already exist)
        if self.pkg.orig_tar_id:
            # Determine the .orig.tar.gz file name
            for dsc_file in self.pkg.dsc_files.keys():
                if dsc_file.endswith(".orig.tar.gz"):
            dest = os.path.join(dest_dir, filename);
            # If it doesn't exist, create a symlink
            if not os.path.exists(dest):
                # Find the .orig.tar.gz in the pool
                q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id));
                utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id));
                src = os.path.join(ql[0][0], ql[0][1]);
                os.symlink(src, dest);
                # Add it to the list of packages for later processing by apt-ftparchive
                self.projectB.query("INSERT INTO accepted_autobuild (suite, filename, in_accepted) VALUES (%s, '%s', 't')" % (suite_id, dest));
            # if it does, update things to ensure it's not removed prematurely
            self.projectB.query("UPDATE accepted_autobuild SET in_accepted = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id));
    self.projectB.query("COMMIT WORK");
499 ###########################################################################
def check_override (self):
    # Compare the package's declared section/priority against the
    # override table and mail the maintainer about disparities.
    # NOTE(review): the Subst/Cnf locals, the 'summary' initialisation,
    # the early return/'continue' lines and the empty-summary
    # short-circuit are elided in this copy.
    changes = self.pkg.changes;
    files = self.pkg.files;
    # Abandon the check if:
    #  a) it's a non-sourceful upload
    #  b) override disparity checks have been disabled
    #  c) we're not sending mail
    if not changes["architecture"].has_key("source") or \
       not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
       Cnf["Dinstall::Options::No-Mail"]:
    file_keys = files.keys();
    for file in file_keys:
        if not files[file].has_key("new") and files[file]["type"] == "deb":
            section = files[file]["section"];
            override_section = files[file]["override section"];
            if section.lower() != override_section.lower() and section != "-":
                # Ignore this; it's a common mistake and not worth whining about
                if section.lower() == "non-us/main" and override_section.lower() == "non-us":
                summary += "%s: package says section is %s, override says %s.\n" % (file, section, override_section);
            priority = files[file]["priority"];
            override_priority = files[file]["override priority"];
            if priority != override_priority and priority != "-":
                summary += "%s: package says priority is %s, override says %s.\n" % (file, priority, override_priority);
    Subst["__SUMMARY__"] = summary;
    mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/jennifer.override-disparity");
    utils.send_mail(mail_message);
540 ###########################################################################
def force_reject (self, files):
    """Forcefully move files from the current directory to the
    reject directory.  If any file already exists in the reject
    directory it will be moved to the morgue to make way for
    the incoming file.

    NOTE(review): the tail of this docstring, the 'for file in files:'
    header and the try/except lines around the os.open calls are
    elided in this copy of the source."""
    # Skip any files which don't exist or which we don't have permission to copy.
    if os.access(file,os.R_OK) == 0:
    dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file);
    # O_EXCL claims the destination exclusively, so a racing process
    # can't swap the file out from under us.
    dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);
    # File exists?  Let's try and move it to the morgue
    if errno.errorcode[e.errno] == 'EEXIST':
        morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file);
        morgue_file = utils.find_next_free(morgue_file);
    except utils.tried_too_hard_exc:
        # Something's either gone badly Pete Tong, or
        # someone is trying to exploit us.
        utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file));
        utils.move(dest_file, morgue_file, perms=0660);
        # Retry claiming the now-vacated destination.
        dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);
        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file));
    # If we got here, we own the destination file, so we can
    # safely overwrite it.
    utils.move(file, dest_file, 1, perms=0660);
582 ###########################################################################
def do_reject (self, manual = 0, reject_message = ""):
    # Reject the current upload: optionally let the operator compose a
    # rejection message in $EDITOR, move everything into the reject
    # queue, write a "<stem>.reason" file and mail the rejection.
    # NOTE(review): several lines are elided in this copy — the outer
    # editing loop, the answer-dispatch branches, the pkg/Cnf/Subst
    # locals, the manual/automatic 'if' split and the fd close calls.
    # If we weren't given a manual rejection message, spawn an
    # editor so the user can add one in...
    if manual and not reject_message:
        temp_filename = utils.temp_filename();
        editor = os.environ.get("EDITOR","vi")
        os.system("%s %s" % (editor, temp_filename))
        temp_fh = utils.open_file(temp_filename);
        reject_message = "".join(temp_fh.readlines());
        print "Reject message:";
        print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1);
        prompt = "[R]eject, Edit, Abandon, Quit ?"
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt);
            m = re_default_answer.search(prompt);
            answer = answer[:1].upper();
        os.unlink(temp_filename);
    reason_filename = pkg.changes_file[:-8] + ".reason";
    reason_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename;
    # Move all the files into the reject directory
    reject_files = pkg.files.keys() + [pkg.changes_file];
    self.force_reject(reject_files);
    # If we fail here someone is probably trying to exploit the race
    # so let's just raise an exception ...
    if os.path.exists(reason_filename):
        os.unlink(reason_filename);
    reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);
    # NOTE(review): presumably the automatic-rejection branch.
    Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"];
    Subst["__MANUAL_REJECT_MESSAGE__"] = "";
    Subst["__CC__"] = "X-Katie-Rejection: automatic (moo)";
    os.write(reason_fd, reject_message);
    reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/katie.rejected");
    # Build up the rejection email
    user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"]);
    Subst["__REJECTOR_ADDRESS__"] = user_email_address;
    Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message;
    Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"];
    reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/katie.rejected");
    # Write the rejection email out as the <foo>.reason file
    os.write(reason_fd, reject_mail_message);
    # Send the rejection mail if appropriate
    if not Cnf["Dinstall::Options::No-Mail"]:
        utils.send_mail(reject_mail_message);
    self.Logger.log(["rejected", pkg.changes_file]);
657 ################################################################################
659 # Ensure that source exists somewhere in the archive for the binary
660 # upload being processed.
662 # (1) exact match => 1.0-3
663 # (2) Bin-only NMU of an MU => 1.0-3.0.1
664 # (3) Bin-only NMU of a sourceful-NMU => 1.0-3.1.1
def source_exists (self, package, source_version, suites = ["any"]):
    # Ensure that source exists somewhere in the archive for the binary
    # upload being processed, accepting (1) an exact version match,
    # (2) a binary-only NMU of a maintainer upload (version minus the
    # trailing .N.M) and (3) a binary-only NMU of a sourceful NMU
    # (version minus the trailing .N).
    # NOTE(review): elided in this copy — the "any"-suite branch, the
    # continuation of the first query string, the suite-expansion loop
    # header and all return statements.  Also note the mutable default
    # argument ('suites = ["any"]') — safe only while never mutated.
    que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
    # source must exist in suite X, or in some other suite that's
    # mapped to X, recursively... silent-maps are counted too,
    # unreleased-maps aren't.
    maps = self.Cnf.ValueList("SuiteMappings")[:]
    maps = [ m.split() for m in maps ]
    maps = [ (x[1], x[2]) for x in maps
             if x[0] == "map" or x[0] == "silent-map" ]
    if x[1] in s and x[0] not in s:
    que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, string.join(["su.suite_name = '%s'" % a for a in s], " OR "));
    q = self.projectB.query(que)
    # Reduce the query results to a list of version numbers
    ql = map(lambda x: x[0], q.getresult());
    # (1) exact match
    if source_version in ql:
    # (2) binary-only NMU of a maintainer upload
    orig_source_version = re_bin_only_nmu_of_mu.sub('', source_version)
    if orig_source_version in ql:
    # (3) binary-only NMU of a sourceful NMU
    orig_source_version = re_bin_only_nmu_of_nmu.sub('', source_version)
    if orig_source_version in ql:
710 ################################################################################
def in_override_p (self, package, component, suite, binary_type, file):
    # Look up the override entry for 'package' in 'suite'; on success
    # record the override section/priority into files[file].
    # NOTE(review): elided in this copy — the 'type' assignments for the
    # source/binary cases, the unknown-suite early return and the final
    # return of the result.
    files = self.pkg.files;
    if binary_type == "": # must be source
    # Override suite name; used for example with proposed-updates
    if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
        suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)];
    # Avoid <undef> on unknown distributions
    suite_id = db_access.get_suite_id(suite);
    component_id = db_access.get_component_id(component);
    type_id = db_access.get_override_type_id(type);
    # FIXME: nasty non-US speficic hack
    if component.lower().startswith("non-us/"):
        component = component[7:];
    q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
                            % (package, suite_id, component_id, type_id));
    result = q.getresult();
    # If checking for a source package fall back on the binary override type
    if type == "dsc" and not result:
        deb_type_id = db_access.get_override_type_id("deb");
        udeb_type_id = db_access.get_override_type_id("udeb");
        q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
                                % (package, suite_id, component_id, deb_type_id, udeb_type_id));
        result = q.getresult();
    # Remember the section and priority so we can check them later if appropriate
    files[file]["override section"] = result[0][0];
    files[file]["override priority"] = result[0][1];
753 ################################################################################
def reject (self, str, prefix="Rejected: "):
    """Append one rejection line to self.reject_message.

    Existing lines are separated by a leading newline rather than a
    trailing one, so the accumulated message never ends with a
    newline when passed back up to a caller."""
    separator = "";
    if self.reject_message:
        separator = "\n";
    self.reject_message = self.reject_message + separator + prefix + str;
763 ################################################################################
def get_anyversion(self, query_result, suite):
    # Scan (version, suite) pairs from query_result for entries whose
    # suite is 'suite' or one of the suites it Enhances, tracking the
    # highest such version.  NOTE(review): the 'anyversion = None'
    # initialisation, the 'anyversion = v' assignment and the final
    # return are elided in this copy.
    anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
    for (v, s) in query_result:
        if s in [ string.lower(x) for x in anysuite ]:
            if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
774 ################################################################################
def cross_suite_version_check(self, query_result, file, new_version):
    """Ensure versions are newer than existing packages in target
    suites and that cross-suite version checking rules as
    set out in the conf file are satisfied."""
    # NOTE(review): several lines are elided in this copy — notably the
    # 'suite = entry[1]' binding inside the entry loop, 'continue'
    # statements and the final 'else:' before the last reject.
    # Check versions for each target suite
    for target_suite in self.pkg.changes["distribution"].keys():
        must_be_newer_than = map(string.lower, self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)));
        must_be_older_than = map(string.lower, self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)));
        # Enforce "must be newer than target suite" even if conffile omits it
        if target_suite not in must_be_newer_than:
            must_be_newer_than.append(target_suite);
        for entry in query_result:
            existent_version = entry[0];
            if suite in must_be_newer_than and \
               apt_pkg.VersionCompare(new_version, existent_version) < 1:
                self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite));
            if suite in must_be_older_than and \
               apt_pkg.VersionCompare(new_version, existent_version) > -1:
                ch = self.pkg.changes
                if ch.get('distribution-version', {}).has_key(suite):
                    # we really use the other suite, ignoring the conflicting one ...
                    addsuite = ch["distribution-version"][suite]
                    add_version = self.get_anyversion(query_result, addsuite)
                    target_version = self.get_anyversion(query_result, target_suite)
                    # not add_version can only happen if we map to a suite
                    # that doesn't enhance the suite we're propup'ing from.
                    # so "propup-ver x a b c; map a d" is a problem only if
                    # d doesn't enhance a.
                    #
                    # i think we could always propagate in this case, rather
                    # than complaining. either way, this isn't a REJECT issue
                    #
                    # And - we really should complain to the dorks who configured dak
                    self.reject("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite), "Warning: ")
                    self.pkg.changes["distribution"][addsuite] = 1
                elif not target_version:
                    # not targets_version is true when the package is NEW
                    # we could just stick with the "...old version..." REJECT
                    self.reject("Won't propogate NEW packages.")
                elif apt_pkg.VersionCompare(new_version, add_version) < 0:
                    # propogation would be redundant. no need to reject though.
                    #self.reject("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite), "Warning: ");
                    self.reject("foo", "Warning: ")
                elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
                     apt_pkg.VersionCompare(add_version, target_version) == 0:
                    self.pkg.changes["distribution"][addsuite] = 1
                self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite));
837 ################################################################################
def check_binary_against_db(self, file):
    # Check the binary described by files[file] against the database:
    # its version must satisfy the cross-suite rules, and an identical
    # package/version/arch must not already exist.  Returns the
    # accumulated reject message ("" when acceptable).
    # NOTE(review): part of the first query's FROM clause and the
    # 'if q.getresult():' guard before the final reject appear elided
    # in this copy.
    self.reject_message = "";
    files = self.pkg.files;
    # Ensure version is sane
    q = self.projectB.query("""
SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
 WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
   AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
                            % (files[file]["package"],
                               files[file]["architecture"]));
    self.cross_suite_version_check(q.getresult(), file, files[file]["version"]);
    # Check for any existing copies of the file
    q = self.projectB.query("""
SELECT b.id FROM binaries b, architecture a
 WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
   AND a.id = b.architecture"""
                            % (files[file]["package"],
                               files[file]["version"],
                               files[file]["architecture"]))
    self.reject("%s: can not overwrite existing copy already in the archive." % (file));
    return self.reject_message;
866 ################################################################################
def check_source_against_db(self, file):
    """Check the .dsc's source/version against the database's
    cross-suite version rules.

    Returns the accumulated reject message (the empty string when the
    upload is acceptable)."""
    self.reject_message = "";
    # Bind the .dsc dictionary; the dsc.get() calls below need it and
    # the original lost this assignment.
    dsc = self.pkg.dsc;
    # Ensure version is sane
    q = self.projectB.query("""
SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
 WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")));
    self.cross_suite_version_check(q.getresult(), file, dsc.get("version"));
    return self.reject_message;
880 ################################################################################
883 # NB: this function can remove entries from the 'files' index [if
884 # the .orig.tar.gz is a duplicate of the one in the archive]; if
885 # you're iterating over 'files' and call this function as part of
886 # the loop, be sure to add a check to the top of the loop to
887 # ensure you haven't just tried to dereference the deleted entry.
def check_dsc_against_db(self, file):
    """Check every file listed in the .dsc against the archive and queues.

    For each entry in self.pkg.dsc_files, locate the file either in
    the upload itself, in the pool (for a reused .orig.tar.gz), or
    in one of the queue directories, and verify its md5sum and size
    against the .dsc.  Side effects: may delete an entry from
    self.pkg.files (duplicate .orig.tar.gz already archived) and
    sets self.pkg.orig_tar_gz / orig_tar_id / orig_tar_location.

    Returns (reject_message, in_unchecked): in_unchecked is the
    path of an .orig.tar.gz found in Dir::Queue::Unchecked (see
    process_it() in jennifer), else None.

    NOTE(review): this block arrived with indentation stripped and
    several control-flow lines dropped; the loop/guard structure
    below was reconstructed from the dangling references ('ql',
    'found', 'match', 'suite_type') -- confirm against history.
    """
    self.reject_message = "";
    files = self.pkg.files;
    dsc_files = self.pkg.dsc_files;
    legacy_source_untouchable = self.pkg.legacy_source_untouchable;
    self.pkg.orig_tar_gz = None;

    # Try and find all files mentioned in the .dsc.  This has
    # to work harder to cope with the multiple possible
    # locations of an .orig.tar.gz.
    for dsc_file in dsc_files.keys():
        found = None;
        if files.has_key(dsc_file):
            actual_md5 = files[dsc_file]["md5sum"];
            actual_size = int(files[dsc_file]["size"]);
            found = "%s in incoming" % (dsc_file)
            # Check the file does not already exist in the archive.
            q = self.projectB.query("SELECT f.size, f.md5sum, l.path, f.filename FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file));
            ql = q.getresult();
            # Strip out anything that isn't '%s' or '/%s$'
            for i in ql[:]:
                if i[3] != dsc_file and i[3][-(len(dsc_file)+1):] != '/'+dsc_file:
                    ql.remove(i);

            # "[katie] has not broken them.  [katie] has fixed a
            # brokenness.  Your crappy hack exploited a bug in
            # the old dinstall.
            #
            # "(Come on!  I thought it was always obvious that
            # one just doesn't release different files with
            # the same name and version.)"
            #                        -- ajk@ on d-devel@l.d.o
            if ql != []:
                # Ignore exact matches for .orig.tar.gz
                match = 0;
                if dsc_file.endswith(".orig.tar.gz"):
                    for i in ql:
                        if files.has_key(dsc_file) and \
                           int(files[dsc_file]["size"]) == int(i[0]) and \
                           files[dsc_file]["md5sum"] == i[1]:
                            self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ");
                            # NB: removes the entry from 'files' -- see
                            # the warning in the comment above this def.
                            del files[dsc_file];
                            self.pkg.orig_tar_gz = i[2] + i[3];
                            match = 1;
                if not match:
                    self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file));
        elif dsc_file.endswith(".orig.tar.gz"):
            # Not in the upload: check for it in the pool.
            q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file));
            ql = q.getresult();
            # Strip out anything that isn't '%s' or '/%s$'
            for i in ql[:]:
                if i[1] != dsc_file and i[1][-(len(dsc_file)+1):] != '/'+dsc_file:
                    ql.remove(i);

            if ql != []:
                # Unfortunately, we may get more than one match here if,
                # for example, the package was in potato but had an -sa
                # upload in woody.  So we need to choose the right one.
                x = ql[0]; # default to something sane in case we don't match any or have only one
                if len(ql) > 1:
                    for i in ql:
                        old_file = i[0] + i[1];
                        old_file_fh = utils.open_file(old_file)
                        actual_md5 = apt_pkg.md5sum(old_file_fh);
                        old_file_fh.close()
                        actual_size = os.stat(old_file)[stat.ST_SIZE];
                        if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
                            x = i;
                        else:
                            legacy_source_untouchable[i[3]] = "";

                old_file = x[0] + x[1];
                old_file_fh = utils.open_file(old_file)
                actual_md5 = apt_pkg.md5sum(old_file_fh);
                old_file_fh.close()
                actual_size = os.stat(old_file)[stat.ST_SIZE];
                found = old_file;
                suite_type = x[2];
                dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
                # See install() in katie...
                self.pkg.orig_tar_id = x[3];
                self.pkg.orig_tar_gz = old_file;
                if suite_type == "legacy" or suite_type == "legacy-mixed":
                    self.pkg.orig_tar_location = "legacy";
                else:
                    self.pkg.orig_tar_location = x[4];
            else:
                # Not there? Check the queue directories...
                in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file);
                # See process_it() in jennifer for explanation of this
                if os.path.exists(in_unchecked):
                    return (self.reject_message, in_unchecked);
                else:
                    for dir in [ "Accepted", "New", "Byhand" ]:
                        in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (dir)],dsc_file);
                        if os.path.exists(in_otherdir):
                            in_otherdir_fh = utils.open_file(in_otherdir)
                            actual_md5 = apt_pkg.md5sum(in_otherdir_fh);
                            in_otherdir_fh.close()
                            actual_size = os.stat(in_otherdir)[stat.ST_SIZE];
                            found = in_otherdir;
                            self.pkg.orig_tar_gz = in_otherdir;

                if not found:
                    self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file));
                    # -1 marks "looked for it, couldn't find it".
                    self.pkg.orig_tar_gz = -1;
                    continue;
        else:
            self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file));
            continue;

        if actual_md5 != dsc_files[dsc_file]["md5sum"]:
            self.reject("md5sum for %s doesn't match %s." % (found, file));
        if actual_size != int(dsc_files[dsc_file]["size"]):
            self.reject("size for %s doesn't match %s." % (found, file));

    return (self.reject_message, None);
1013 def do_query(self, q):
1014 sys.stderr.write("query: \"%s\" ... " % (q));
1015 before = time.time();
1016 r = self.projectB.query(q);
1017 time_diff = time.time()-before;
1018 sys.stderr.write("took %.3f seconds.\n" % (time_diff));