3 # Utility functions for katie
4 # Copyright (C) 2001, 2002, 2003, 2004 James Troup <james@nocrew.org>
5 # $Id: katie.py,v 1.50 2004-11-27 16:07:07 troup Exp $
7 # This program is free software; you can redistribute it and/or modify
8 # it under the terms of the GNU General Public License as published by
9 # the Free Software Foundation; either version 2 of the License, or
10 # (at your option) any later version.
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU General Public License for more details.
17 # You should have received a copy of the GNU General Public License
18 # along with this program; if not, write to the Free Software
19 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 ###############################################################################
23 import cPickle, errno, os, pg, re, stat, string, sys, time;
24 import utils, db_access;
25 import apt_inst, apt_pkg;
29 ###############################################################################
# Precompiled regular expressions used throughout this module.
# All patterns are raw strings so regex escapes like \d are not
# interpreted as (invalid) Python string escapes.
re_isanum = re.compile (r"^\d+$");            # entirely-numeric string (e.g. a bug number)
re_default_answer = re.compile(r"\[(.*)\]");  # default choice marked [X] in a prompt
re_fdnic = re.compile(r"\n\n");               # blank-line paragraph separators
# Bin-only NMU version suffixes: ".0.1" style (NMU of a maintainer
# upload) and ".1" style (NMU of a sourceful NMU); see source_exists().
re_bin_only_nmu_of_mu = re.compile(r"\.\d+\.\d+$");
re_bin_only_nmu_of_nmu = re.compile(r"\.\d+$");
37 ###############################################################################
39 # Convenience wrapper to carry around all the package information in
def __init__(self, **kwargs):
    # Seed this package-info holder's attributes straight from the
    # keyword arguments, so Pkg(changes={}, dsc={}) exposes .changes etc.
    for name, value in kwargs.items():
        setattr(self, name, value);
def update(self, **kwargs):
    # Merge the given keyword arguments into this object's attributes,
    # overwriting any that already exist.
    for name, value in kwargs.items():
        setattr(self, name, value);
48 ###############################################################################
51 # Read in the group maintainer override file
def __init__ (self, Cnf):
    # Build the table of group-maintained addresses from the optional
    # Dinstall::GroupOverrideFilename file under Dir::Override.
    # Keys are lower-cased, comment-stripped lines; values are just 1.
    self.group_maint = {};
    if Cnf.get("Dinstall::GroupOverrideFilename"):
        filename = Cnf["Dir::Override"] + Cnf["Dinstall::GroupOverrideFilename"];
        file = utils.open_file(filename);
        for line in file.readlines():
            # Drop comments, normalise case, strip whitespace.
            line = utils.re_comments.sub('', line).lower().strip();
            # NOTE(review): this extract shows no guard against empty
            # lines here -- confirm against the full source.
            self.group_maint[line] = 1;
def is_an_nmu (self, pkg):
    # Decide whether the upload in 'pkg' is a Non-Maintainer Upload by
    # comparing the .dsc Maintainer (and any Uploaders) against the
    # Changed-By information from the .changes file.
    # NOTE(review): this extract is missing several lines (local
    # bindings of Cnf/dsc, loop headers and return statements);
    # visible code reproduced as-is.
    changes = pkg.changes;
    # Normalise the .dsc Maintainer, falling back to our own address.
    i = utils.fix_maintainer (dsc.get("maintainer",
                              Cnf["Dinstall::MyEmailAddress"]).lower());
    (dsc_rfc822, dsc_rfc2047, dsc_name, dsc_email) = i;
    # changes["changedbyname"] == dsc_name is probably never true, but better safe than sorry
    if dsc_name == changes["maintainername"].lower() and \
       (changes["changedby822"] == "" or changes["changedbyname"].lower() == dsc_name):

    # Anyone listed in Uploaders (co-maintainers) also counts.
    if dsc.has_key("uploaders"):
        uploaders = dsc["uploaders"].lower().split(",");
            (rfc822, rfc2047, name, email) = utils.fix_maintainer (i.strip());
            uploadernames[name] = "";
        if uploadernames.has_key(changes["changedbyname"].lower()):

    # Some group maintained packages (e.g. Debian QA) are never NMU's
    if self.group_maint.has_key(changes["maintaineremail"].lower()):
92 ###############################################################################
def __init__(self, Cnf):
    # NOTE(review): this extract is missing a line here -- presumably
    # "self.Cnf = Cnf;", which other methods rely on. Confirm.
    # Read in the group-maint override file
    self.nmu = nmu_p(Cnf);
    # Running totals reported after processing; updated by accept().
    self.accept_count = 0;
    self.accept_bytes = 0L;
    # Per-upload state; (re)initialised by init_vars().
    self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
                   legacy_source_untouchable = {});

    # Initialize the substitution template mapping global
    Subst = self.Subst = {};
    Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"];
    Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"];
    Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"];
    Subst["__KATIE_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"];

    # Open the projectB database connection used by all queries.
    self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]));
    db_access.init(Cnf, self.projectB);
115 ###########################################################################
def init_vars (self):
    """Reset all per-package state ready for processing a new upload."""
    # Clear each mapping attribute in place -- callers may hold
    # references to the dictionaries themselves, so we must not rebind
    # them.  Using getattr() instead of building an exec string is
    # equivalent but avoids dynamic code execution.
    for name in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
        getattr(self.pkg, name).clear();
    self.pkg.orig_tar_id = None;
    self.pkg.orig_tar_location = "";
    self.pkg.orig_tar_gz = None;
124 ###########################################################################
def update_vars (self):
    # Reload per-package state previously saved by dump_vars() from the
    # ".katie" pickle next to the .changes file.
    # NOTE(review): extract appears truncated at the end (no close of
    # dump_file visible); reproduced as-is.
    dump_filename = self.pkg.changes_file[:-8]+".katie";
    dump_file = utils.open_file(dump_filename);
    p = cPickle.Unpickler(dump_file);
    # The dictionary attributes are update()d in place ...
    for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
        exec "self.pkg.%s.update(p.load());" % (i);
    # ... the scalar attributes are assigned directly.
    for i in [ "orig_tar_id", "orig_tar_location" ]:
        exec "self.pkg.%s = p.load();" % (i);
136 ###########################################################################
138 # This could just dump the dictionaries as is, but I'd like to avoid
139 # this so there's some idea of what katie & lisa use from jennifer
def dump_vars(self, dest_dir):
    # Serialise the interesting subset of per-package state to
    # <changes-basename>.katie in dest_dir, for later reloading by
    # update_vars().  Only whitelisted keys are dumped so there is a
    # record of what katie & lisa actually consume from jennifer.
    # NOTE(review): this extract is missing several lines (try/except
    # headers around chmod, the d_* dict initialisations, the dsc copy
    # loop body, the final p.dump() calls and file close);
    # visible code reproduced as-is.
    for i in [ "changes", "dsc", "files", "dsc_files",
               "legacy_source_untouchable", "orig_tar_id", "orig_tar_location" ]:
        exec "%s = self.pkg.%s;" % (i,i);
    dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".katie");
    dump_file = utils.open_file(dump_filename, 'w');
    # Tighten permissions; an EPERM is tolerated unless the file is
    # left world-readable, which would leak upload data.
        os.chmod(dump_filename, 0660);
        if errno.errorcode[e.errno] == 'EPERM':
            perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE]);
            if perms & stat.S_IROTH:
                utils.fubar("%s is world readable and chmod failed." % (dump_filename));

    # Binary (protocol 1) pickle of the filtered dictionaries.
    p = cPickle.Pickler(dump_file, 1);
    for i in [ "d_changes", "d_dsc", "d_files", "d_dsc_files" ]:

    for file in files.keys():
        # Copy only the whitelisted keys for each file entry.
        for i in [ "package", "version", "architecture", "type", "size",
                   "md5sum", "component", "location id", "source package",
                   "source version", "maintainer", "dbtype", "files id",
                   "new", "section", "priority", "othercomponents",
                   "pool name", "original component" ]:
            if files[file].has_key(i):
                d_files[file][i] = files[file][i];
    # Mandatory changes fields
    for i in [ "distribution", "source", "architecture", "version",
               "maintainer", "urgency", "fingerprint", "changedby822",
               "changedby2047", "changedbyname", "maintainer822",
               "maintainer2047", "maintainername", "maintaineremail",
               "closes", "changes" ]:
        d_changes[i] = changes[i];
    # Optional changes fields
    for i in [ "changed-by", "filecontents", "format", "lisa note" ]:
        if changes.has_key(i):
            d_changes[i] = changes[i];
    for i in [ "source", "version", "maintainer", "fingerprint",
               "uploaders", "bts changelog" ]:
    for file in dsc_files.keys():
        d_dsc_files[file] = {};
        # Mandatory dsc_files fields
        for i in [ "size", "md5sum" ]:
            d_dsc_files[file][i] = dsc_files[file][i];
        # Optional dsc_files fields
        for i in [ "files id" ]:
            if dsc_files[file].has_key(i):
                d_dsc_files[file][i] = dsc_files[file][i];
    for i in [ d_changes, d_dsc, d_files, d_dsc_files,
               legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
203 ###########################################################################
205 # Set up the per-package template substitution mappings
def update_subst (self, reject_message = ""):
    # Refresh the __FOO__ template substitutions from the current
    # package state.  NOTE(review): this extract is missing a few lines
    # (presumably "Subst = self.Subst;" and an "else:" between the two
    # maintainer branches); visible code reproduced as-is.
    changes = self.pkg.changes;
    # If jennifer crashed out in the right place, architecture may still be a string.
    if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
        changes["architecture"] = { "Unknown" : "" };
    # and maintainer2047 may not exist.
    if not changes.has_key("maintainer2047"):
        changes["maintainer2047"] = self.Cnf["Dinstall::MyEmailAddress"];

    Subst["__ARCHITECTURE__"] = " ".join(changes["architecture"].keys());
    Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file);
    Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "");

    # For source uploads the Changed-By field wins; otherwise Maintainer wins.
    if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
        Subst["__MAINTAINER_FROM__"] = changes["changedby2047"];
        Subst["__MAINTAINER_TO__"] = "%s, %s" % (changes["changedby2047"],
                                                 changes["maintainer2047"]);
        Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown");
        Subst["__MAINTAINER_FROM__"] = changes["maintainer2047"];
        Subst["__MAINTAINER_TO__"] = changes["maintainer2047"];
        Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown");
    # Bcc the package tracking system if one is configured.
    if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
        Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])

    # Apply any global override of the Maintainer field
    if self.Cnf.get("Dinstall::OverrideMaintainer"):
        Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"];
        Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"];

    Subst["__REJECT_MESSAGE__"] = reject_message;
    Subst["__SOURCE__"] = changes.get("source", "Unknown");
    Subst["__VERSION__"] = changes.get("version", "Unknown");
243 ###########################################################################
def build_summaries(self):
    # Build (summary, short_summary) strings describing the upload for
    # mails and console output.  NOTE(review): this extract is missing
    # lines (byhand/new flag assignments, an "else:" before the pool
    # branch, and a guard before "Changes:"); visible code reproduced
    # as-is.
    changes = self.pkg.changes;
    files = self.pkg.files;

    byhand = summary = new = "";

    # changes["distribution"] may not exist in corner cases
    # (e.g. unreadable changes files)
    if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
        changes["distribution"] = {};

    file_keys = files.keys();
    for file in file_keys:
        if files[file].has_key("byhand"):
            summary += file + " byhand\n"
        elif files[file].has_key("new"):
            summary += "(new) %s %s %s\n" % (file, files[file]["priority"], files[file]["section"])
            if files[file].has_key("othercomponents"):
                summary += "WARNING: Already present in %s distribution.\n" % (files[file]["othercomponents"])
            if files[file]["type"] == "deb":
                # Pull the Description out of the .deb's control file.
                summary += apt_pkg.ParseSection(apt_inst.debExtractControl(utils.open_file(file)))["Description"] + '\n';
            files[file]["pool name"] = utils.poolify (changes.get("source",""), files[file]["component"])
            destination = self.Cnf["Dir::PoolRoot"] + files[file]["pool name"] + file
            summary += file + "\n to " + destination + "\n"

    short_summary = summary;

    # This is for direport's benefit...
    f = re_fdnic.sub("\n .\n", changes.get("changes",""));

    summary += "Changes: " + f;

    summary += self.announce(short_summary, 0)

    return (summary, short_summary);
286 ###########################################################################
def close_bugs (self, summary, action):
    # Close or tag the bugs listed in the .changes Closes: field,
    # appending what was done to 'summary'.  Mails only go out when
    # 'action' is true.  NMUs get bugs tagged "fixed" rather than
    # closed; uploads to experimental get "fixed-in-experimental".
    # NOTE(review): this extract is missing many lines (locals such as
    # Subst/Cnf, the per-bug loop headers, "else:" branches, returns)
    # and truncates the stable-warning triple-quoted string;
    # visible code reproduced as-is.
    changes = self.pkg.changes;

    bugs = changes["closes"].keys();

    if not self.nmu.is_an_nmu(self.pkg):
        if changes["distribution"].has_key("experimental"):
            # tag bugs as fixed-in-experimental for uploads to experimental
            summary += "Setting bugs to severity fixed: ";
            control_message = "";
            summary += "%s " % (bug);
            control_message += "tag %s + fixed-in-experimental\n" % (bug);
            if action and control_message != "":
                Subst["__CONTROL_MESSAGE__"] = control_message;
                mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.bug-experimental-fixed");
                utils.send_mail (mail_message);
            self.Logger.log(["setting bugs to fixed"]+bugs);
        summary += "Closing bugs: ";
        summary += "%s " % (bug);
        Subst["__BUG_NUMBER__"] = bug;
        if changes["distribution"].has_key("stable"):
            Subst["__STABLE_WARNING__"] = """
Note that this package is not part of the released stable Debian
distribution. It may have dependencies on other unreleased software,
or other instabilities. Please take care if you wish to install it.
The update will eventually make its way into the next released Debian
            Subst["__STABLE_WARNING__"] = "";
        mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.bug-close");
        utils.send_mail (mail_message);
        self.Logger.log(["closing bugs"]+bugs);
    summary += "Setting bugs to severity fixed: ";
    control_message = "";
    summary += "%s " % (bug);
    control_message += "tag %s + fixed\n" % (bug);
    if action and control_message != "":
        Subst["__CONTROL_MESSAGE__"] = control_message;
        mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.bug-nmu-fixed");
        utils.send_mail (mail_message);
    self.Logger.log(["setting bugs to fixed"]+bugs);
350 ###########################################################################
def announce (self, short_summary, action):
    # Announce the upload to the per-suite announce lists and (if
    # configured) run close_bugs().  Returns a textual summary of what
    # was (or would be) done; mails only go out when 'action' is true.
    # NOTE(review): this extract is missing lines (locals such as
    # Subst/Cnf, the early return, lists_done/summary initialisation,
    # a "continue" and an "if action:" guard); visible code reproduced
    # as-is.
    changes = self.pkg.changes;

    # Only do announcements for source uploads with a recent dpkg-dev installed
    if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):

    Subst["__SHORT_SUMMARY__"] = short_summary;

    for dist in changes["distribution"].keys():
        list = Cnf.Find("Suite::%s::Announce" % (dist));
        # Skip suites with no list and lists we've already announced to.
        if list == "" or lists_done.has_key(list):
        lists_done[list] = 1;
        summary += "Announcing to %s\n" % (list);

        Subst["__ANNOUNCE_LIST_ADDRESS__"] = list;
        if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
            Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"]);
        mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.announce");
        utils.send_mail (mail_message);

    if Cnf.FindB("Dinstall::CloseBugs"):
        summary = self.close_bugs(summary, action);
384 ###########################################################################
def accept (self, summary, short_summary):
    # Accept the upload: move all files into Dir::Queue::Accepted,
    # send the accepted mail and announcements, maintain the DebBugs
    # version-tracking files and populate the accepted_autobuild table.
    # NOTE(review): this extract is missing many lines (locals such as
    # Cnf/Subst/dsc, "else:" branches, "continue" statements, the
    # ql = q.getresult() binding and file closes); visible code
    # reproduced as-is.
    files = self.pkg.files;
    changes = self.pkg.changes;
    changes_file = self.pkg.changes_file;

    self.Logger.log(["Accepting changes",changes_file]);

    # Persist the package state alongside the accepted files.
    self.dump_vars(Cnf["Dir::Queue::Accepted"]);

    # Move all the files into the accepted directory
    utils.move(changes_file, Cnf["Dir::Queue::Accepted"]);
    file_keys = files.keys();
    for file in file_keys:
        utils.move(file, Cnf["Dir::Queue::Accepted"]);
        self.accept_bytes += float(files[file]["size"])
    self.accept_count += 1;

    # Send accept mail, announce to lists, close bugs and check for
    # override disparities
    if not Cnf["Dinstall::Options::No-Mail"]:
        Subst["__SUITE__"] = "";
        Subst["__SUMMARY__"] = summary;
        mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.accepted");
        utils.send_mail(mail_message)
        self.announce(short_summary, 1)

    ## Helper stuff for DebBugs Version Tracking
    if Cnf.Find("Dir::Queue::BTSVersionTrack"):
        # ??? once queue/* is cleared on *.d.o and/or reprocessed
        # the conditionalization on dsc["bts changelog"] should be
        # Write out the version history from the changelog
        if changes["architecture"].has_key("source") and \
           dsc.has_key("bts changelog"):
            # Written via a dot-prefixed temp file then renamed so the
            # consumer never sees a partial file.
            temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                                dotprefix=1, perms=0644);
            version_history = utils.open_file(temp_filename, 'w');
            version_history.write(dsc["bts changelog"]);
            version_history.close();
            filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                  changes_file[:-8]+".versions");
            os.rename(temp_filename, filename);

        # Write out the binary -> source mapping.
        temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                            dotprefix=1, perms=0644);
        debinfo = utils.open_file(temp_filename, 'w');
        for file in file_keys:
            if f["type"] == "deb":
                line = " ".join([f["package"], f["version"],
                                 f["architecture"], f["source package"],
                                 f["source version"]]);
                debinfo.write(line+"\n");
        filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                              changes_file[:-8]+".debinfo");
        os.rename(temp_filename, filename);

    ## Special support to enable clean auto-building of accepted packages
    self.projectB.query("BEGIN WORK");
    for suite in changes["distribution"].keys():
        if suite not in Cnf.ValueList("Dinstall::AcceptedAutoBuildSuites"):
        suite_id = db_access.get_suite_id(suite);
        dest_dir = Cnf["Dir::AcceptedAutoBuild"];
        if Cnf.FindB("Dinstall::SecurityAcceptedAutoBuild"):
            dest_dir = os.path.join(dest_dir, suite);
        for file in file_keys:
            src = os.path.join(Cnf["Dir::Queue::Accepted"], file);
            dest = os.path.join(dest_dir, file);
            if Cnf.FindB("Dinstall::SecurityAcceptedAutoBuild"):
                # Copy it since the original won't be readable by www-data
                utils.copy(src, dest);
                # Create a symlink to it
                os.symlink(src, dest);
            # Add it to the list of packages for later processing by apt-ftparchive
            self.projectB.query("INSERT INTO accepted_autobuild (suite, filename, in_accepted) VALUES (%s, '%s', 't')" % (suite_id, dest));
        # If the .orig.tar.gz is in the pool, create a symlink to
        # it (if one doesn't already exist)
        if self.pkg.orig_tar_id:
            # Determine the .orig.tar.gz file name
            for dsc_file in self.pkg.dsc_files.keys():
                if dsc_file.endswith(".orig.tar.gz"):
            dest = os.path.join(dest_dir, filename);
            # If it doesn't exist, create a symlink
            if not os.path.exists(dest):
                # Find the .orig.tar.gz in the pool
                q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id));
                utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id));
                src = os.path.join(ql[0][0], ql[0][1]);
                os.symlink(src, dest);
                # Add it to the list of packages for later processing by apt-ftparchive
                self.projectB.query("INSERT INTO accepted_autobuild (suite, filename, in_accepted) VALUES (%s, '%s', 't')" % (suite_id, dest));
            # if it does, update things to ensure it's not removed prematurely
            self.projectB.query("UPDATE accepted_autobuild SET in_accepted = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id));

    self.projectB.query("COMMIT WORK");
497 ###########################################################################
def check_override (self):
    # Compare each binary's section/priority against the override
    # database entries recorded by in_override_p() and mail the
    # maintainer about any disparities.  NOTE(review): this extract is
    # missing lines (locals such as Cnf/Subst, the early return,
    # summary initialisation, a "continue" and the final mail guard);
    # visible code reproduced as-is.
    changes = self.pkg.changes;
    files = self.pkg.files;

    # Abandon the check if:
    # a) it's a non-sourceful upload
    # b) override disparity checks have been disabled
    # c) we're not sending mail
    if not changes["architecture"].has_key("source") or \
       not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
       Cnf["Dinstall::Options::No-Mail"]:

    file_keys = files.keys();
    for file in file_keys:
        # NEW packages have no override yet, so only check known .debs.
        if not files[file].has_key("new") and files[file]["type"] == "deb":
            section = files[file]["section"];
            override_section = files[file]["override section"];
            if section.lower() != override_section.lower() and section != "-":
                # Ignore this; it's a common mistake and not worth whining about
                if section.lower() == "non-us/main" and override_section.lower() == "non-us":
                summary += "%s: package says section is %s, override says %s.\n" % (file, section, override_section);
            priority = files[file]["priority"];
            override_priority = files[file]["override priority"];
            if priority != override_priority and priority != "-":
                summary += "%s: package says priority is %s, override says %s.\n" % (file, priority, override_priority);

    Subst["__SUMMARY__"] = summary;
    mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/jennifer.override-disparity");
    utils.send_mail(mail_message);
538 ###########################################################################
def force_reject (self, files):
    """Forcefully move files from the current directory to the
       reject directory.  If any file already exists in the reject
       directory it will be moved to the morgue to make way for
       the new file."""
    # NOTE(review): this extract is missing lines (the for-loop header
    # over 'files', try:/except headers, continue/return/raise
    # statements); visible code reproduced as-is.
        # Skip any files which don't exist or which we don't have permission to copy.
        if os.access(file,os.R_OK) == 0:
        dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file);
        # O_EXCL claims the destination exclusively, so we never
        # silently clobber a file someone else put there.
        os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);
        # File exists? Let's try and move it to the morgue
        if errno.errorcode[e.errno] == 'EEXIST':
            morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file);
            morgue_file = utils.find_next_free(morgue_file);
            except utils.tried_too_hard_exc:
                # Something's either gone badly Pete Tong, or
                # someone is trying to exploit us.
                utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file));
            utils.move(dest_file, morgue_file, perms=0660);
            os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);
            utils.warn("**WARNING** failed to claim %s in the reject directory." % (file));
        # If we got here, we own the destination file, so we can
        # safely overwrite it.
        utils.move(file, dest_file, 1, perms=0660);
579 ###########################################################################
def do_reject (self, manual = 0, reject_message = ""):
    # Reject the current upload: optionally let the operator compose a
    # rejection message in $EDITOR, move all files into
    # Dir::Queue::Reject via force_reject(), write a <changes>.reason
    # file and mail the uploader.  NOTE(review): this extract is
    # missing many lines (the edit/confirm loop structure, locals such
    # as Cnf/Subst/pkg, and the manual-vs-automatic branch headers);
    # visible code reproduced as-is.
    # If we weren't given a manual rejection message, spawn an
    # editor so the user can add one in...
    if manual and not reject_message:
        temp_filename = utils.temp_filename();
        editor = os.environ.get("EDITOR","vi")
        os.system("%s %s" % (editor, temp_filename))
        file = utils.open_file(temp_filename);
        reject_message = "".join(file.readlines());
        print "Reject message:";
        print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1);
        prompt = "[R]eject, Edit, Abandon, Quit ?"
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt);
            m = re_default_answer.search(prompt);
            answer = answer[:1].upper();
        os.unlink(temp_filename);

    reason_filename = pkg.changes_file[:-8] + ".reason";
    reason_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename;

    # Move all the files into the reject directory
    reject_files = pkg.files.keys() + [pkg.changes_file];
    self.force_reject(reject_files);

    # If we fail here someone is probably trying to exploit the race
    # so let's just raise an exception ...
    if os.path.exists(reason_filename):
        os.unlink(reason_filename);
    reason_file = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);

    # Automatic rejection: sender is katie itself.
    Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"];
    Subst["__MANUAL_REJECT_MESSAGE__"] = "";
    Subst["__CC__"] = "X-Katie-Rejection: automatic (moo)";
    os.write(reason_file, reject_message);
    reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/katie.rejected");

    # Build up the rejection email
    user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"]);

    Subst["__REJECTOR_ADDRESS__"] = user_email_address;
    Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message;
    Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"];
    reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/katie.rejected");
    # Write the rejection email out as the <foo>.reason file
    os.write(reason_file, reject_mail_message);

    os.close(reason_file);

    # Send the rejection mail if appropriate
    if not Cnf["Dinstall::Options::No-Mail"]:
        utils.send_mail(reject_mail_message);

    self.Logger.log(["rejected", pkg.changes_file]);
654 ################################################################################
656 # Ensure that source exists somewhere in the archive for the binary
657 # upload being processed.
659 # (1) exact match => 1.0-3
660 # (2) Bin-only NMU of an MU => 1.0-3.0.1
661 # (3) Bin-only NMU of a sourceful-NMU => 1.0-3.1.1
def source_exists (self, package, source_version, suites = ["any"]):
    # Check that source for 'package' exists in the given suites at a
    # version matching 'source_version' exactly, or as the base of a
    # bin-only NMU version (1.0-3 / 1.0-3.0.1 / 1.0-3.1.1).
    # NOTE(review): this extract is missing many lines (the per-suite
    # branch for "any", the suite-mapping expansion loop and the
    # return statements); visible code reproduced as-is.
    # NOTE(review): mutable default argument suites=["any"] -- safe
    # only while callers never mutate it; confirm.
    que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \

    # source must exist in suite X, or in some other suite that's
    # mapped to X, recursively... silent-maps are counted too,
    # unreleased-maps aren't.
    maps = self.Cnf.ValueList("SuiteMappings")[:]
    maps = [ m.split() for m in maps ]
    maps = [ (x[1], x[2]) for x in maps
             if x[0] == "map" or x[0] == "silent-map" ]
    if x[1] in s and x[0] not in s:

    que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, string.join(["su.suite_name = '%s'" % a for a in s], " OR "));
    q = self.projectB.query(que)

    # Reduce the query results to a list of version numbers
    ql = map(lambda x: x[0], q.getresult());

    # (1) exact match
    if source_version in ql:

    # (2) bin-only NMU of a maintainer upload: strip ".X.Y" suffix
    orig_source_version = re_bin_only_nmu_of_mu.sub('', source_version)
    if orig_source_version in ql:

    # (3) bin-only NMU of a sourceful NMU: strip ".X" suffix
    orig_source_version = re_bin_only_nmu_of_nmu.sub('', source_version)
    if orig_source_version in ql:
707 ################################################################################
def in_override_p (self, package, component, suite, binary_type, file):
    # Look up 'package' in the override table for 'suite'; on success,
    # record the override section/priority in files[file] so
    # check_override() can compare them later.  NOTE(review): this
    # extract is missing lines (the 'type' assignment from
    # binary_type, the unknown-suite check and return statements);
    # visible code reproduced as-is.
    files = self.pkg.files;

    if binary_type == "": # must be source

    # Override suite name; used for example with proposed-updates
    if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
        suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)];

    # Avoid <undef> on unknown distributions
    suite_id = db_access.get_suite_id(suite);
    component_id = db_access.get_component_id(component);
    type_id = db_access.get_override_type_id(type);

    # FIXME: nasty non-US specific hack
    if component.lower().startswith("non-us/"):
        component = component[7:];

    q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
                            % (package, suite_id, component_id, type_id));
    result = q.getresult();
    # If checking for a source package fall back on the binary override type
    if type == "dsc" and not result:
        deb_type_id = db_access.get_override_type_id("deb");
        udeb_type_id = db_access.get_override_type_id("udeb");
        q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
                                % (package, suite_id, component_id, deb_type_id, udeb_type_id));
        result = q.getresult();

    # Remember the section and priority so we can check them later if appropriate
    files[file]["override section"] = result[0][0];
    files[file]["override priority"] = result[0][1];
750 ################################################################################
def reject (self, str, prefix="Rejected: "):
    # Append one rejection line (prefixed) to self.reject_message.
    # NOTE(review): parameter name shadows the builtin 'str'; kept as
    # part of the existing interface.  The extract may be missing a
    # guard line here -- confirm against the full source.
    # Unlike other rejects we add new lines first to avoid trailing
    # new lines when this message is passed back up to a caller.
    if self.reject_message:
        self.reject_message += "\n";
    self.reject_message += prefix + str;
760 ################################################################################
def cross_suite_version_check(self, query_result, file, new_version):
    """Ensure versions are newer than existing packages in target
    suites and that cross-suite version checking rules as
    set out in the conf file are satisfied."""
    # query_result: rows of (version, suite_name) from the database.
    # Failures are accumulated via self.reject().
    # NOTE(review): this extract is missing the line binding 'suite'
    # from 'entry' inside the loop; visible code reproduced as-is.

    # Check versions for each target suite
    for target_suite in self.pkg.changes["distribution"].keys():
        must_be_newer_than = map(string.lower, self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)));
        must_be_older_than = map(string.lower, self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)));
        # Enforce "must be newer than target suite" even if conffile omits it
        if target_suite not in must_be_newer_than:
            must_be_newer_than.append(target_suite);
        for entry in query_result:
            existent_version = entry[0];
            if suite in must_be_newer_than and \
               apt_pkg.VersionCompare(new_version, existent_version) < 1:
                self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite));
            if suite in must_be_older_than and \
               apt_pkg.VersionCompare(new_version, existent_version) > -1:
                self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite));
784 ################################################################################
def check_binary_against_db(self, file):
    # Validate the binary in files[file] against the database: version
    # ordering across suites (cross_suite_version_check) and no
    # overwriting of a file already in the archive.  Returns the
    # accumulated rejection message; empty string means OK.
    # NOTE(review): this extract is missing lines (the architecture
    # table in the first SQL statement and the "if q.getresult():"
    # guard before the overwrite reject); visible code reproduced
    # as-is.
    self.reject_message = "";
    files = self.pkg.files;

    # Ensure version is sane
    q = self.projectB.query("""
SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
 WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
   AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
                            % (files[file]["package"],
                               files[file]["architecture"]));
    self.cross_suite_version_check(q.getresult(), file, files[file]["version"]);

    # Check for any existing copies of the file
    q = self.projectB.query("""
SELECT b.id FROM binaries b, architecture a
 WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
   AND a.id = b.architecture"""
                            % (files[file]["package"],
                               files[file]["version"],
                               files[file]["architecture"]))
    self.reject("%s: can not overwrite existing copy already in the archive." % (file));

    return self.reject_message;
813 ################################################################################
def check_source_against_db(self, file):
    # Validate the source package's version against existing versions
    # in the database (cross-suite version rules).  Returns the
    # accumulated rejection message; empty string means OK.
    # NOTE(review): this extract is missing the local binding of
    # 'dsc' (presumably dsc = self.pkg.dsc); visible code reproduced
    # as-is.
    self.reject_message = "";

    # Ensure version is sane
    q = self.projectB.query("""
SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
 WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")));
    self.cross_suite_version_check(q.getresult(), file, dsc.get("version"));

    return self.reject_message;
827 ################################################################################
830 # NB: this function can remove entries from the 'files' index [if
831 # the .orig.tar.gz is a duplicate of the one in the archive]; if
832 # you're iterating over 'files' and call this function as part of
833 # the loop, be sure to add a check to the top of the loop to
# ensure you haven't just tried to dereference the deleted entry.
def check_dsc_against_db(self, file):
    # Verify every file listed in the .dsc: locate it in incoming, the
    # archive pool or the queue directories, and check its md5sum and
    # size.  May remove a duplicate .orig.tar.gz entry from 'files'
    # and sets self.pkg.orig_tar_* as side effects.  Returns a tuple
    # (reject_message, orig_tar_gz_path_or_None).
    # NOTE(review): this extract is missing many lines (ql/found
    # bindings, loop headers over query results, "else:" branches,
    # continue/break statements); visible code reproduced as-is.
    self.reject_message = "";
    files = self.pkg.files;
    dsc_files = self.pkg.dsc_files;
    legacy_source_untouchable = self.pkg.legacy_source_untouchable;
    self.pkg.orig_tar_gz = None;

    # Try and find all files mentioned in the .dsc. This has
    # to work harder to cope with the multiple possible
    # locations of an .orig.tar.gz.
    for dsc_file in dsc_files.keys():
        if files.has_key(dsc_file):
            actual_md5 = files[dsc_file]["md5sum"];
            actual_size = int(files[dsc_file]["size"]);
            found = "%s in incoming" % (dsc_file)
            # Check the file does not already exist in the archive
            q = self.projectB.query("SELECT f.size, f.md5sum, l.path, f.filename FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file));
            # Strip out anything that isn't '%s' or '/%s$'
            if i[3] != dsc_file and i[3][-(len(dsc_file)+1):] != '/'+dsc_file:

            # "[katie] has not broken them. [katie] has fixed a
            # brokenness. Your crappy hack exploited a bug in
            # "(Come on! I thought it was always obvious that
            # one just doesn't release different files with
            # the same name and version.)"
            #   -- ajk@ on d-devel@l.d.o

            # Ignore exact matches for .orig.tar.gz
            if dsc_file.endswith(".orig.tar.gz"):
                if files.has_key(dsc_file) and \
                   int(files[dsc_file]["size"]) == int(i[0]) and \
                   files[dsc_file]["md5sum"] == i[1]:
                    self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ");
                    self.pkg.orig_tar_gz = i[2] + i[3];

            self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file));
        elif dsc_file.endswith(".orig.tar.gz"):
            # Not in incoming: look for the .orig.tar.gz in the pool.
            q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file));
            # Strip out anything that isn't '%s' or '/%s$'
            if i[1] != dsc_file and i[1][-(len(dsc_file)+1):] != '/'+dsc_file:

            # Unfortunately, we may get more than one match here if,
            # for example, the package was in potato but had an -sa
            # upload in woody. So we need to choose the right one.
            x = ql[0]; # default to something sane in case we don't match any or have only one

            old_file = i[0] + i[1];
            actual_md5 = apt_pkg.md5sum(utils.open_file(old_file));
            actual_size = os.stat(old_file)[stat.ST_SIZE];
            if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
            legacy_source_untouchable[i[3]] = "";

            old_file = x[0] + x[1];
            actual_md5 = apt_pkg.md5sum(utils.open_file(old_file));
            actual_size = os.stat(old_file)[stat.ST_SIZE];
            dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
            # See install() in katie...
            self.pkg.orig_tar_id = x[3];
            self.pkg.orig_tar_gz = old_file;
            if suite_type == "legacy" or suite_type == "legacy-mixed":
                self.pkg.orig_tar_location = "legacy";
            self.pkg.orig_tar_location = x[4];

        # Not there? Check the queue directories...
        in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file);
        # See process_it() in jennifer for explanation of this
        if os.path.exists(in_unchecked):
            return (self.reject_message, in_unchecked);
        for dir in [ "Accepted", "New", "Byhand" ]:
            in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (dir)],dsc_file);
            if os.path.exists(in_otherdir):
                actual_md5 = apt_pkg.md5sum(utils.open_file(in_otherdir));
                actual_size = os.stat(in_otherdir)[stat.ST_SIZE];
                self.pkg.orig_tar_gz = in_otherdir;

        self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file));
        self.pkg.orig_tar_gz = -1;

        self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file));

        if actual_md5 != dsc_files[dsc_file]["md5sum"]:
            self.reject("md5sum for %s doesn't match %s." % (found, file));
        if actual_size != int(dsc_files[dsc_file]["size"]):
            self.reject("size for %s doesn't match %s." % (found, file));

    return (self.reject_message, None);
def do_query(self, q):
    # Debugging wrapper around projectB.query(): writes the query and
    # its elapsed wall-clock time to stderr.
    # NOTE(review): the extract ends here; the original presumably
    # returns 'r' -- confirm against the full source.
    sys.stderr.write("query: \"%s\" ... " % (q));
    before = time.time();
    r = self.projectB.query(q);
    time_diff = time.time()-before;
    sys.stderr.write("took %.3f seconds.\n" % (time_diff));