3 # Utility functions for katie
4 # Copyright (C) 2001, 2002, 2003, 2004 James Troup <james@nocrew.org>
5 # $Id: katie.py,v 1.45 2004-04-01 17:14:25 troup Exp $
7 # This program is free software; you can redistribute it and/or modify
8 # it under the terms of the GNU General Public License as published by
9 # the Free Software Foundation; either version 2 of the License, or
10 # (at your option) any later version.
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU General Public License for more details.
17 # You should have received a copy of the GNU General Public License
18 # along with this program; if not, write to the Free Software
19 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 ###############################################################################
23 import cPickle, errno, os, pg, re, stat, string, sys, time;
24 import utils, db_access;
25 import apt_inst, apt_pkg;
29 ###############################################################################
# Pre-compiled regexps used throughout this module.  All patterns are
# raw strings so backslash escapes (\d, \., \[) are passed to the regex
# engine verbatim instead of relying on Python's lenient handling of
# unknown string escapes.
re_isanum = re.compile (r"^\d+$");                      # a bare decimal number (e.g. a bug number)
re_default_answer = re.compile(r"\[(.*)\]");            # the "[X]" default in a prompt string
re_fdnic = re.compile(r"\n\n");                         # blank-line separator (r"\n" matches newline)
re_bin_only_nmu_of_mu = re.compile(r"\.\d+\.\d+$");     # binary-only NMU of a maintainer upload, e.g. 1.0-3.0.1
re_bin_only_nmu_of_nmu = re.compile(r"\.\d+$");         # binary-only NMU of a sourceful NMU, e.g. 1.0-3.1.1
37 ###############################################################################
# Convenience wrapper to carry around all the package information in one place.
def __init__(self, **kwds):
    """Create the package-info container, storing every keyword
    argument directly as an attribute of this object."""
    for attribute, value in kwds.items():
        setattr(self, attribute, value);
def update(self, **kwds):
    """Merge the given keyword arguments into this object's
    attributes, overwriting any that already exist."""
    for attribute, value in kwds.items():
        setattr(self, attribute, value);
48 ###############################################################################
51 # Read in the group maintainer override file
def __init__ (self, Cnf):
    """Read the group-maintainer override file (if configured) into
    self.group_maint: a set-like dict of lower-cased entries that are
    considered group-maintained (and hence never NMUs)."""
    self.group_maint = {};
    if Cnf.get("Dinstall::GroupOverrideFilename"):
        filename = Cnf["Dir::Override"] + Cnf["Dinstall::GroupOverrideFilename"];
        file = utils.open_file(filename);
        for line in file.readlines():
            # Strip comments, then normalise case and surrounding whitespace.
            line = utils.re_comments.sub('', line).lower().strip();
            # NOTE(review): a guard against empty lines appears to be
            # missing from this view -- confirm against the full source,
            # otherwise a blank key gets recorded.
            self.group_maint[line] = 1;
def is_an_nmu (self, pkg):
    """Decide whether *pkg* is a Non-Maintainer Upload by comparing the
    .dsc Maintainer (and Uploaders) against the .changes Changed-By.

    NOTE(review): several lines of this method (bindings for 'Cnf',
    'dsc', 'uploadernames' and the return statements) are not visible
    in this view -- confirm against the full source.
    """
    changes = pkg.changes;
    (dsc_rfc822, dsc_name, dsc_email) = utils.fix_maintainer (dsc.get("maintainer",Cnf["Dinstall::MyEmailAddress"]).lower());
    # changes["changedbyname"] == dsc_name is probably never true, but better safe than sorry
    if dsc_name == changes["maintainername"].lower() and \
       (changes["changedby822"] == "" or changes["changedbyname"].lower() == dsc_name):
        # Uploader is the maintainer: not an NMU unless Uploaders is involved.
        if dsc.has_key("uploaders"):
            uploaders = dsc["uploaders"].lower().split(",");
            # Collect the parsed name of each co-maintainer listed in Uploaders.
            (rfc822, name, email) = utils.fix_maintainer (i.strip());
            uploadernames[name] = "";
            # An upload by a listed co-maintainer is not an NMU either.
            if uploadernames.has_key(changes["changedbyname"].lower()):
    # Some group maintained packages (e.g. Debian QA) are never NMU's
    if self.group_maint.has_key(changes["maintaineremail"].lower()):
90 ###############################################################################
def __init__(self, Cnf):
    """Set up shared state: the NMU detector, accept counters, an empty
    Pkg container, the template substitution map and the projectB
    database connection."""
    # NOTE(review): self.Cnf is presumably bound from Cnf in a line not
    # visible in this view -- later methods read self.Cnf.
    # Read in the group-maint override file
    self.nmu = nmu_p(Cnf);
    # Running totals maintained by accept().
    self.accept_count = 0;
    self.accept_bytes = 0L;
    self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
                   legacy_source_untouchable = {});

    # Initialize the substitution template mapping global
    Subst = self.Subst = {};
    Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"];
    Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"];
    Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"];
    Subst["__KATIE_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"];

    self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]));
    db_access.init(Cnf, self.projectB);
113 ###########################################################################
def init_vars (self):
    """Reset the per-upload state held in self.pkg.

    Clears the five dictionaries that hold parsed upload data and
    resets the .orig.tar.gz bookkeeping attributes to their defaults.
    """
    for name in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
        # getattr() replaces the old exec-on-a-formatted-string idiom:
        # identical effect, but no dynamic code execution.
        getattr(self.pkg, name).clear();
    self.pkg.orig_tar_id = None;
    self.pkg.orig_tar_location = "";
    self.pkg.orig_tar_gz = None;
122 ###########################################################################
def update_vars (self):
    """Reload pickled per-upload state from the .katie dump file.

    Reads <changes-file minus ".changes">.katie and repopulates
    self.pkg's dictionaries and scalar attributes from it, in the same
    order dump_vars() wrote them.
    NOTE(review): the file close appears in a line not visible here.
    """
    # [:-8] strips the ".changes" suffix from the changes filename.
    dump_filename = self.pkg.changes_file[:-8]+".katie";
    dump_file = utils.open_file(dump_filename);
    p = cPickle.Unpickler(dump_file);
    # Dictionaries were pickled one by one, in exactly this order.
    for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
        exec "self.pkg.%s.update(p.load());" % (i);
    # Scalar attributes follow the dictionaries in the pickle stream.
    for i in [ "orig_tar_id", "orig_tar_location" ]:
        exec "self.pkg.%s = p.load();" % (i);
134 ###########################################################################
# This could just dump the dictionaries as is, but I'd like to avoid
# this so there's some idea of what katie & lisa use from jennifer
def dump_vars(self, dest_dir):
    """Pickle a filtered copy of self.pkg's state into
    <dest_dir>/<changes-file minus ".changes">.katie.

    Only explicitly whitelisted keys of each dictionary are written, so
    the dump documents what katie & lisa actually consume.
    NOTE(review): several lines of this method (the try around
    os.chmod, the d_* dict initialisations, the loop headers and the
    final pickle calls) are not visible in this view.
    """
    # Bind local aliases (changes, dsc, files, ...) to self.pkg's attributes.
    for i in [ "changes", "dsc", "files", "dsc_files",
               "legacy_source_untouchable", "orig_tar_id", "orig_tar_location" ]:
        exec "%s = self.pkg.%s;" % (i,i);
    dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".katie");
    dump_file = utils.open_file(dump_filename, 'w');
    # Restrict the dump's permissions; 'e' below comes from an except
    # clause not visible in this view.
    os.chmod(dump_filename, 0660);
    if errno.errorcode[e.errno] == 'EPERM':
        perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE]);
        # Only bail if the file is world readable AND we could not fix it.
        if perms & stat.S_IROTH:
            utils.fubar("%s is world readable and chmod failed." % (dump_filename));

    p = cPickle.Pickler(dump_file, 1);
    # Initialise the filtered copies (d_changes, d_dsc, ...).
    for i in [ "d_changes", "d_dsc", "d_files", "d_dsc_files" ]:
    # Copy only the whitelisted per-file keys.
    for file in files.keys():
        for i in [ "package", "version", "architecture", "type", "size",
                   "md5sum", "component", "location id", "source package",
                   "source version", "maintainer", "dbtype", "files id",
                   "new", "section", "priority", "othercomponents",
                   "pool name", "original component" ]:
            if files[file].has_key(i):
                d_files[file][i] = files[file][i];
    # Mandatory changes fields
    for i in [ "distribution", "source", "architecture", "version", "maintainer",
               "urgency", "fingerprint", "changedby822", "changedbyname",
               "maintainername", "maintaineremail", "closes" ]:
        d_changes[i] = changes[i];
    # Optional changes fields
    # FIXME: changes should be mandatory
    for i in [ "changed-by", "maintainer822", "filecontents", "format",
               "changes", "lisa note" ]:
        if changes.has_key(i):
            d_changes[i] = changes[i];
    # Whitelisted .dsc fields (list truncated in this view).
    for i in [ "source", "version", "maintainer", "fingerprint", "uploaders",
    for file in dsc_files.keys():
        d_dsc_files[file] = {};
        # Mandatory dsc_files fields
        for i in [ "size", "md5sum" ]:
            d_dsc_files[file][i] = dsc_files[file][i];
        # Optional dsc_files fields
        for i in [ "files id" ]:
            if dsc_files[file].has_key(i):
                d_dsc_files[file][i] = dsc_files[file][i];
    # Pickle each filtered structure in a fixed order (update_vars()
    # reads them back in the same order).
    for i in [ d_changes, d_dsc, d_files, d_dsc_files,
               legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
201 ###########################################################################
# Set up the per-package template substitution mappings
def update_subst (self, reject_message = ""):
    """Refresh self.Subst with per-upload values (architecture,
    maintainer addressing, source/version, reject message) used by the
    mail templates.
    NOTE(review): the binding of 'Subst' (presumably to self.Subst) and
    the else: separating the two maintainer branches are not visible in
    this view.
    """
    changes = self.pkg.changes;
    # If jennifer crashed out in the right place, architecture may still be a string.
    if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
        changes["architecture"] = { "Unknown" : "" };
    # and maintainer822 may not exist.
    if not changes.has_key("maintainer822"):
        changes["maintainer822"] = self.Cnf["Dinstall::MyEmailAddress"];

    Subst["__ARCHITECTURE__"] = " ".join(changes["architecture"].keys());
    Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file);
    Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "");

    # For source uploads the Changed-By field wins; otherwise Maintainer wins.
    if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
        Subst["__MAINTAINER_FROM__"] = changes["changedby822"];
        Subst["__MAINTAINER_TO__"] = changes["changedby822"] + ", " + changes["maintainer822"];
        Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown");
        # (else-branch below; the 'else:' line itself is not visible here)
        Subst["__MAINTAINER_FROM__"] = changes["maintainer822"];
        Subst["__MAINTAINER_TO__"] = changes["maintainer822"];
        Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown");
    # Bcc a source-tracking server, if one is configured.
    if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
        Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])

    # Apply any global override of the Maintainer field
    if self.Cnf.get("Dinstall::OverrideMaintainer"):
        Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"];
        Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"];

    Subst["__REJECT_MESSAGE__"] = reject_message;
    Subst["__SOURCE__"] = changes.get("source", "Unknown");
    Subst["__VERSION__"] = changes.get("version", "Unknown");
240 ###########################################################################
def build_summaries(self):
    """Build the human-readable (summary, short_summary) pair describing
    this upload: per-file destinations, byhand/new flags, and the
    changelog text.
    NOTE(review): some lines (e.g. the else: introducing the normal
    per-file branch) are not visible in this view.
    """
    changes = self.pkg.changes;
    files = self.pkg.files;

    byhand = summary = new = "";

    # changes["distribution"] may not exist in corner cases
    # (e.g. unreadable changes files)
    if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
        changes["distribution"] = {};

    file_keys = files.keys();

    for file in file_keys:
        if files[file].has_key("byhand"):
            summary += file + " byhand\n"
        elif files[file].has_key("new"):
            summary += "(new) %s %s %s\n" % (file, files[file]["priority"], files[file]["section"])
            if files[file].has_key("othercomponents"):
                summary += "WARNING: Already present in %s distribution.\n" % (files[file]["othercomponents"])
            # For new .debs, include the package description in the summary.
            if files[file]["type"] == "deb":
                summary += apt_pkg.ParseSection(apt_inst.debExtractControl(utils.open_file(file)))["Description"] + '\n';
        # (normal-file branch; its 'else:' line is not visible here)
            files[file]["pool name"] = utils.poolify (changes.get("source",""), files[file]["component"])
            destination = self.Cnf["Dir::PoolRoot"] + files[file]["pool name"] + file
            summary += file + "\n  to " + destination + "\n"

    short_summary = summary;

    # This is for direport's benefit...
    f = re_fdnic.sub("\n .\n", changes.get("changes",""));

    summary += "Changes: " + f;

    summary += self.announce(short_summary, 0)

    return (summary, short_summary);
283 ###########################################################################
def close_bugs (self, summary, action):
    """Close (or tag) the bugs listed in the upload's Closes field.

    Maintainer uploads close bugs outright (experimental uploads tag
    them fixed-in-experimental instead); NMUs only tag them 'fixed'.
    If *action* is false, mails are composed into the summary but not
    sent.
    NOTE(review): loop headers ('for bug in bugs:'), else: lines and
    the bindings for Subst/Cnf are not visible in this view; the
    stable-warning string literal is also truncated mid-string.
    """
    changes = self.pkg.changes;
    bugs = changes["closes"].keys();
    if not self.nmu.is_an_nmu(self.pkg):
        if changes["distribution"].has_key("experimental"):
            # tag bugs as fixed-in-experimental for uploads to experimental
            summary += "Setting bugs to severity fixed: ";
            control_message = "";
            summary += "%s " % (bug);
            control_message += "tag %s + fixed-in-experimental\n" % (bug);
            if action and control_message != "":
                Subst["__CONTROL_MESSAGE__"] = control_message;
                mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.bug-experimental-fixed");
                utils.send_mail (mail_message);
            self.Logger.log(["setting bugs to fixed"]+bugs);
        summary += "Closing bugs: ";
        summary += "%s " % (bug);
        Subst["__BUG_NUMBER__"] = bug;
        if changes["distribution"].has_key("stable"):
            Subst["__STABLE_WARNING__"] = """
Note that this package is not part of the released stable Debian
distribution. It may have dependencies on other unreleased software,
or other instabilities. Please take care if you wish to install it.
The update will eventually make its way into the next released Debian
        Subst["__STABLE_WARNING__"] = "";
        mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.bug-close");
        utils.send_mail (mail_message);
        self.Logger.log(["closing bugs"]+bugs);
        # NMU case: tag the bugs 'fixed' rather than closing them.
        summary += "Setting bugs to severity fixed: ";
        control_message = "";
        summary += "%s " % (bug);
        control_message += "tag %s + fixed\n" % (bug);
        if action and control_message != "":
            Subst["__CONTROL_MESSAGE__"] = control_message;
            mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.bug-nmu-fixed");
            utils.send_mail (mail_message);
        self.Logger.log(["setting bugs to fixed"]+bugs);
347 ###########################################################################
def announce (self, short_summary, action):
    """Send announcement mails to each target suite's announce list
    (deduplicated via lists_done) and, if configured, trigger bug
    closing.  If *action* is false, nothing is actually sent.
    NOTE(review): the bindings for Cnf/Subst/summary/lists_done and the
    early return are not visible in this view.
    """
    changes = self.pkg.changes;

    # Only do announcements for source uploads with a recent dpkg-dev installed
    if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):

    Subst["__SHORT_SUMMARY__"] = short_summary;

    for dist in changes["distribution"].keys():
        list = Cnf.Find("Suite::%s::Announce" % (dist));
        # Skip suites with no announce list, or lists already mailed.
        if list == "" or lists_done.has_key(list):
        lists_done[list] = 1;
        summary += "Announcing to %s\n" % (list);

        Subst["__ANNOUNCE_LIST_ADDRESS__"] = list;
        # Bcc the source-tracking server as well, when configured.
        if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
            Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"]);
        mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.announce");
        utils.send_mail (mail_message);

    if Cnf.FindB("Dinstall::CloseBugs"):
        summary = self.close_bugs(summary, action);
381 ###########################################################################
def accept (self, summary, short_summary):
    """Accept an upload: move its files into the accepted queue, send
    the accepted mail / announcements, write DebBugs version-tracking
    files, and register files for auto-building of accepted packages.
    NOTE(review): the bindings for Cnf/Subst/dsc/f/ql and several
    continue/else lines are not visible in this view.
    """
    files = self.pkg.files;
    changes = self.pkg.changes;
    changes_file = self.pkg.changes_file;

    self.Logger.log(["Accepting changes",changes_file]);

    # Dump the parsed state alongside the accepted files for later stages.
    self.dump_vars(Cnf["Dir::Queue::Accepted"]);

    # Move all the files into the accepted directory
    utils.move(changes_file, Cnf["Dir::Queue::Accepted"]);
    file_keys = files.keys();
    for file in file_keys:
        utils.move(file, Cnf["Dir::Queue::Accepted"]);
        self.accept_bytes += float(files[file]["size"])
    self.accept_count += 1;

    # Send accept mail, announce to lists, close bugs and check for
    # override disparities
    if not Cnf["Dinstall::Options::No-Mail"]:
        Subst["__SUITE__"] = "";
        Subst["__SUMMARY__"] = summary;
        mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.accepted");
        utils.send_mail(mail_message)
        self.announce(short_summary, 1)

    ## Helper stuff for DebBugs Version Tracking
    if Cnf.Find("Dir::Queue::BTSVersionTrack"):
        # ??? once queue/* is cleared on *.d.o and/or reprocessed
        # the conditionalization on dsc["bts changelog"] should be
        # Write out the version history from the changelog
        if changes["architecture"].has_key("source") and \
           dsc.has_key("bts changelog"):
            # Write via a dot-prefixed temp file, then rename into place
            # so readers never see a partial file.
            temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                                dotprefix=1, perms=0644);
            version_history = utils.open_file(temp_filename, 'w');
            version_history.write(dsc["bts changelog"]);
            version_history.close();
            filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                  changes_file[:-8]+".versions");
            os.rename(temp_filename, filename);

        # Write out the binary -> source mapping.
        temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                            dotprefix=1, perms=0644);
        debinfo = utils.open_file(temp_filename, 'w');
        for file in file_keys:
            # 'f' is bound from files[file] in a line not visible here.
            if f["type"] == "deb":
                line = " ".join([f["package"], f["version"],
                                 f["architecture"], f["source package"],
                                 f["source version"]]);
                debinfo.write(line+"\n");
        filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                              changes_file[:-8]+".debinfo");
        os.rename(temp_filename, filename);

    ## Special support to enable clean auto-building of accepted packages
    self.projectB.query("BEGIN WORK");
    for suite in changes["distribution"].keys():
        if suite not in Cnf.ValueList("Dinstall::AcceptedAutoBuildSuites"):
        suite_id = db_access.get_suite_id(suite);
        dest_dir = Cnf["Dir::AcceptedAutoBuild"];
        if Cnf.FindB("Dinstall::SecurityAcceptedAutoBuild"):
            dest_dir = os.path.join(dest_dir, suite);
        for file in file_keys:
            src = os.path.join(Cnf["Dir::Queue::Accepted"], file);
            dest = os.path.join(dest_dir, file);
            if Cnf.FindB("Dinstall::SecurityAcceptedAutoBuild"):
                # Copy it since the original won't be readable by www-data
                utils.copy(src, dest);
                # Create a symlink to it
                os.symlink(src, dest);
            # Add it to the list of packages for later processing by apt-ftparchive
            self.projectB.query("INSERT INTO accepted_autobuild (suite, filename, in_accepted) VALUES (%s, '%s', 't')" % (suite_id, dest));
        # If the .orig.tar.gz is in the pool, create a symlink to
        # it (if one doesn't already exist)
        if self.pkg.orig_tar_id:
            # Determine the .orig.tar.gz file name
            for dsc_file in self.pkg.dsc_files.keys():
                if dsc_file.endswith(".orig.tar.gz"):
            dest = os.path.join(dest_dir, filename);
            # If it doesn't exist, create a symlink
            if not os.path.exists(dest):
                # Find the .orig.tar.gz in the pool
                q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id));
                utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id));
                src = os.path.join(ql[0][0], ql[0][1]);
                os.symlink(src, dest);
                # Add it to the list of packages for later processing by apt-ftparchive
                self.projectB.query("INSERT INTO accepted_autobuild (suite, filename, in_accepted) VALUES (%s, '%s', 't')" % (suite_id, dest));
            # if it does, update things to ensure it's not removed prematurely
                self.projectB.query("UPDATE accepted_autobuild SET in_accepted = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id));

    self.projectB.query("COMMIT WORK");
494 ###########################################################################
def check_override (self):
    """Compare each binary's declared section/priority with the archive
    override values and mail the maintainer about any disparities.
    NOTE(review): the bindings for Cnf/Subst/summary, the early return
    and some continue lines are not visible in this view.
    """
    changes = self.pkg.changes;
    files = self.pkg.files;

    # Abandon the check if:
    #  a) it's a non-sourceful upload
    #  b) override disparity checks have been disabled
    #  c) we're not sending mail
    if not changes["architecture"].has_key("source") or \
       not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
       Cnf["Dinstall::Options::No-Mail"]:

    file_keys = files.keys();

    for file in file_keys:
        # 'new' files are handled elsewhere; only compare known .debs.
        if not files[file].has_key("new") and files[file]["type"] == "deb":
            section = files[file]["section"];
            override_section = files[file]["override section"];
            if section.lower() != override_section.lower() and section != "-":
                # Ignore this; it's a common mistake and not worth whining about
                if section.lower() == "non-us/main" and override_section.lower() == "non-us":
                summary += "%s: package says section is %s, override says %s.\n" % (file, section, override_section);
            priority = files[file]["priority"];
            override_priority = files[file]["override priority"];
            if priority != override_priority and priority != "-":
                summary += "%s: package says priority is %s, override says %s.\n" % (file, priority, override_priority);

    Subst["__SUMMARY__"] = summary;
    mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/jennifer.override-disparity");
    utils.send_mail(mail_message);
535 ###########################################################################
def force_reject (self, files):
    """Forcefully move files from the current directory to the
    reject directory.  If any file already exists in the reject
    directory it will be moved to the morgue to make way for
    the new file.

    NOTE(review): the loop header over *files*, the try: lines paired
    with the visible except clauses, and some continue lines are not
    visible in this view.
    """
    # Skip any files which don't exist or which we don't have permission to copy.
    if os.access(file,os.R_OK) == 0:
    dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file);
    # O_EXCL claims the destination atomically; EEXIST means someone
    # (or something) already put a file of that name there.
    os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);
    # File exists?  Let's try and move it to the morgue
    if errno.errorcode[e.errno] == 'EEXIST':
        morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file);
        morgue_file = utils.find_next_free(morgue_file);
        except utils.tried_too_hard_exc:
            # Something's either gone badly Pete Tong, or
            # someone is trying to exploit us.
            utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file));
        utils.move(dest_file, morgue_file, perms=0660);
        # Retry claiming the destination now the old file is out of the way.
        os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);
        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file));

    # If we got here, we own the destination file, so we can
    # safely overwrite it.
    utils.move(file, dest_file, 1, perms=0660);
576 ###########################################################################
def do_reject (self, manual = 0, reject_message = ""):
    """Reject the current upload: optionally collect a manual reject
    message via $EDITOR, move everything to the reject queue, write a
    <changes>.reason file and (unless No-Mail) send the rejection mail.
    NOTE(review): the bindings for pkg/Cnf/Subst, the prompt loop's
    initialisation of 'answer', and the manual/automatic branch lines
    are not visible in this view.
    """
    # If we weren't given a manual rejection message, spawn an
    # editor so the user can add one in...
    if manual and not reject_message:
        temp_filename = utils.temp_filename();
        editor = os.environ.get("EDITOR","vi")
        os.system("%s %s" % (editor, temp_filename))
        file = utils.open_file(temp_filename);
        reject_message = "".join(file.readlines());
        print "Reject message:";
        print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1);
        prompt = "[R]eject, Edit, Abandon, Quit ?"
        # Loop until the operator gives one of the accepted answers.
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt);
            m = re_default_answer.search(prompt);
        answer = answer[:1].upper();
        os.unlink(temp_filename);

    reason_filename = pkg.changes_file[:-8] + ".reason";
    reason_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename;

    # Move all the files into the reject directory
    reject_files = pkg.files.keys() + [pkg.changes_file];
    self.force_reject(reject_files);

    # If we fail here someone is probably trying to exploit the race
    # so let's just raise an exception ...
    if os.path.exists(reason_filename):
        os.unlink(reason_filename);
    reason_file = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);

    # Automatic rejection branch: canned rejector address, no manual text.
    Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"];
    Subst["__MANUAL_REJECT_MESSAGE__"] = "";
    Subst["__CC__"] = "X-Katie-Rejection: automatic (moo)";
    os.write(reason_file, reject_message);
    reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/katie.rejected");

    # Build up the rejection email
    user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"]);

    Subst["__REJECTOR_ADDRESS__"] = user_email_address;
    Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message;
    Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"];
    reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/katie.rejected");
    # Write the rejection email out as the <foo>.reason file
    os.write(reason_file, reject_mail_message);

    os.close(reason_file);

    # Send the rejection mail if appropriate
    if not Cnf["Dinstall::Options::No-Mail"]:
        utils.send_mail(reject_mail_message);

    self.Logger.log(["rejected", pkg.changes_file]);
651 ################################################################################
653 # Ensure that source exists somewhere in the archive for the binary
654 # upload being processed.
656 # (1) exact match => 1.0-3
657 # (2) Bin-only NMU of an MU => 1.0-3.0.1
658 # (3) Bin-only NMU of a sourceful-NMU => 1.0-3.1.1
def source_exists (self, package, source_version, suites = ["any"]):
    """Check the archive contains source for *package* at
    *source_version* in one of *suites* (following SuiteMappings),
    trying, per the comment block above: (1) an exact version match,
    (2) the bin-only-NMU-of-MU form, (3) the bin-only-NMU-of-NMU form.

    NOTE(review): mutable default argument 'suites' -- harmless only if
    it is never mutated; confirm in the full source.  Several lines
    (the 'any' branch, return statements, the map fixed-point loop) are
    not visible in this view.
    """
    que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
    # source must exist in suite X, or in some other suite that's
    # mapped to X, recursively... silent-maps are counted too,
    # unreleased-maps aren't.
    maps = self.Cnf.ValueList("SuiteMappings")[:]
    maps = [ m.split() for m in maps ]
    maps = [ (x[1], x[2]) for x in maps
             if x[0] == "map" or x[0] == "silent-map" ]
    # Expand the suite set until no mapping adds a new suite.
    if x[1] in s and x[0] not in s:
    que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, string.join(["su.suite_name = '%s'" % a for a in s], " OR "));
    q = self.projectB.query(que)

    # Reduce the query results to a list of version numbers
    ql = map(lambda x: x[0], q.getresult());

    # (1) exact match
    if source_version in ql:
    # (2) bin-only NMU of a maintainer upload: strip ".X.Y" suffix.
    orig_source_version = re_bin_only_nmu_of_mu.sub('', source_version)
    if orig_source_version in ql:
    # (3) bin-only NMU of a sourceful NMU: strip ".X" suffix.
    orig_source_version = re_bin_only_nmu_of_nmu.sub('', source_version)
    if orig_source_version in ql:
704 ################################################################################
def in_override_p (self, package, component, suite, binary_type, file):
    """Look up *package*'s override entry for the given suite,
    component and type; on success cache the override section and
    priority in files[file] for check_override() to compare later.
    NOTE(review): the binding of 'type' from binary_type, the source
    branch, the unknown-suite guard and the return statements are not
    visible in this view.
    """
    files = self.pkg.files;

    if binary_type == "": # must be source

    # Override suite name; used for example with proposed-updates
    if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
        suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)];

    # Avoid <undef> on unknown distributions
    suite_id = db_access.get_suite_id(suite);
    component_id = db_access.get_component_id(component);
    type_id = db_access.get_override_type_id(type);

    # FIXME: nasty non-US specific hack
    if component[:7].lower() == "non-us/":
        component = component[7:];

    q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
                            % (package, suite_id, component_id, type_id));
    result = q.getresult();
    # If checking for a source package fall back on the binary override type
    if type == "dsc" and not result:
        deb_type_id = db_access.get_override_type_id("deb");
        udeb_type_id = db_access.get_override_type_id("udeb");
        q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
                                % (package, suite_id, component_id, deb_type_id, udeb_type_id));
        result = q.getresult();

    # Remember the section and priority so we can check them later if appropriate
    files[file]["override section"] = result[0][0];
    files[file]["override priority"] = result[0][1];
747 ################################################################################
def reject (self, str, prefix="Rejected: "):
    """Append one rejection line to self.reject_message.

    The separating newline is inserted *before* each new entry (never
    after), so the accumulated message carries no trailing newline when
    it is handed back up to a caller.
    """
    if self.reject_message:
        self.reject_message = self.reject_message + "\n" + prefix + str;
    else:
        self.reject_message = prefix + str;
757 ################################################################################
def cross_suite_version_check(self, query_result, file, new_version):
    """Ensure versions are newer than existing packages in target
    suites and that cross-suite version checking rules as
    set out in the conf file are satisfied.

    *query_result* rows are (version, suite_name) pairs from the
    database; failures are accumulated via self.reject().
    """
    # Check versions for each target suite
    for target_suite in self.pkg.changes["distribution"].keys():
        must_be_newer_than = map(string.lower, self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)));
        must_be_older_than = map(string.lower, self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)));
        # Enforce "must be newer than target suite" even if conffile omits it
        if target_suite not in must_be_newer_than:
            must_be_newer_than.append(target_suite);
        for entry in query_result:
            existent_version = entry[0];
            # NOTE(review): 'suite' is bound (presumably from entry[1])
            # in a line not visible in this view -- confirm.
            # VersionCompare() returns 1/0/-1 like cmp().
            if suite in must_be_newer_than and \
               apt_pkg.VersionCompare(new_version, existent_version) != 1:
                self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite));
            if suite in must_be_older_than and \
               apt_pkg.VersionCompare(new_version, existent_version) != -1:
                self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite));
781 ################################################################################
def check_binary_against_db(self, file):
    """Validate a binary upload against the database: run cross-suite
    version checks and refuse to overwrite an identical
    package/version/architecture already in the archive.
    Returns the accumulated reject message ("" when clean).
    NOTE(review): parts of the first SQL statement (the architecture
    table join) and the existing-copy guard are not visible here.
    """
    self.reject_message = "";
    files = self.pkg.files;

    # Ensure version is sane
    q = self.projectB.query("""
SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
 WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
   AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
                            % (files[file]["package"],
                               files[file]["architecture"]));
    self.cross_suite_version_check(q.getresult(), file, files[file]["version"]);

    # Check for any existing copies of the file
    q = self.projectB.query("""
SELECT b.id FROM binaries b, architecture a
 WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
   AND a.id = b.architecture"""
                            % (files[file]["package"],
                               files[file]["version"],
                               files[file]["architecture"]))
    self.reject("%s: can not overwrite existing copy already in the archive." % (file));

    return self.reject_message;
810 ################################################################################
def check_source_against_db(self, file):
    """Validate a source upload against the database by running the
    cross-suite version checks on all existing versions of the source
    package.  Returns the accumulated reject message ("" when clean).
    NOTE(review): the binding of 'dsc' (presumably self.pkg.dsc) is not
    visible in this view.
    """
    self.reject_message = "";

    # Ensure version is sane
    q = self.projectB.query("""
SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
 WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")));
    self.cross_suite_version_check(q.getresult(), file, dsc.get("version"));

    return self.reject_message;
824 ################################################################################
827 # NB: this function can remove entries from the 'files' index [if
828 # the .orig.tar.gz is a duplicate of the one in the archive]; if
829 # you're iterating over 'files' and call this function as part of
830 # the loop, be sure to add a check to the top of the loop to
# ensure you haven't just tried to dereference the deleted entry.
def check_dsc_against_db(self, file):
    """Cross-check every file listed in the .dsc against incoming, the
    archive pool and the queue directories, handling the many possible
    homes of an .orig.tar.gz; records orig_tar_id/location/gz on
    self.pkg and verifies each file's md5sum and size.
    Returns (reject_message, orig_tar_gz_path_or_None).
    NOTE(review): many lines (result-loop headers over 'ql'/'i', else:
    lines, continue statements and the suite_type binding) are not
    visible in this view.
    """
    self.reject_message = "";
    files = self.pkg.files;
    dsc_files = self.pkg.dsc_files;
    legacy_source_untouchable = self.pkg.legacy_source_untouchable;
    self.pkg.orig_tar_gz = None;

    # Try and find all files mentioned in the .dsc.  This has
    # to work harder to cope with the multiple possible
    # locations of an .orig.tar.gz.
    for dsc_file in dsc_files.keys():
        if files.has_key(dsc_file):
            actual_md5 = files[dsc_file]["md5sum"];
            actual_size = int(files[dsc_file]["size"]);
            found = "%s in incoming" % (dsc_file)
            # Check the file does not already exist in the archive
            q = self.projectB.query("SELECT size, md5sum, filename FROM files WHERE filename LIKE '%%%s%%'" % (dsc_file));
            # Strip out anything that isn't '%s' or '/%s$'
            if i[2] != dsc_file and i[2][-(len(dsc_file)+1):] != '/'+dsc_file:
            # "[katie] has not broken them.  [katie] has fixed a
            # brokenness.  Your crappy hack exploited a bug in
            # "(Come on!  I thought it was always obvious that
            # one just doesn't release different files with
            # the same name and version.)"
            #                        -- ajk@ on d-devel@l.d.o
            # Ignore exact matches for .orig.tar.gz
            if dsc_file.endswith(".orig.tar.gz"):
                if files.has_key(dsc_file) and \
                   int(files[dsc_file]["size"]) == int(i[0]) and \
                   files[dsc_file]["md5sum"] == i[1]:
                    self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ");
            self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file));
        elif dsc_file.endswith(".orig.tar.gz"):
            # Not in incoming: look for the .orig.tar.gz in the pool.
            q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file));
            # Strip out anything that isn't '%s' or '/%s$'
            if i[1] != dsc_file and i[1][-(len(dsc_file)+1):] != '/'+dsc_file:
            # Unfortunately, we may get more than one match here if,
            # for example, the package was in potato but had an -sa
            # upload in woody.  So we need to choose the right one.
            x = ql[0]; # default to something sane in case we don't match any or have only one
            old_file = i[0] + i[1];
            actual_md5 = apt_pkg.md5sum(utils.open_file(old_file));
            actual_size = os.stat(old_file)[stat.ST_SIZE];
            # Prefer the pool copy whose md5sum and size match the .dsc.
            if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
            # Non-matching legacy copies must not be touched later.
            legacy_source_untouchable[i[3]] = "";
            old_file = x[0] + x[1];
            actual_md5 = apt_pkg.md5sum(utils.open_file(old_file));
            actual_size = os.stat(old_file)[stat.ST_SIZE];
            dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
            # See install() in katie...
            self.pkg.orig_tar_id = x[3];
            self.pkg.orig_tar_gz = old_file;
            if suite_type == "legacy" or suite_type == "legacy-mixed":
                self.pkg.orig_tar_location = "legacy";
                self.pkg.orig_tar_location = x[4];
            # Not there?  Check the queue directories...
            in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file);
            # See process_it() in jennifer for explanation of this
            if os.path.exists(in_unchecked):
                return (self.reject_message, in_unchecked);
            for dir in [ "Accepted", "New", "Byhand" ]:
                in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (dir)],dsc_file);
                if os.path.exists(in_otherdir):
                    actual_md5 = apt_pkg.md5sum(utils.open_file(in_otherdir));
                    actual_size = os.stat(in_otherdir)[stat.ST_SIZE];
                    self.pkg.orig_tar_gz = in_otherdir;
            self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file));
            # -1 marks "looked for but not found" for later stages.
            self.pkg.orig_tar_gz = -1;
            self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file));
        # Final integrity check on whatever copy of the file was found.
        if actual_md5 != dsc_files[dsc_file]["md5sum"]:
            self.reject("md5sum for %s doesn't match %s." % (found, file));
        if actual_size != int(dsc_files[dsc_file]["size"]):
            self.reject("size for %s doesn't match %s." % (found, file));

    return (self.reject_message, None);
def do_query(self, q):
    """Debug wrapper around self.projectB.query(): writes the query and
    its wall-clock duration to stderr.
    NOTE(review): this method appears to continue past the end of this
    view (presumably returning 'r') -- confirm against the full source.
    """
    sys.stderr.write("query: \"%s\" ... " % (q));
    before = time.time();
    r = self.projectB.query(q);
    time_diff = time.time()-before;
    sys.stderr.write("took %.3f seconds.\n" % (time_diff));