3 # Utility functions for katie
4 # Copyright (C) 2001, 2002, 2003 James Troup <james@nocrew.org>
5 # $Id: katie.py,v 1.36 2003-07-29 14:57:03 ajt Exp $
7 # This program is free software; you can redistribute it and/or modify
8 # it under the terms of the GNU General Public License as published by
9 # the Free Software Foundation; either version 2 of the License, or
10 # (at your option) any later version.
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU General Public License for more details.
17 # You should have received a copy of the GNU General Public License
18 # along with this program; if not, write to the Free Software
19 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 ###############################################################################
23 import cPickle, errno, os, pg, re, stat, string, sys, tempfile, time;
24 import utils, db_access;
25 import apt_inst, apt_pkg;
29 ###############################################################################
# Pre-compiled regular expressions used throughout this module.
# Raw string literals are used so that "\d", "\." etc. reach the regex
# engine intact instead of being (invalid) Python string escapes.
re_isanum = re.compile (r"^\d+$");
re_default_answer = re.compile(r"\[(.*)\]");
# Blank line separating paragraphs in a .changes "Changes" field.
re_fdnic = re.compile(r"\n\n");
# Version suffixes identifying binary-only NMUs (".X.Y" of a maintainer
# upload, ".X" of a sourceful NMU).
re_bin_only_nmu_of_mu = re.compile(r"\.\d+\.\d+$");
re_bin_only_nmu_of_nmu = re.compile(r"\.\d+$");
37 ###############################################################################
39 # Convenience wrapper to carry around all the package information in
42 def __init__(self, **kwds):
43 self.__dict__.update(kwds);
45 def update(self, **kwds):
46 self.__dict__.update(kwds);
48 ###############################################################################
51 # Read in the group maintainer override file
    def __init__ (self, Cnf):
        """Build self.group_maint, the set of maintainer addresses whose
        packages are group-maintained, from the override file named by
        Dinstall::GroupOverrideFilename (if configured)."""
        self.group_maint = {};

        if Cnf.get("Dinstall::GroupOverrideFilename"):
            filename = Cnf["Dir::Override"] + Cnf["Dinstall::GroupOverrideFilename"];
            file = utils.open_file(filename);
            for line in file.readlines():
                # Strip trailing comments, lower-case and trim; one address per line.
                line = utils.re_comments.sub('', line).lower().strip();
                # NOTE(review): a guard against empty lines appears elided here.
                self.group_maint[line] = 1;
    def is_an_nmu (self, pkg):
        """Decide whether the upload in `pkg` is an NMU by comparing the
        .dsc Maintainer (and Uploaders) with the .changes
        Maintainer/Changed-By fields; group-maintained packages are never
        NMUs.

        NOTE(review): several lines are elided in this view (the local
        bindings of `Cnf`, `dsc` and `uploadernames`, the per-uploader
        loop header, and the `return` statements of each branch).
        """
        changes = pkg.changes;
        # Normalize the .dsc maintainer, defaulting to our own address.
        (dsc_rfc822, dsc_name, dsc_email) = utils.fix_maintainer (dsc.get("maintainer",Cnf["Dinstall::MyEmailAddress"]).lower());
        # changes["changedbyname"] == dsc_name is probably never true, but better safe than sorry
        if dsc_name == changes["maintainername"].lower() and \
           (changes["changedby822"] == "" or changes["changedbyname"].lower() == dsc_name):
        # Changed-By matching any Uploaders entry also means "not an NMU".
        if dsc.has_key("uploaders"):
            uploaders = dsc["uploaders"].lower().split(",");
            (rfc822, name, email) = utils.fix_maintainer (i.strip());
            uploadernames[name] = "";
            if uploadernames.has_key(changes["changedbyname"].lower()):
        # Some group maintained packages (e.g. Debian QA) are never NMU's
        if self.group_maint.has_key(changes["maintaineremail"].lower()):
90 ###############################################################################
    def __init__(self, Cnf):
        """Initialize shared state: the NMU detector, accept counters, the
        per-upload Pkg holder, the template substitution map and the
        projectB database connection.

        NOTE(review): the bindings of self.Cnf / self.Logger appear elided
        in this view (other methods rely on them).
        """
        # Read in the group-maint override file
        self.nmu = nmu_p(Cnf);

        # Running totals for accepted uploads (bytes uses a long to avoid overflow).
        self.accept_count = 0;
        self.accept_bytes = 0L;
        self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
                       legacy_source_untouchable = {});

        # Initialize the substitution template mapping global
        Subst = self.Subst = {};
        Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"];
        Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"];
        Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"];
        Subst["__KATIE_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"];

        # Open the postgres connection used by all database helpers.
        self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]));
        db_access.init(Cnf, self.projectB);
114 ###########################################################################
116 def init_vars (self):
117 for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
118 exec "self.pkg.%s.clear();" % (i);
119 self.pkg.orig_tar_id = None;
120 self.pkg.orig_tar_location = "";
122 ###########################################################################
124 def update_vars (self):
125 dump_filename = self.pkg.changes_file[:-8]+".katie";
126 dump_file = utils.open_file(dump_filename);
127 p = cPickle.Unpickler(dump_file);
128 for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
129 exec "self.pkg.%s.update(p.load());" % (i);
130 for i in [ "orig_tar_id", "orig_tar_location" ]:
131 exec "self.pkg.%s = p.load();" % (i);
134 ###########################################################################
    # This could just dump the dictionaries as is, but I'd like to avoid
    # this so there's some idea of what katie & lisa use from jennifer
    def dump_vars(self, dest_dir):
        """Serialize the per-upload state to <changes-basename>.katie in
        `dest_dir`, whitelisting only the keys katie/lisa actually use.

        NOTE(review): several lines are elided in this view (the chmod
        try/except wrapper, the d_* initialisations and the final
        p.dump() loop body among them).
        """
        # Bind local names (changes, dsc, files, ...) to the pkg attributes.
        for i in [ "changes", "dsc", "files", "dsc_files",
                   "legacy_source_untouchable", "orig_tar_id", "orig_tar_location" ]:
            exec "%s = self.pkg.%s;" % (i,i);
        dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".katie");
        dump_file = utils.open_file(dump_filename, 'w');
        # Tighten permissions; a failed chmod is fatal only if the file is
        # left world readable.
        os.chmod(dump_filename, 0660);
        if errno.errorcode[e.errno] == 'EPERM':
            perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE]);
            if perms & stat.S_IROTH:
                utils.fubar("%s is world readable and chmod failed." % (dump_filename));

        # Binary pickle stream, protocol 1.
        p = cPickle.Pickler(dump_file, 1);
        for i in [ "d_changes", "d_dsc", "d_files", "d_dsc_files" ]:

        # Filter each per-file dict down to the known keys.
        for file in files.keys():
            for i in [ "package", "version", "architecture", "type", "size",
                       "md5sum", "component", "location id", "source package",
                       "source version", "maintainer", "dbtype", "files id",
                       "new", "section", "priority", "othercomponents",
                       "pool name", "original component" ]:
                if files[file].has_key(i):
                    d_files[file][i] = files[file][i];
        # Mandatory changes fields
        for i in [ "distribution", "source", "architecture", "version", "maintainer",
                   "urgency", "fingerprint", "changedby822", "changedbyname",
                   "maintainername", "maintaineremail", "closes" ]:
            d_changes[i] = changes[i];
        # Optional changes fields
        # FIXME: changes should be mandatory
        for i in [ "changed-by", "maintainer822", "filecontents", "format",
                   "changes", "lisa note" ]:
            if changes.has_key(i):
                d_changes[i] = changes[i];
        # Mandatory dsc fields (loop body elided in this view)
        for i in [ "source", "version", "maintainer", "fingerprint", "uploaders" ]:

        for file in dsc_files.keys():
            d_dsc_files[file] = {};
            # Mandatory dsc_files fields
            for i in [ "size", "md5sum" ]:
                d_dsc_files[file][i] = dsc_files[file][i];
            # Optional dsc_files fields
            for i in [ "files id" ]:
                if dsc_files[file].has_key(i):
                    d_dsc_files[file][i] = dsc_files[file][i];

        # Write the filtered structures to the pickle in a fixed order
        # (update_vars() reads them back in this same order).
        for i in [ d_changes, d_dsc, d_files, d_dsc_files,
                   legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
200 ###########################################################################
    # Set up the per-package template substitution mappings
    def update_subst (self, reject_message = ""):
        """Refresh the per-upload keys of the substitution map from
        self.pkg.changes, repairing fields that may be missing or of the
        wrong type after a partial jennifer run."""
        changes = self.pkg.changes;
        # If jennifer crashed out in the right place, architecture may still be a string.
        if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
            changes["architecture"] = { "Unknown" : "" };
        # and maintainer822 may not exist.
        if not changes.has_key("maintainer822"):
            changes["maintainer822"] = self.Cnf["Dinstall::MyEmailAddress"];

        Subst["__ARCHITECTURE__"] = " ".join(changes["architecture"].keys());
        Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file);
        Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "");

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
            Subst["__MAINTAINER_FROM__"] = changes["changedby822"];
            Subst["__MAINTAINER_TO__"] = changes["changedby822"] + ", " + changes["maintainer822"];
            Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown");
        # NOTE(review): the `else:` introducing this branch appears elided.
            Subst["__MAINTAINER_FROM__"] = changes["maintainer822"];
            Subst["__MAINTAINER_TO__"] = changes["maintainer822"];
            Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown");
        # Bcc the package-tracking server for sourceful uploads, if configured.
        if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
            Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if self.Cnf.get("Dinstall::OverrideMaintainer"):
            Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"];
            Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"];

        Subst["__REJECT_MESSAGE__"] = reject_message;
        Subst["__SOURCE__"] = changes.get("source", "Unknown");
        Subst["__VERSION__"] = changes.get("version", "Unknown");
239 ###########################################################################
    def build_summaries(self):
        """Build the human-readable (summary, short_summary) pair describing
        where each file of the upload is headed (byhand / NEW / pool).

        NOTE(review): a few lines are elided in this view (the byhand/new
        flag assignments and the `else:` for ordinary pool-bound files).
        """
        changes = self.pkg.changes;
        files = self.pkg.files;

        byhand = summary = new = "";

        # changes["distribution"] may not exist in corner cases
        # (e.g. unreadable changes files)
        if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
            changes["distribution"] = {};

        file_keys = files.keys();
        for file in file_keys:
            if files[file].has_key("byhand"):
                summary += file + " byhand\n"
            elif files[file].has_key("new"):
                summary += "(new) %s %s %s\n" % (file, files[file]["priority"], files[file]["section"])
                if files[file].has_key("othercomponents"):
                    summary += "WARNING: Already present in %s distribution.\n" % (files[file]["othercomponents"])
                # For new .debs, quote the control file's Description too.
                if files[file]["type"] == "deb":
                    summary += apt_pkg.ParseSection(apt_inst.debExtractControl(utils.open_file(file)))["Description"] + '\n';
                files[file]["pool name"] = utils.poolify (changes["source"], files[file]["component"])
                destination = self.Cnf["Dir::PoolRoot"] + files[file]["pool name"] + file
                summary += file + "\n  to " + destination + "\n"

        short_summary = summary;

        # This is for direport's benefit...
        f = re_fdnic.sub("\n .\n", changes.get("changes",""));
        summary += "Changes: " + f;
        summary += self.announce(short_summary, 0)
        return (summary, short_summary);
282 ###########################################################################
    def close_bugs (self, summary, action):
        """Close the bugs listed in the upload's Closes field (maintainer
        uploads) or tag them fixed (NMUs), appending what was done to
        `summary`.  A false `action` means dry-run: no mail, no log.

        NOTE(review): the per-bug loop headers, `else:` branches and the
        `return summary` appear elided in this view; the stable-warning
        string literal is also truncated (unterminated) here.
        """
        changes = self.pkg.changes;
        bugs = changes["closes"].keys();

        # Maintainer upload: really close each bug with a bug-close mail.
        if not self.nmu.is_an_nmu(self.pkg):
            summary += "Closing bugs: ";
            summary += "%s " % (bug);
            Subst["__BUG_NUMBER__"] = bug;
            if changes["distribution"].has_key("stable"):
                Subst["__STABLE_WARNING__"] = """
Note that this package is not part of the released stable Debian
distribution. It may have dependencies on other unreleased software,
or other instabilities. Please take care if you wish to install it.
The update will eventually make its way into the next released Debian
            Subst["__STABLE_WARNING__"] = "";
            mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.bug-close");
            utils.send_mail (mail_message);
            self.Logger.log(["closing bugs"]+bugs);
            summary += "Setting bugs to severity fixed: ";
            control_message = "";
            summary += "%s " % (bug);
            control_message += "tag %s + fixed\n" % (bug);
            if action and control_message != "":
                Subst["__CONTROL_MESSAGE__"] = control_message;
                mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.bug-nmu-fixed");
                utils.send_mail (mail_message);
                self.Logger.log(["setting bugs to fixed"]+bugs);
329 ###########################################################################
    def announce (self, short_summary, action):
        """Send the per-suite announcement mails for a sourceful upload and
        (if Dinstall::CloseBugs) close bugs; returns the updated summary.
        A false `action` means dry-run.

        NOTE(review): the early `return`, the `lists_done`/`summary`
        initialisations and loop `continue`s appear elided in this view.
        """
        changes = self.pkg.changes;

        # Only do announcements for source uploads with a recent dpkg-dev installed
        if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):

        Subst["__SHORT_SUMMARY__"] = short_summary;

        for dist in changes["distribution"].keys():
            list = Cnf.Find("Suite::%s::Announce" % (dist));
            # Announce only once per distinct list address.
            if list == "" or lists_done.has_key(list):
            lists_done[list] = 1;
            summary += "Announcing to %s\n" % (list);

            Subst["__ANNOUNCE_LIST_ADDRESS__"] = list;
            # Bcc the package-tracking server for sourceful uploads, if configured.
            if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
                Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"]);
            mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.announce");
            utils.send_mail (mail_message);

        if Cnf.FindB("Dinstall::CloseBugs"):
            summary = self.close_bugs(summary, action);
363 ###########################################################################
    def accept (self, summary, short_summary):
        """Accept the upload: move the .changes and its files into the
        accepted queue, send the accepted mail / announcements, and
        maintain the accepted auto-build area for the relevant suites.

        NOTE(review): several lines are elided in this view (local
        Cnf/Subst bindings, loop `continue`s, the orig.tar.gz filename
        assignment and some `else:` branches).
        """
        files = self.pkg.files;

        self.Logger.log(["Accepting changes",self.pkg.changes_file]);

        # Dump per-upload state alongside the accepted files for katie/lisa.
        self.dump_vars(Cnf["Dir::Queue::Accepted"]);

        # Move all the files into the accepted directory
        utils.move(self.pkg.changes_file, Cnf["Dir::Queue::Accepted"]);
        file_keys = files.keys();
        for file in file_keys:
            utils.move(file, Cnf["Dir::Queue::Accepted"]);
            self.accept_bytes += float(files[file]["size"])
        self.accept_count += 1;

        # Send accept mail, announce to lists, close bugs and check for
        # override disparities
        if not Cnf["Dinstall::Options::No-Mail"]:
            Subst["__SUITE__"] = "";
            Subst["__SUMMARY__"] = summary;
            mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.accepted");
            utils.send_mail(mail_message)
            self.announce(short_summary, 1)

        # Special support to enable clean auto-building of accepted packages
        self.projectB.query("BEGIN WORK");
        for suite in self.pkg.changes["distribution"].keys():
            if suite not in Cnf.ValueList("Dinstall::AcceptedAutoBuildSuites"):
            suite_id = db_access.get_suite_id(suite);
            dest_dir = Cnf["Dir::AcceptedAutoBuild"];
            # Security installs keep a per-suite subdirectory.
            if Cnf.FindB("Dinstall::SecurityAcceptedAutoBuild"):
                dest_dir = os.path.join(dest_dir, suite);
            for file in file_keys:
                src = os.path.join(Cnf["Dir::Queue::Accepted"], file);
                dest = os.path.join(dest_dir, file);
                if Cnf.FindB("Dinstall::SecurityAcceptedAutoBuild"):
                    # Copy it since the original won't be readable by www-data
                    utils.copy(src, dest);
                # Create a symlink to it
                os.symlink(src, dest);
                # Add it to the list of packages for later processing by apt-ftparchive
                self.projectB.query("INSERT INTO accepted_autobuild (suite, filename, in_accepted) VALUES (%s, '%s', 't')" % (suite_id, dest));
            # If the .orig.tar.gz is in the pool, create a symlink to
            # it (if one doesn't already exist)
            if self.pkg.orig_tar_id:
                # Determine the .orig.tar.gz file name
                for dsc_file in self.pkg.dsc_files.keys():
                    if dsc_file.endswith(".orig.tar.gz"):
                dest = os.path.join(dest_dir, filename);
                # If it doesn't exist, create a symlink
                if not os.path.exists(dest):
                    # Find the .orig.tar.gz in the pool
                    q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id));
                    utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id));
                    src = os.path.join(ql[0][0], ql[0][1]);
                    os.symlink(src, dest);
                    # Add it to the list of packages for later processing by apt-ftparchive
                    self.projectB.query("INSERT INTO accepted_autobuild (suite, filename, in_accepted) VALUES (%s, '%s', 't')" % (suite_id, dest));
                # if it does, update things to ensure it's not removed prematurely
                self.projectB.query("UPDATE accepted_autobuild SET in_accepted = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id));

        self.projectB.query("COMMIT WORK");
437 ###########################################################################
    def check_override (self):
        """Mail the maintainer when a .deb's section/priority disagrees
        with the override database.  Runs only for sourceful, mail-enabled
        uploads with disparity checks turned on.

        NOTE(review): the early `return`, `summary` initialisation,
        `continue` statements and the empty-summary guard before the mail
        appear elided in this view.
        """
        changes = self.pkg.changes;
        files = self.pkg.files;

        # Abandon the check if:
        #  a) it's a non-sourceful upload
        #  b) override disparity checks have been disabled
        #  c) we're not sending mail
        if not changes["architecture"].has_key("source") or \
           not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
           Cnf["Dinstall::Options::No-Mail"]:

        for file in files.keys():
            if not files[file].has_key("new") and files[file]["type"] == "deb":
                section = files[file]["section"];
                override_section = files[file]["override section"];
                if section.lower() != override_section.lower() and section != "-":
                    # Ignore this; it's a common mistake and not worth whining about
                    if section.lower() == "non-us/main" and override_section.lower() == "non-us":
                    summary += "%s: package says section is %s, override says %s.\n" % (file, section, override_section);
                priority = files[file]["priority"];
                override_priority = files[file]["override priority"];
                if priority != override_priority and priority != "-":
                    summary += "%s: package says priority is %s, override says %s.\n" % (file, priority, override_priority);

        Subst["__SUMMARY__"] = summary;
        mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/jennifer.override-disparity");
        utils.send_mail(mail_message);
476 ###########################################################################
    def force_reject (self, files):
        """Forcefully move files from the current directory to the
        reject directory.  If any file already exists in the reject
        directory it will be moved to the morgue to make way for
        the new file."""
        # NOTE(review): the per-file loop header, try/except wrappers and
        # `continue` statements appear elided in this view.

        # Skip any files which don't exist or which we don't have permission to copy.
        if os.access(file,os.R_OK) == 0:
        dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file);
        # O_EXCL ensures we notice (via EEXIST) if the file is already there.
        os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);
        # File exists?  Let's try and move it to the morgue
        if errno.errorcode[e.errno] == 'EEXIST':
            morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file);
            morgue_file = utils.find_next_free(morgue_file);
        except utils.tried_too_hard_exc:
            # Something's either gone badly Pete Tong, or
            # someone is trying to exploit us.
            utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file));
        utils.move(dest_file, morgue_file, perms=0660);
        os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);
        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file));

        # If we got here, we own the destination file, so we can
        # safely overwrite it.
        utils.move(file, dest_file, 1, perms=0660);
517 ###########################################################################
    def do_reject (self, manual = 0, reject_message = ""):
        """Reject the current upload: optionally let the user compose the
        rejection message in $EDITOR, move everything to the reject queue,
        write the <foo>.reason file and (unless suppressed) mail the
        rejection.

        NOTE(review): several lines are elided in this view (the editor
        re-edit loop's branch handling, the `pkg`/`Cnf`/`Subst` local
        bindings and the manual/automatic `if`/`else` structure).
        """
        # If we weren't given a manual rejection message, spawn an
        # editor so the user can add one in...
        if manual and not reject_message:
            # NOTE(review): mktemp is racy; the O_EXCL open below mitigates it.
            temp_filename = tempfile.mktemp();
            fd = os.open(temp_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0700);
            editor = os.environ.get("EDITOR","vi")
            os.system("%s %s" % (editor, temp_filename))
            file = utils.open_file(temp_filename);
            reject_message = "".join(file.readlines());
            print "Reject message:";
            print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1);
            prompt = "[R]eject, Edit, Abandon, Quit ?"
            while prompt.find(answer) == -1:
                answer = utils.our_raw_input(prompt);
                m = re_default_answer.search(prompt);
                answer = answer[:1].upper();
            os.unlink(temp_filename);

        reason_filename = pkg.changes_file[:-8] + ".reason";
        reject_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename;

        # Move all the files into the reject directory
        reject_files = pkg.files.keys() + [pkg.changes_file];
        self.force_reject(reject_files);

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reject_filename):
            os.unlink(reject_filename);
        fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);

        # Automatic rejection: stock headers, no manual message.
        Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"];
        Subst["__MANUAL_REJECT_MESSAGE__"] = "";
        Subst["__CC__"] = "X-Katie-Rejection: automatic (moo)";
        os.write(fd, reject_message);
        reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/katie.rejected");

        # Build up the rejection email
        user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"]);
        Subst["__REJECTOR_ADDRESS__"] = user_email_address;
        Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message;
        Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"];
        reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/katie.rejected");

        # Write the rejection email out as the <foo>.reason file
        os.write(fd, reject_mail_message);

        # Send the rejection mail if appropriate
        if not Cnf["Dinstall::Options::No-Mail"]:
            utils.send_mail(reject_mail_message);

        self.Logger.log(["rejected", pkg.changes_file]);
595 ################################################################################
    # Ensure that source exists somewhere in the archive for the binary
    # upload being processed.
    # (1) exact match => 1.0-3
    # (2) Bin-only NMU of an MU => 1.0-3.0.1
    # (3) Bin-only NMU of a sourceful-NMU => 1.0-3.1.1
    def source_exists (self, package, source_version, suites = ["any"]):
        """Check whether `source_version` of `package` (or the version a
        binary-only NMU would have been built from) exists in the given
        suites.

        NOTE(review): the suite loop header, the "any" branch and the
        `return` statements appear elided in this view.  The mutable
        default for `suites` is shared between calls; harmless here as it
        is never mutated.
        """
        # "any" searches the whole source table; otherwise restrict per suite.
        que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
        suite_id = db_access.get_suite_id(suite);
        que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) WHERE sa.suite = %d AND s.source = '%s'" % (suite_id, package)
        q = self.projectB.query(que)

        # Reduce the query results to a list of version numbers
        ql = map(lambda x: x[0], q.getresult());
        # (1) exact match
        if ql.count(source_version):
        # (2) bin-only NMU of a maintainer upload: strip the ".X.Y" suffix.
        orig_source_version = re_bin_only_nmu_of_mu.sub('', source_version)
        if ql.count(orig_source_version):
        # (3) bin-only NMU of a sourceful NMU: strip the ".X" suffix.
        orig_source_version = re_bin_only_nmu_of_nmu.sub('', source_version)
        if ql.count(orig_source_version):
636 ################################################################################
    def in_override_p (self, package, component, suite, binary_type, file):
        """Look up `package`'s override entry (section, priority) for the
        given suite/component/type, recording the result on files[file].

        NOTE(review): the `type` assignment, the unknown-suite guard body
        and the `return` statements appear elided in this view.
        """
        files = self.pkg.files;

        if binary_type == "": # must be source
        # Override suite name; used for example with proposed-updates
        if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
            suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)];
        # Avoid <undef> on unknown distributions
        suite_id = db_access.get_suite_id(suite);
        component_id = db_access.get_component_id(component);
        type_id = db_access.get_override_type_id(type);

        # FIXME: nasty non-US specific hack
        if component[:7].lower() == "non-us/":
            component = component[7:];

        q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
                                % (package, suite_id, component_id, type_id));
        result = q.getresult();
        # If checking for a source package fall back on the binary override type
        if type == "dsc" and not result:
            deb_type_id = db_access.get_override_type_id("deb");
            udeb_type_id = db_access.get_override_type_id("udeb");
            q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
                                    % (package, suite_id, component_id, deb_type_id, udeb_type_id));
            result = q.getresult();

        # Remember the section and priority so we can check them later if appropriate
        files[file]["override section"] = result[0][0];
        files[file]["override priority"] = result[0][1];
679 ################################################################################
681 def reject (self, str, prefix="Rejected: "):
683 # Unlike other rejects we add new lines first to avoid trailing
684 # new lines when this message is passed back up to a caller.
685 if self.reject_message:
686 self.reject_message += "\n";
687 self.reject_message += prefix + str;
689 ################################################################################
    def cross_suite_version_check(self, query_result, file, new_version):
        """Ensure versions are newer than existing packages in target
        suites and that cross-suite version checking rules as
        set out in the conf file are satisfied.

        `query_result` is a sequence of (version, suite_name) rows;
        violations are accumulated via self.reject().
        """
        # Check versions for each target suite
        for target_suite in self.pkg.changes["distribution"].keys():
            must_be_newer_than = map(string.lower, self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)));
            must_be_older_than = map(string.lower, self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)));
            # Enforce "must be newer than target suite" even if conffile omits it
            if target_suite not in must_be_newer_than:
                must_be_newer_than.append(target_suite);
            for entry in query_result:
                existent_version = entry[0];
                # NOTE(review): the binding of `suite` (from entry[1]) appears
                # elided in this view.
                if suite in must_be_newer_than and \
                   apt_pkg.VersionCompare(new_version, existent_version) != 1:
                    self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite));
                if suite in must_be_older_than and \
                   apt_pkg.VersionCompare(new_version, existent_version) != -1:
                    self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite));
713 ################################################################################
    def check_binary_against_db(self, file):
        """Reject if this binary's version violates cross-suite version
        rules, or if the same package/version/arch already exists in the
        archive.  Returns the accumulated rejection message ("" if OK)."""
        self.reject_message = "";
        files = self.pkg.files;

        # Ensure version is sane
        q = self.projectB.query("""
SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
 WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
 AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
                                % (files[file]["package"],
                                   files[file]["architecture"]));
        self.cross_suite_version_check(q.getresult(), file, files[file]["version"]);

        # Check for any existing copies of the file
        q = self.projectB.query("""
SELECT b.id FROM binaries b, architecture a
 WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
 AND a.id = b.architecture"""
                                % (files[file]["package"],
                                   files[file]["version"],
                                   files[file]["architecture"]))
        # NOTE(review): the guard (reject only if the query returned rows)
        # appears elided in this view.
        self.reject("can not overwrite existing copy of '%s' already in the archive." % (file));

        return self.reject_message;
742 ################################################################################
    def check_source_against_db(self, file):
        """Run the cross-suite version checks for the source package named
        in the .dsc.  Returns the accumulated rejection message ("" if OK).

        NOTE(review): the local binding of `dsc` (presumably self.pkg.dsc)
        appears elided in this view.
        """
        self.reject_message = "";

        # Ensure version is sane
        q = self.projectB.query("""
SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
 WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")));
        self.cross_suite_version_check(q.getresult(), file, dsc.get("version"));

        return self.reject_message;
756 ################################################################################
    # NB: this function can remove entries from the 'files' index [if
    # the .orig.tar.gz is a duplicate of the one in the archive]; if
    # you're iterating over 'files' and call this function as part of
    # the loop, be sure to add a check to the top of the loop to
    # ensure you haven't just tried to derefernece the deleted entry.
    def check_dsc_against_db(self, file):
        """Verify every file listed in the .dsc against the archive and the
        queue directories, locating the .orig.tar.gz where necessary.
        Returns (reject_message, path-to-orig-tar-gz-or-None).

        NOTE(review): many lines are elided in this view (query-result loop
        headers, guards, `else:` branches and the `found`/`orig_tar_gz`
        assignments among them).
        """
        self.reject_message = "";
        files = self.pkg.files;
        dsc_files = self.pkg.dsc_files;
        legacy_source_untouchable = self.pkg.legacy_source_untouchable;

        # Try and find all files mentioned in the .dsc. This has
        # to work harder to cope with the multiple possible
        # locations of an .orig.tar.gz.
        for dsc_file in dsc_files.keys():
            if files.has_key(dsc_file):
                # The file is part of this upload; checksum it from there.
                actual_md5 = files[dsc_file]["md5sum"];
                actual_size = int(files[dsc_file]["size"]);
                found = "%s in incoming" % (dsc_file)
                # Check the file does not already exist in the archive
                q = self.projectB.query("SELECT size, md5sum, filename FROM files WHERE filename LIKE '%%%s%%'" % (dsc_file));
                # Strip out anything that isn't '%s' or '/%s$'
                if i[2] != dsc_file and i[2][-(len(dsc_file)+1):] != '/'+dsc_file:
                    self.Logger.log(["check_dsc_against_db",i[2],dsc_file]);

                # "[katie] has not broken them. [katie] has fixed a
                # brokenness. Your crappy hack exploited a bug in
                # "(Come on! I thought it was always obvious that
                # one just doesn't release different files with
                # the same name and version.)"
                # -- ajk@ on d-devel@l.d.o

                # Ignore exact matches for .orig.tar.gz
                if dsc_file.endswith(".orig.tar.gz"):
                    if files.has_key(dsc_file) and \
                       int(files[dsc_file]["size"]) == int(i[0]) and \
                       files[dsc_file]["md5sum"] == i[1]:
                        self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ");

                self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file));
            elif dsc_file.endswith(".orig.tar.gz"):
                # Not part of the upload: look for the tarball in the pool.
                q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file));
                # Strip out anything that isn't '%s' or '/%s$'
                if i[1] != dsc_file and i[1][-(len(dsc_file)+1):] != '/'+dsc_file:
                    self.Logger.log(["check_dsc_against_db",i[1],dsc_file]);

                # Unfortunately, we make get more than one
                # match here if, for example, the package was
                # in potato but had a -sa upload in woody. So
                # we need to choose the right one.
                x = ql[0]; # default to something sane in case we don't match any or have only one
                old_file = i[0] + i[1];
                actual_md5 = apt_pkg.md5sum(utils.open_file(old_file));
                actual_size = os.stat(old_file)[stat.ST_SIZE];
                # Matching size+md5 identifies the candidate; legacy copies
                # are additionally marked untouchable.
                if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
                    legacy_source_untouchable[i[3]] = "";

                old_file = x[0] + x[1];
                actual_md5 = apt_pkg.md5sum(utils.open_file(old_file));
                actual_size = os.stat(old_file)[stat.ST_SIZE];
                dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
                # See install() in katie...
                self.pkg.orig_tar_id = x[3];
                if suite_type == "legacy" or suite_type == "legacy-mixed":
                    self.pkg.orig_tar_location = "legacy";
                self.pkg.orig_tar_location = x[4];

                # Not there? Check the queue directories...
                in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file);
                # See process_it() in jennifer for explanation of this
                if os.path.exists(in_unchecked):
                    return (self.reject_message, in_unchecked);
                for dir in [ "Accepted", "New", "Byhand" ]:
                    in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (dir)],dsc_file);
                    if os.path.exists(in_otherdir):
                        actual_md5 = apt_pkg.md5sum(utils.open_file(in_otherdir));
                        actual_size = os.stat(in_otherdir)[stat.ST_SIZE];
                self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file));
                self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file));

            # Finally compare the located file against the .dsc's claims.
            if actual_md5 != dsc_files[dsc_file]["md5sum"]:
                self.reject("md5sum for %s doesn't match %s." % (found, file));
            if actual_size != int(dsc_files[dsc_file]["size"]):
                self.reject("size for %s doesn't match %s." % (found, file));

        return (self.reject_message, orig_tar_gz);
    def do_query(self, q):
        """Debugging helper: run query `q` against projectB, writing the
        query text and its wall-clock duration to stderr.

        NOTE(review): any trailing lines (e.g. `return r`) fall outside
        this view.
        """
        sys.stderr.write("query: \"%s\" ... " % (q));
        before = time.time();
        r = self.projectB.query(q);
        time_diff = time.time()-before;
        sys.stderr.write("took %.3f seconds.\n" % (time_diff));