3 # Utility functions for katie
4 # Copyright (C) 2001, 2002, 2003 James Troup <james@nocrew.org>
5 # $Id: katie.py,v 1.43 2003-11-07 01:48:42 troup Exp $
7 # This program is free software; you can redistribute it and/or modify
8 # it under the terms of the GNU General Public License as published by
9 # the Free Software Foundation; either version 2 of the License, or
10 # (at your option) any later version.
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU General Public License for more details.
17 # You should have received a copy of the GNU General Public License
18 # along with this program; if not, write to the Free Software
19 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 ###############################################################################
23 import cPickle, errno, os, pg, re, stat, string, sys, tempfile, time;
24 import utils, db_access;
25 import apt_inst, apt_pkg;
29 ###############################################################################
# Pre-compiled regexes used throughout this module.
# All patterns use raw strings for consistency (the last three previously
# did not, relying on Python passing unknown escapes like '\d' through).
re_isanum = re.compile (r"^\d+$");                      # a string of digits only
re_default_answer = re.compile(r"\[(.*)\]");            # default choice in a "[R]eject, ..." prompt
re_fdnic = re.compile(r"\n\n");                         # blank line separating changelog paragraphs
re_bin_only_nmu_of_mu = re.compile(r"\.\d+\.\d+$");     # version suffix of a bin-only NMU of a maintainer upload (e.g. 1.0-3.0.1)
re_bin_only_nmu_of_nmu = re.compile(r"\.\d+$");         # version suffix of a bin-only NMU of a sourceful NMU (e.g. 1.0-3.1.1)
37 ###############################################################################
39 # Convenience wrapper to carry around all the package information in
def __init__(self, **kwds):
    """Create a package-state holder, stashing every keyword argument
    as an attribute of the new object."""
    for attr_name, attr_value in kwds.items():
        setattr(self, attr_name, attr_value);
def update(self, **kwds):
    """Merge the given keyword arguments into this object's attributes,
    overwriting any that already exist."""
    for attr_name, attr_value in kwds.items():
        setattr(self, attr_name, attr_value);
48 ###############################################################################
51 # Read in the group maintainer override file
def __init__ (self, Cnf):
    """Read the group-maintainer override file (if configured) into
    self.group_maint, a set-like dict keyed by lower-cased,
    comment-stripped lines of the file."""
    # Addresses listed here identify group-maintained packages
    # (e.g. Debian QA) whose uploads are never treated as NMUs.
    self.group_maint = {};
    if Cnf.get("Dinstall::GroupOverrideFilename"):
        filename = Cnf["Dir::Override"] + Cnf["Dinstall::GroupOverrideFilename"];
        file = utils.open_file(filename);
        for line in file.readlines():
            # Strip comments and whitespace; normalise case for lookups.
            line = utils.re_comments.sub('', line).lower().strip();
            self.group_maint[line] = 1;
def is_an_nmu (self, pkg):
    """Decide whether the upload described by 'pkg' is a Non-Maintainer
    Upload, i.e. the person in Changed-By is neither the maintainer
    nor (for recent source formats) one of the listed Uploaders.
    NOTE(review): relies on 'dsc', 'Cnf' and 'uploadernames' bindings
    from the surrounding method body -- confirm against full source."""
    changes = pkg.changes;
    # Normalise the .dsc maintainer for comparison; fall back to our
    # own address if the field is absent.
    (dsc_rfc822, dsc_name, dsc_email) = utils.fix_maintainer (dsc.get("maintainer",Cnf["Dinstall::MyEmailAddress"]).lower());
    # changes["changedbyname"] == dsc_name is probably never true, but better safe than sorry
    if dsc_name == changes["maintainername"].lower() and \
       (changes["changedby822"] == "" or changes["changedbyname"].lower() == dsc_name):
        # An upload by one of the listed Uploaders is not an NMU either.
        if dsc.has_key("uploaders"):
            uploaders = dsc["uploaders"].lower().split(",");
            # Collect the parsed uploader names for a membership test.
            (rfc822, name, email) = utils.fix_maintainer (i.strip());
            uploadernames[name] = "";
            if uploadernames.has_key(changes["changedbyname"].lower()):
    # Some group maintained packages (e.g. Debian QA) are never NMU's
    if self.group_maint.has_key(changes["maintaineremail"].lower()):
90 ###############################################################################
def __init__(self, Cnf):
    """Initialise the queue handler: NMU detector, accept counters,
    per-upload package state, the template substitution map, and the
    projectB database connection."""
    # Read in the group-maint override file
    self.nmu = nmu_p(Cnf);
    # Running totals updated by accept().
    self.accept_count = 0;
    self.accept_bytes = 0L;
    # Per-upload state; the dicts are cleared by init_vars() between uploads.
    self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
                   legacy_source_untouchable = {});
    # Initialize the substitution template mapping global
    Subst = self.Subst = {};
    Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"];
    Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"];
    Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"];
    Subst["__KATIE_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"];
    # Open the archive database and initialise the db_access caches.
    self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]));
    db_access.init(Cnf, self.projectB);
114 ###########################################################################
def init_vars (self):
    """Reset the per-upload package state so the object can be reused
    for the next .changes file: empty every state dict and clear the
    .orig.tar.gz bookkeeping."""
    # Use getattr instead of the original exec-on-a-format-string idiom;
    # same effect, but no dynamic code execution.
    for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
        getattr(self.pkg, i).clear();
    self.pkg.orig_tar_id = None;
    self.pkg.orig_tar_location = "";
122 ###########################################################################
def update_vars (self):
    """Re-load per-upload state from the .katie pickle written by
    dump_vars() (same path as the .changes file, .katie extension).
    NOTE(review): cPickle on files from the queue is only safe because
    the queue directory is trusted -- never point this at external input."""
    dump_filename = self.pkg.changes_file[:-8]+".katie";
    dump_file = utils.open_file(dump_filename);
    p = cPickle.Unpickler(dump_file);
    # Use getattr/setattr instead of the original exec-on-a-format-string
    # idiom; same effect, but no dynamic code execution.
    for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
        getattr(self.pkg, i).update(p.load());
    for i in [ "orig_tar_id", "orig_tar_location" ]:
        setattr(self.pkg, i, p.load());
    # Close the handle; the visible original leaked it.
    dump_file.close();
134 ###########################################################################
136 # This could just dump the dictionaries as is, but I'd like to avoid
137 # this so there's some idea of what katie & lisa use from jennifer
def dump_vars(self, dest_dir):
    """Serialise the per-upload state into a <changes-basename>.katie
    pickle in dest_dir, for later consumption by katie & lisa.
    Only a whitelisted subset of keys is dumped (see the lists below)
    so it stays clear what the consumers actually rely on."""
    # Bind each pkg attribute to a same-named local for use below.
    for i in [ "changes", "dsc", "files", "dsc_files",
               "legacy_source_untouchable", "orig_tar_id", "orig_tar_location" ]:
        exec "%s = self.pkg.%s;" % (i,i);
    dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".katie");
    dump_file = utils.open_file(dump_filename, 'w');
    # Keep the dump group-readable only.
    os.chmod(dump_filename, 0660);
    # chmod may legitimately fail if we don't own an existing file;
    # it is only fatal if the file is left world-readable.
    if errno.errorcode[e.errno] == 'EPERM':
        perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE]);
        if perms & stat.S_IROTH:
            utils.fubar("%s is world readable and chmod failed." % (dump_filename));
    # Binary pickle protocol 1.
    p = cPickle.Pickler(dump_file, 1);
    # Filtered copies (d_*) holding only the whitelisted keys.
    for i in [ "d_changes", "d_dsc", "d_files", "d_dsc_files" ]:
    for file in files.keys():
        # Per-file metadata worth preserving across runs.
        for i in [ "package", "version", "architecture", "type", "size",
                   "md5sum", "component", "location id", "source package",
                   "source version", "maintainer", "dbtype", "files id",
                   "new", "section", "priority", "othercomponents",
                   "pool name", "original component" ]:
            if files[file].has_key(i):
                d_files[file][i] = files[file][i];
    # Mandatory changes fields
    for i in [ "distribution", "source", "architecture", "version", "maintainer",
               "urgency", "fingerprint", "changedby822", "changedbyname",
               "maintainername", "maintaineremail", "closes" ]:
        d_changes[i] = changes[i];
    # Optional changes fields
    # FIXME: changes should be mandatory
    for i in [ "changed-by", "maintainer822", "filecontents", "format",
               "changes", "lisa note" ]:
        if changes.has_key(i):
            d_changes[i] = changes[i];
    # Mandatory dsc fields.
    for i in [ "source", "version", "maintainer", "fingerprint", "uploaders" ]:
    for file in dsc_files.keys():
        d_dsc_files[file] = {};
        # Mandatory dsc_files fields
        for i in [ "size", "md5sum" ]:
            d_dsc_files[file][i] = dsc_files[file][i];
        # Optional dsc_files fields
        for i in [ "files id" ]:
            if dsc_files[file].has_key(i):
                d_dsc_files[file][i] = dsc_files[file][i];
    # Dump everything, in a fixed order matching update_vars().
    for i in [ d_changes, d_dsc, d_files, d_dsc_files,
               legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
200 ###########################################################################
202 # Set up the per-package template substitution mappings
204 def update_subst (self, reject_message = ""):
206 changes = self.pkg.changes;
207 # If jennifer crashed out in the right place, architecture may still be a string.
208 if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
209 changes["architecture"] = { "Unknown" : "" };
210 # and maintainer822 may not exist.
211 if not changes.has_key("maintainer822"):
212 changes["maintainer822"] = self.Cnf["Dinstall::MyEmailAddress"];
214 Subst["__ARCHITECTURE__"] = " ".join(changes["architecture"].keys());
215 Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file);
216 Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "");
218 # For source uploads the Changed-By field wins; otherwise Maintainer wins.
219 if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
220 Subst["__MAINTAINER_FROM__"] = changes["changedby822"];
221 Subst["__MAINTAINER_TO__"] = changes["changedby822"] + ", " + changes["maintainer822"];
222 Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown");
224 Subst["__MAINTAINER_FROM__"] = changes["maintainer822"];
225 Subst["__MAINTAINER_TO__"] = changes["maintainer822"];
226 Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown");
227 if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
228 Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])
230 # Apply any global override of the Maintainer field
231 if self.Cnf.get("Dinstall::OverrideMaintainer"):
232 Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"];
233 Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"];
235 Subst["__REJECT_MESSAGE__"] = reject_message;
236 Subst["__SOURCE__"] = changes.get("source", "Unknown");
237 Subst["__VERSION__"] = changes.get("version", "Unknown");
239 ###########################################################################
def build_summaries(self):
    """Build the human-readable summary of the upload: per-file
    destinations (or byhand/new markers), plus the changelog text and
    announcement info in the long form.  Returns (summary,
    short_summary) where short_summary omits the changelog/announce
    sections."""
    changes = self.pkg.changes;
    files = self.pkg.files;
    byhand = summary = new = "";
    # changes["distribution"] may not exist in corner cases
    # (e.g. unreadable changes files)
    if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
        changes["distribution"] = {};
    file_keys = files.keys();
    for file in file_keys:
        if files[file].has_key("byhand"):
            summary += file + " byhand\n"
        elif files[file].has_key("new"):
            summary += "(new) %s %s %s\n" % (file, files[file]["priority"], files[file]["section"])
            if files[file].has_key("othercomponents"):
                summary += "WARNING: Already present in %s distribution.\n" % (files[file]["othercomponents"])
            # Show the binary's description to help the NEW reviewer.
            if files[file]["type"] == "deb":
                summary += apt_pkg.ParseSection(apt_inst.debExtractControl(utils.open_file(file)))["Description"] + '\n';
        # Normal file: record its pool destination.
        files[file]["pool name"] = utils.poolify (changes.get("source",""), files[file]["component"])
        destination = self.Cnf["Dir::PoolRoot"] + files[file]["pool name"] + file
        summary += file + "\n to " + destination + "\n"
    short_summary = summary;
    # This is for direport's benefit...
    f = re_fdnic.sub("\n .\n", changes.get("changes",""));
    summary += "Changes: " + f;
    # 0 == don't actually send mail, just collect the summary text.
    summary += self.announce(short_summary, 0)
    return (summary, short_summary);
282 ###########################################################################
def close_bugs (self, summary, action):
    """Handle the bugs listed in the Closes: field.  For maintainer
    uploads the bugs are closed (or tagged fixed-in-experimental for
    uploads to experimental); for NMUs they are only tagged 'fixed'.
    Mail is sent only when 'action' is true.  Returns the updated
    summary text.
    NOTE(review): 'Subst', 'Cnf' and the per-bug loop variables come
    from elided bindings -- confirm against full source."""
    changes = self.pkg.changes;
    bugs = changes["closes"].keys();
    if not self.nmu.is_an_nmu(self.pkg):
        if changes["distribution"].has_key("experimental"):
            # tag bugs as fixed-in-experimental for uploads to experimental
            summary += "Setting bugs to severity fixed: ";
            control_message = "";
            summary += "%s " % (bug);
            control_message += "tag %s + fixed-in-experimental\n" % (bug);
            # Send the control@ message only when acting and there is something to send.
            if action and control_message != "":
                Subst["__CONTROL_MESSAGE__"] = control_message;
                mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.bug-experimental-fixed");
                utils.send_mail (mail_message);
                self.Logger.log(["setting bugs to fixed"]+bugs);
        # Plain maintainer upload: really close the bugs.
        summary += "Closing bugs: ";
        summary += "%s " % (bug);
        Subst["__BUG_NUMBER__"] = bug;
        if changes["distribution"].has_key("stable"):
            Subst["__STABLE_WARNING__"] = """
Note that this package is not part of the released stable Debian
distribution.  It may have dependencies on other unreleased software,
or other instabilities.  Please take care if you wish to install it.
The update will eventually make its way into the next released Debian
"""
        Subst["__STABLE_WARNING__"] = "";
        mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.bug-close");
        utils.send_mail (mail_message);
        self.Logger.log(["closing bugs"]+bugs);
    # NMU: tag the bugs fixed rather than closing them.
    summary += "Setting bugs to severity fixed: ";
    control_message = "";
    summary += "%s " % (bug);
    control_message += "tag %s + fixed\n" % (bug);
    if action and control_message != "":
        Subst["__CONTROL_MESSAGE__"] = control_message;
        mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.bug-nmu-fixed");
        utils.send_mail (mail_message);
        self.Logger.log(["setting bugs to fixed"]+bugs);
346 ###########################################################################
def announce (self, short_summary, action):
    """Announce the upload to the per-suite announce lists (if any),
    sending mail only when 'action' is true; also triggers bug
    closing when Dinstall::CloseBugs is set.  Returns the summary
    text describing what was (or would be) announced.
    NOTE(review): 'Subst', 'Cnf', 'summary' and 'lists_done' come from
    elided bindings -- confirm against full source."""
    changes = self.pkg.changes;
    # Only do announcements for source uploads with a recent dpkg-dev installed
    if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):
    Subst["__SHORT_SUMMARY__"] = short_summary;
    for dist in changes["distribution"].keys():
        list = Cnf.Find("Suite::%s::Announce" % (dist));
        # Skip suites without an announce list, and don't mail the
        # same list twice for multi-suite uploads.
        if list == "" or lists_done.has_key(list):
        lists_done[list] = 1;
        summary += "Announcing to %s\n" % (list);
        Subst["__ANNOUNCE_LIST_ADDRESS__"] = list;
        # Bcc the per-source tracking address when configured.
        if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
            Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"]);
        mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.announce");
        utils.send_mail (mail_message);
    if Cnf.FindB("Dinstall::CloseBugs"):
        summary = self.close_bugs(summary, action);
380 ###########################################################################
def accept (self, summary, short_summary):
    """Accept an upload: move the .changes and all its files into the
    accepted queue, send the accepted mail / announcements, and set up
    symlinks (or copies) plus accepted_autobuild rows for suites with
    clean auto-building enabled.
    NOTE(review): 'Cnf' and 'Subst' come from elided bindings; SQL is
    built by string interpolation, safe only because filenames come
    from the trusted queue -- confirm against full source."""
    files = self.pkg.files;
    self.Logger.log(["Accepting changes",self.pkg.changes_file]);
    # Persist state so katie/lisa can pick the upload up later.
    self.dump_vars(Cnf["Dir::Queue::Accepted"]);
    # Move all the files into the accepted directory
    utils.move(self.pkg.changes_file, Cnf["Dir::Queue::Accepted"]);
    file_keys = files.keys();
    for file in file_keys:
        utils.move(file, Cnf["Dir::Queue::Accepted"]);
        self.accept_bytes += float(files[file]["size"])
    self.accept_count += 1;
    # Send accept mail, announce to lists, close bugs and check for
    # override disparities
    if not Cnf["Dinstall::Options::No-Mail"]:
        Subst["__SUITE__"] = "";
        Subst["__SUMMARY__"] = summary;
        mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.accepted");
        utils.send_mail(mail_message)
        self.announce(short_summary, 1)
    # Special support to enable clean auto-building of accepted packages
    self.projectB.query("BEGIN WORK");
    for suite in self.pkg.changes["distribution"].keys():
        if suite not in Cnf.ValueList("Dinstall::AcceptedAutoBuildSuites"):
        suite_id = db_access.get_suite_id(suite);
        dest_dir = Cnf["Dir::AcceptedAutoBuild"];
        # Security archives get a per-suite subdirectory.
        if Cnf.FindB("Dinstall::SecurityAcceptedAutoBuild"):
            dest_dir = os.path.join(dest_dir, suite);
        for file in file_keys:
            src = os.path.join(Cnf["Dir::Queue::Accepted"], file);
            dest = os.path.join(dest_dir, file);
            if Cnf.FindB("Dinstall::SecurityAcceptedAutoBuild"):
                # Copy it since the original won't be readable by www-data
                utils.copy(src, dest);
            # Create a symlink to it
            os.symlink(src, dest);
            # Add it to the list of packages for later processing by apt-ftparchive
            self.projectB.query("INSERT INTO accepted_autobuild (suite, filename, in_accepted) VALUES (%s, '%s', 't')" % (suite_id, dest));
        # If the .orig.tar.gz is in the pool, create a symlink to
        # it (if one doesn't already exist)
        if self.pkg.orig_tar_id:
            # Determine the .orig.tar.gz file name
            for dsc_file in self.pkg.dsc_files.keys():
                if dsc_file.endswith(".orig.tar.gz"):
            dest = os.path.join(dest_dir, filename);
            # If it doesn't exist, create a symlink
            if not os.path.exists(dest):
                # Find the .orig.tar.gz in the pool
                q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id));
                utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id));
                src = os.path.join(ql[0][0], ql[0][1]);
                os.symlink(src, dest);
                # Add it to the list of packages for later processing by apt-ftparchive
                self.projectB.query("INSERT INTO accepted_autobuild (suite, filename, in_accepted) VALUES (%s, '%s', 't')" % (suite_id, dest));
            # if it does, update things to ensure it's not removed prematurely
            self.projectB.query("UPDATE accepted_autobuild SET in_accepted = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id));
    self.projectB.query("COMMIT WORK");
454 ###########################################################################
def check_override (self):
    """Compare the section/priority claimed by each binary in the
    upload against the archive override file, and mail the maintainer
    about any disparities.
    NOTE(review): 'summary', 'Cnf' and 'Subst' come from elided
    bindings -- confirm against full source."""
    changes = self.pkg.changes;
    files = self.pkg.files;
    # Abandon the check if:
    # a) it's a non-sourceful upload
    # b) override disparity checks have been disabled
    # c) we're not sending mail
    if not changes["architecture"].has_key("source") or \
       not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
       Cnf["Dinstall::Options::No-Mail"]:
    file_keys = files.keys();
    for file in file_keys:
        # Only existing (non-NEW) .debs have an override to compare against.
        if not files[file].has_key("new") and files[file]["type"] == "deb":
            section = files[file]["section"];
            override_section = files[file]["override section"];
            if section.lower() != override_section.lower() and section != "-":
                # Ignore this; it's a common mistake and not worth whining about
                if section.lower() == "non-us/main" and override_section.lower() == "non-us":
                summary += "%s: package says section is %s, override says %s.\n" % (file, section, override_section);
            priority = files[file]["priority"];
            override_priority = files[file]["override priority"];
            if priority != override_priority and priority != "-":
                summary += "%s: package says priority is %s, override says %s.\n" % (file, priority, override_priority);
    Subst["__SUMMARY__"] = summary;
    mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/jennifer.override-disparity");
    utils.send_mail(mail_message);
495 ###########################################################################
def force_reject (self, files):
    """Forcefully move files from the current directory to the
    reject directory.  If any file already exists in the reject
    directory it will be moved to the morgue to make way for
    the new file."""
    # Skip any files which don't exist or which we don't have permission to copy.
    if os.access(file,os.R_OK) == 0:
    dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file);
    # O_EXCL claims the destination atomically, so two katie runs
    # can't overwrite each other's rejects.
    os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);
    # File exists?  Let's try and move it to the morgue
    if errno.errorcode[e.errno] == 'EEXIST':
        morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file);
        morgue_file = utils.find_next_free(morgue_file);
    except utils.tried_too_hard_exc:
        # Something's either gone badly Pete Tong, or
        # someone is trying to exploit us.
        utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file));
    utils.move(dest_file, morgue_file, perms=0660);
    # Second attempt to claim the now-vacated destination.
    os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);
    utils.warn("**WARNING** failed to claim %s in the reject directory." % (file));
    # If we got here, we own the destination file, so we can
    # safely overwrite it.
    utils.move(file, dest_file, 1, perms=0660);
536 ###########################################################################
538 def do_reject (self, manual = 0, reject_message = ""):
539 # If we weren't given a manual rejection message, spawn an
540 # editor so the user can add one in...
541 if manual and not reject_message:
542 temp_filename = tempfile.mktemp();
543 fd = os.open(temp_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0700);
545 editor = os.environ.get("EDITOR","vi")
548 os.system("%s %s" % (editor, temp_filename))
549 file = utils.open_file(temp_filename);
550 reject_message = "".join(file.readlines());
552 print "Reject message:";
553 print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1);
554 prompt = "[R]eject, Edit, Abandon, Quit ?"
556 while prompt.find(answer) == -1:
557 answer = utils.our_raw_input(prompt);
558 m = re_default_answer.search(prompt);
561 answer = answer[:1].upper();
562 os.unlink(temp_filename);
574 reason_filename = pkg.changes_file[:-8] + ".reason";
575 reason_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename;
577 # Move all the files into the reject directory
578 reject_files = pkg.files.keys() + [pkg.changes_file];
579 self.force_reject(reject_files);
581 # If we fail here someone is probably trying to exploit the race
582 # so let's just raise an exception ...
583 if os.path.exists(reason_filename):
584 os.unlink(reason_filename);
585 reason_file = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);
588 Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"];
589 Subst["__MANUAL_REJECT_MESSAGE__"] = "";
590 Subst["__CC__"] = "X-Katie-Rejection: automatic (moo)";
591 os.write(reason_file, reject_message);
592 reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/katie.rejected");
594 # Build up the rejection email
595 user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"]);
597 Subst["__REJECTOR_ADDRESS__"] = user_email_address;
598 Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message;
599 Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"];
600 reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/katie.rejected");
601 # Write the rejection email out as the <foo>.reason file
602 os.write(reason_file, reject_mail_message);
604 os.close(reason_file);
606 # Send the rejection mail if appropriate
607 if not Cnf["Dinstall::Options::No-Mail"]:
608 utils.send_mail(reject_mail_message);
610 self.Logger.log(["rejected", pkg.changes_file]);
613 ################################################################################
615 # Ensure that source exists somewhere in the archive for the binary
616 # upload being processed.
618 # (1) exact match => 1.0-3
619 # (2) Bin-only NMU of an MU => 1.0-3.0.1
620 # (3) Bin-only NMU of a sourceful-NMU => 1.0-3.1.1
def source_exists (self, package, source_version, suites = ["any"]):
    """Check that source for a binary upload exists in the archive,
    in one of three forms:
      (1) exact match               => 1.0-3
      (2) bin-only NMU of an MU     => 1.0-3.0.1
      (3) bin-only NMU of an NMU    => 1.0-3.1.1
    NOTE(review): the default mutable 'suites' list is shared across
    calls; it is only read here, so this is benign but fragile."""
    # Any suite: just look for the source package at all.
    que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
    # source must exist in suite X, or in some other suite that's
    # mapped to X, recursively... silent-maps are counted too,
    # unreleased-maps aren't.
    maps = self.Cnf.ValueList("SuiteMappings")[:]
    maps = [ m.split() for m in maps ]
    maps = [ (x[1], x[2]) for x in maps
             if x[0] == "map" or x[0] == "silent-map" ]
    # Expand the suite set transitively through the mappings.
    if x[1] in s and x[0] not in s:
    que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, string.join(["su.suite_name = '%s'" % a for a in s], " OR "));
    q = self.projectB.query(que)
    # Reduce the query results to a list of version numbers
    ql = map(lambda x: x[0], q.getresult());
    # (1) exact match
    if ql.count(source_version):
    # (2) strip the binNMU suffix of a maintainer upload
    orig_source_version = re_bin_only_nmu_of_mu.sub('', source_version)
    if ql.count(orig_source_version):
    # (3) strip the binNMU suffix of a sourceful NMU
    orig_source_version = re_bin_only_nmu_of_nmu.sub('', source_version)
    if ql.count(orig_source_version):
666 ################################################################################
def in_override_p (self, package, component, suite, binary_type, file):
    """Look up the override entry for 'package' in 'suite'.  On a hit,
    record the override section/priority in files[file] so
    check_override() can compare them later.
    NOTE(review): 'type' is bound from binary_type in an elided line
    -- confirm against full source."""
    files = self.pkg.files;
    if binary_type == "": # must be source
    # Override suite name; used for example with proposed-updates
    if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
        suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)];
    # Avoid <undef> on unknown distributions
    suite_id = db_access.get_suite_id(suite);
    component_id = db_access.get_component_id(component);
    type_id = db_access.get_override_type_id(type);
    # FIXME: nasty non-US speficic hack
    if component[:7].lower() == "non-us/":
        component = component[7:];
    q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
                            % (package, suite_id, component_id, type_id));
    result = q.getresult();
    # If checking for a source package fall back on the binary override type
    if type == "dsc" and not result:
        deb_type_id = db_access.get_override_type_id("deb");
        udeb_type_id = db_access.get_override_type_id("udeb");
        q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
                                % (package, suite_id, component_id, deb_type_id, udeb_type_id));
        result = q.getresult();
    # Remember the section and priority so we can check them later if appropriate
    files[file]["override section"] = result[0][0];
    files[file]["override priority"] = result[0][1];
709 ################################################################################
711 def reject (self, str, prefix="Rejected: "):
713 # Unlike other rejects we add new lines first to avoid trailing
714 # new lines when this message is passed back up to a caller.
715 if self.reject_message:
716 self.reject_message += "\n";
717 self.reject_message += prefix + str;
719 ################################################################################
def cross_suite_version_check(self, query_result, file, new_version):
    """Ensure versions are newer than existing packages in target
    suites and that cross-suite version checking rules as
    set out in the conf file are satisfied.
    query_result rows are (version, suite_name) pairs; failures are
    accumulated via self.reject().
    NOTE(review): 'suite' is bound from each entry in an elided line
    -- confirm against full source."""
    # Check versions for each target suite
    for target_suite in self.pkg.changes["distribution"].keys():
        must_be_newer_than = map(string.lower, self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)));
        must_be_older_than = map(string.lower, self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)));
        # Enforce "must be newer than target suite" even if conffile omits it
        if target_suite not in must_be_newer_than:
            must_be_newer_than.append(target_suite);
        for entry in query_result:
            existent_version = entry[0];
            # VersionCompare returns 1/0/-1 for newer/equal/older.
            if suite in must_be_newer_than and \
               apt_pkg.VersionCompare(new_version, existent_version) != 1:
                self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite));
            if suite in must_be_older_than and \
               apt_pkg.VersionCompare(new_version, existent_version) != -1:
                self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite));
743 ################################################################################
def check_binary_against_db(self, file):
    """Validate a binary against the archive database: its version
    must satisfy the cross-suite rules, and the exact
    (package, version, architecture) must not already exist.
    Returns the accumulated reject message ("" when clean)."""
    self.reject_message = "";
    files = self.pkg.files;
    # Ensure version is sane
    q = self.projectB.query("""
SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
 WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
   AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
                            % (files[file]["package"],
                               files[file]["architecture"]));
    self.cross_suite_version_check(q.getresult(), file, files[file]["version"]);
    # Check for any existing copies of the file
    q = self.projectB.query("""
SELECT b.id FROM binaries b, architecture a
 WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
   AND a.id = b.architecture"""
                            % (files[file]["package"],
                               files[file]["version"],
                               files[file]["architecture"]))
    # A hit means this exact binary is already archived.
    self.reject("%s: can not overwrite existing copy already in the archive." % (file));
    return self.reject_message;
772 ################################################################################
def check_source_against_db(self, file):
    """Validate the source package's version against existing versions
    in the archive via the cross-suite rules.  Returns the
    accumulated reject message ("" when clean).
    NOTE(review): 'dsc' is bound from self.pkg.dsc in an elided line
    -- confirm against full source."""
    self.reject_message = "";
    # Ensure version is sane
    q = self.projectB.query("""
SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
 WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")));
    self.cross_suite_version_check(q.getresult(), file, dsc.get("version"));
    return self.reject_message;
786 ################################################################################
789 # NB: this function can remove entries from the 'files' index [if
790 # the .orig.tar.gz is a duplicate of the one in the archive]; if
791 # you're iterating over 'files' and call this function as part of
792 # the loop, be sure to add a check to the top of the loop to
793 # ensure you haven't just tried to derefernece the deleted entry.
def check_dsc_against_db(self, file):
    """Verify every file the .dsc references: it must either be part
    of this upload, already in the archive/pool (for .orig.tar.gz),
    or waiting in one of the queue directories -- with matching size
    and md5sum in every case.  Returns (reject_message, orig_tar_gz).
    NB: this can remove entries from self.pkg.files when the
    .orig.tar.gz duplicates the archived copy (see header comment
    above in the original file).
    NOTE(review): several loop/else lines are elided here; 'ql',
    'suite_type', 'found', 'orig_tar_gz' come from those -- confirm
    against full source."""
    self.reject_message = "";
    files = self.pkg.files;
    dsc_files = self.pkg.dsc_files;
    legacy_source_untouchable = self.pkg.legacy_source_untouchable;
    # Try and find all files mentioned in the .dsc.  This has
    # to work harder to cope with the multiple possible
    # locations of an .orig.tar.gz.
    for dsc_file in dsc_files.keys():
        if files.has_key(dsc_file):
            actual_md5 = files[dsc_file]["md5sum"];
            actual_size = int(files[dsc_file]["size"]);
            found = "%s in incoming" % (dsc_file)
            # Check the file does not already exist in the archive
            q = self.projectB.query("SELECT size, md5sum, filename FROM files WHERE filename LIKE '%%%s%%'" % (dsc_file));
            # Strip out anything that isn't '%s' or '/%s$'
            if i[2] != dsc_file and i[2][-(len(dsc_file)+1):] != '/'+dsc_file:
                self.Logger.log(["check_dsc_against_db",i[2],dsc_file]);
            # "[katie] has not broken them.  [katie] has fixed a
            # brokenness.  Your crappy hack exploited a bug in
            # "(Come on!  I thought it was always obvious that
            # one just doesn't release different files with
            # the same name and version.)"
            #                        -- ajk@ on d-devel@l.d.o
            # Ignore exact matches for .orig.tar.gz
            if dsc_file.endswith(".orig.tar.gz"):
                # Same name, size and md5sum as the archived copy:
                # drop it from this upload with a warning only.
                if files.has_key(dsc_file) and \
                   int(files[dsc_file]["size"]) == int(i[0]) and \
                   files[dsc_file]["md5sum"] == i[1]:
                    self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ");
            self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file));
        elif dsc_file.endswith(".orig.tar.gz"):
            # Not in the upload: look for it in the pool.
            q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file));
            # Strip out anything that isn't '%s' or '/%s$'
            if i[1] != dsc_file and i[1][-(len(dsc_file)+1):] != '/'+dsc_file:
                self.Logger.log(["check_dsc_against_db",i[1],dsc_file]);
            # Unfortunately, we make get more than one
            # match here if, for example, the package was
            # in potato but had a -sa upload in woody.  So
            # we need to choose the right one.
            x = ql[0]; # default to something sane in case we don't match any or have only one
            # Prefer the candidate whose on-disk size/md5sum match the .dsc.
            old_file = i[0] + i[1];
            actual_md5 = apt_pkg.md5sum(utils.open_file(old_file));
            actual_size = os.stat(old_file)[stat.ST_SIZE];
            if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
            legacy_source_untouchable[i[3]] = "";
            old_file = x[0] + x[1];
            actual_md5 = apt_pkg.md5sum(utils.open_file(old_file));
            actual_size = os.stat(old_file)[stat.ST_SIZE];
            dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
            # See install() in katie...
            self.pkg.orig_tar_id = x[3];
            if suite_type == "legacy" or suite_type == "legacy-mixed":
                self.pkg.orig_tar_location = "legacy";
            self.pkg.orig_tar_location = x[4];
        # Not there? Check the queue directories...
        in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file);
        # See process_it() in jennifer for explanation of this
        if os.path.exists(in_unchecked):
            return (self.reject_message, in_unchecked);
        for dir in [ "Accepted", "New", "Byhand" ]:
            in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (dir)],dsc_file);
            if os.path.exists(in_otherdir):
                actual_md5 = apt_pkg.md5sum(utils.open_file(in_otherdir));
                actual_size = os.stat(in_otherdir)[stat.ST_SIZE];
        # Nowhere to be found: reject appropriately.
        self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file));
        self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file));
        # Whatever copy we settled on must match the .dsc's checksums.
        if actual_md5 != dsc_files[dsc_file]["md5sum"]:
            self.reject("md5sum for %s doesn't match %s." % (found, file));
        if actual_size != int(dsc_files[dsc_file]["size"]):
            self.reject("size for %s doesn't match %s." % (found, file));
    return (self.reject_message, orig_tar_gz);
def do_query(self, q):
    """Execute a query against projectB, logging the query text and
    its wall-clock duration to stderr.  Returns the query result
    object (the visible original discarded it)."""
    sys.stderr.write("query: \"%s\" ... " % (q));
    before = time.time();
    r = self.projectB.query(q);
    time_diff = time.time()-before;
    sys.stderr.write("took %.3f seconds.\n" % (time_diff));
    return r;