3 # Utility functions for katie
4 # Copyright (C) 2001, 2002 James Troup <james@nocrew.org>
5 # $Id: katie.py,v 1.26 2002-08-26 18:07:24 ajt Exp $
7 # This program is free software; you can redistribute it and/or modify
8 # it under the terms of the GNU General Public License as published by
9 # the Free Software Foundation; either version 2 of the License, or
10 # (at your option) any later version.
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU General Public License for more details.
17 # You should have received a copy of the GNU General Public License
18 # along with this program; if not, write to the Free Software
19 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 ###############################################################################
23 import cPickle, errno, os, pg, re, stat, string, sys, tempfile, time;
24 import utils, db_access;
25 import apt_inst, apt_pkg;
28 from string import lower;
30 ###############################################################################
# Pre-compiled regular expressions used throughout this module.
# All patterns use raw string literals so backslashes reach the regex
# engine unmangled ("\." and "\d" are invalid Python string escapes).
re_isanum = re.compile (r"^\d+$");
re_default_answer = re.compile(r"\[(.*)\]");
# Matches a blank line; note: deliberately NOT raw — we want two real newlines.
re_fdnic = re.compile("\n\n");
# Version ending in ".N.N" => binary-only NMU of a maintainer upload.
re_bin_only_nmu_of_mu = re.compile(r"\.\d+\.\d+$");
# Version ending in ".N" => binary-only NMU of a sourceful NMU.
re_bin_only_nmu_of_nmu = re.compile(r"\.\d+$");
38 ###############################################################################
40 # Convenience wrapper to carry around all the package information in
# Pkg.__init__: stash every keyword argument as an instance attribute, so
# Pkg(changes={}, files={}) yields an object with .changes and .files.
def __init__(self, **kwds):
self.__dict__.update(kwds);
def update(self, **kwds):
    """Merge the given keyword arguments into this object's attributes."""
    for key, value in kwds.items():
        setattr(self, key, value);
49 ###############################################################################
# Read in the group maintainer override file
# nmu_p.__init__: build self.group_maint, a set (dict used as set) of
# lowercased maintainer addresses whose packages are group-maintained.
def __init__ (self, Cnf):
self.group_maint = {};
if Cnf.get("Dinstall::GroupOverrideFilename"):
filename = Cnf["Dir::Override"] + Cnf["Dinstall::GroupOverrideFilename"];
file = utils.open_file(filename);
for line in file.readlines():
# Strip comments and surrounding whitespace; compare case-insensitively.
line = lower(string.strip(utils.re_comments.sub('', line)));
self.group_maint[line] = 1;
# Decide whether the upload in `pkg` is a Non-Maintainer Upload (NMU).
# NOTE(review): several lines (the `dsc`/`Cnf` bindings, `uploadernames`
# initialisation, the loop header over uploaders, and the return
# statements) are not visible in this view of the file.
def is_an_nmu (self, pkg):
changes = pkg.changes;
# Normalise the .dsc Maintainer field for comparison.
(dsc_rfc822, dsc_name, dsc_email) = utils.fix_maintainer (lower(dsc.get("maintainer",Cnf["Dinstall::MyEmailAddress"])));
# changes["changedbyname"] == dsc_name is probably never true, but better safe than sorry
if dsc_name == lower(changes["maintainername"]) and \
(changes["changedby822"] == "" or lower(changes["changedbyname"]) == dsc_name):
# A co-maintainer listed in Uploaders also counts as non-NMU.
if dsc.has_key("uploaders"):
uploaders = string.split(lower(dsc["uploaders"]), ",");
(rfc822, name, email) = utils.fix_maintainer (string.strip(i));
uploadernames[name] = "";
if uploadernames.has_key(lower(changes["changedbyname"])):
# Some group maintained packages (e.g. Debian QA) are never NMU's
if self.group_maint.has_key(lower(changes["maintaineremail"])):
91 ###############################################################################
# Katie.__init__: set up per-run state — NMU detector, accept counters,
# empty package record, the static part of the mail-template substitution
# map, and the postgres connection to the projectb database.
def __init__(self, Cnf):
# Read in the group-maint override file
self.nmu = nmu_p(Cnf);
# Running totals for the final "Accepted N packages, M bytes" summary.
self.accept_count = 0;
self.accept_bytes = 0L;
self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
legacy_source_untouchable = {});
# Initialize the substitution template mapping global
Subst = self.Subst = {};
Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"];
Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"];
Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"];
Subst["__KATIE_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"];
# Open the database connection and prime db_access's caches with it.
self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]));
db_access.init(Cnf, self.projectB);
115 ###########################################################################
def init_vars (self):
    """Reset all per-package state ready for processing the next upload.

    Clears every dict on self.pkg in place (so existing references to
    the dicts stay valid) and resets the .orig.tar.gz bookkeeping."""
    # getattr instead of exec-on-a-formatted-string: same effect, no
    # dynamic code generation.
    for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
        getattr(self.pkg, i).clear();
    self.pkg.orig_tar_id = None;
    self.pkg.orig_tar_location = "";
123 ###########################################################################
# Reload the per-package state previously saved by dump_vars(): unpickle
# the <changes-basename>.katie file alongside the .changes file.
# The pickle stream layout must match dump_vars(): five dicts, then
# orig_tar_id and orig_tar_location, in this exact order.
def update_vars (self):
# ".katie" replaces the trailing ".changes" (8 chars) of the filename.
dump_filename = self.pkg.changes_file[:-8]+".katie";
dump_file = utils.open_file(dump_filename);
p = cPickle.Unpickler(dump_file);
for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
exec "self.pkg.%s.update(p.load());" % (i);
for i in [ "orig_tar_id", "orig_tar_location" ]:
exec "self.pkg.%s = p.load();" % (i);
135 ###########################################################################
# This could just dump the dictionaries as is, but I'd like to avoid
# this so there's some idea of what katie & lisa use from jennifer
# Serialise a curated subset of self.pkg into <dest_dir>/<base>.katie as
# a pickle, for later reloading by update_vars().
# NOTE(review): the try/except lines around os.chmod and parts of the
# loop structure are not visible in this view of the file.
def dump_vars(self, dest_dir):
# Bind each self.pkg attribute to a same-named local variable.
for i in [ "changes", "dsc", "files", "dsc_files",
"legacy_source_untouchable", "orig_tar_id", "orig_tar_location" ]:
exec "%s = self.pkg.%s;" % (i,i);
dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".katie");
dump_file = utils.open_file(dump_filename, 'w');
# Group-writable so lisa (run as another group member) can rewrite it.
os.chmod(dump_filename, 0660);
if errno.errorcode[e.errno] == 'EPERM':
perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE]);
# The file may contain maintainer addresses etc.; refuse to leave a
# world-readable copy around if we couldn't restrict it.
if perms & stat.S_IROTH:
utils.fubar("%s is world readable and chmod failed." % (dump_filename));
# Protocol 1 (binary) pickler.
p = cPickle.Pickler(dump_file, 1);
for i in [ "d_changes", "d_dsc", "d_files", "d_dsc_files" ]:
# Copy only the whitelisted keys of each file entry.
for file in files.keys():
for i in [ "package", "version", "architecture", "type", "size",
"md5sum", "component", "location id", "source package",
"source version", "maintainer", "dbtype", "files id",
"new", "section", "priority", "othercomponents",
"pool name", "original component" ]:
if files[file].has_key(i):
d_files[file][i] = files[file][i];
# Mandatory changes fields
for i in [ "distribution", "source", "architecture", "version", "maintainer",
"urgency", "fingerprint", "changedby822", "changedbyname",
"maintainername", "maintaineremail", "closes" ]:
d_changes[i] = changes[i];
# Optional changes fields
# FIXME: changes should be mandatory
for i in [ "changed-by", "maintainer822", "filecontents", "format",
"changes", "lisa note" ]:
if changes.has_key(i):
d_changes[i] = changes[i];
for i in [ "source", "version", "maintainer", "fingerprint", "uploaders" ]:
for file in dsc_files.keys():
d_dsc_files[file] = {};
# Mandatory dsc_files fields
for i in [ "size", "md5sum" ]:
d_dsc_files[file][i] = dsc_files[file][i];
# Optional dsc_files fields
for i in [ "files id" ]:
if dsc_files[file].has_key(i):
d_dsc_files[file][i] = dsc_files[file][i];
# Dump order here must stay in sync with update_vars().
for i in [ d_changes, d_dsc, d_files, d_dsc_files,
legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
201 ###########################################################################
# Set up the per-package template substitution mappings
# Populate self.Subst with values derived from the current .changes data
# (architectures, maintainer addressing, reject message, source/version).
# NOTE(review): the else: arm for the non-source addressing case is not
# visible in this view of the file.
def update_subst (self, reject_message = ""):
changes = self.pkg.changes;
# If jennifer crashed out in the right place, architecture may still be a string.
if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
changes["architecture"] = { "Unknown" : "" };
# and maintainer822 may not exist.
if not changes.has_key("maintainer822"):
changes["maintainer822"] = self.Cnf["Dinstall::MyEmailAddress"];
Subst["__ARCHITECTURE__"] = string.join(changes["architecture"].keys(), ' ' );
Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file);
Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "");
# For source uploads the Changed-By field wins; otherwise Maintainer wins.
if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
Subst["__MAINTAINER_FROM__"] = changes["changedby822"];
Subst["__MAINTAINER_TO__"] = changes["changedby822"] + ", " + changes["maintainer822"];
Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown");
Subst["__MAINTAINER_FROM__"] = changes["maintainer822"];
Subst["__MAINTAINER_TO__"] = changes["maintainer822"];
Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown");
# Bcc the package tracking server on sourceful uploads, if configured.
if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
Subst["__MAINTAINER_TO__"] = Subst["__MAINTAINER_TO__"] + "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])
# Apply any global override of the Maintainer field
if self.Cnf.get("Dinstall::OverrideMaintainer"):
Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"];
Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"];
Subst["__REJECT_MESSAGE__"] = reject_message;
Subst["__SOURCE__"] = changes.get("source", "Unknown");
Subst["__VERSION__"] = changes.get("version", "Unknown");
240 ###########################################################################
# Build the human-readable summary of this upload: one line per file
# (flagging byhand/new entries and their destinations), followed by the
# changelog text and any announcement notes.
# Returns (summary, short_summary) where short_summary omits the
# Changes/announce sections.
def build_summaries(self):
changes = self.pkg.changes;
files = self.pkg.files;
byhand = summary = new = "";
# changes["distribution"] may not exist in corner cases
# (e.g. unreadable changes files)
if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
changes["distribution"] = {};
file_keys = files.keys();
for file in file_keys:
if files[file].has_key("byhand"):
summary = summary + file + " byhand\n"
elif files[file].has_key("new"):
summary = summary + "(new) %s %s %s\n" % (file, files[file]["priority"], files[file]["section"])
if files[file].has_key("othercomponents"):
summary = summary + "WARNING: Already present in %s distribution.\n" % (files[file]["othercomponents"])
# For new .debs, include the package Description for lisa's benefit.
if files[file]["type"] == "deb":
summary = summary + apt_pkg.ParseSection(apt_inst.debExtractControl(utils.open_file(file)))["Description"] + '\n';
files[file]["pool name"] = utils.poolify (changes["source"], files[file]["component"])
destination = self.Cnf["Dir::PoolRoot"] + files[file]["pool name"] + file
summary = summary + file + "\n to " + destination + "\n"
short_summary = summary;
# This is for direport's benefit...
f = re_fdnic.sub("\n .\n", changes.get("changes",""));
summary = summary + "Changes: " + f;
# announce(..., 0) only builds text; it sends no mail when action is 0.
summary = summary + self.announce(short_summary, 0)
return (summary, short_summary);
283 ###########################################################################
# Close (or, for NMUs, tag "fixed") the bugs listed in the Closes field.
# Mails are only sent when `action` is true; the summary text is built
# either way and returned.
# NOTE(review): the loop headers over `bugs` and some guard lines are
# not visible in this view of the file.
def close_bugs (self, summary, action):
changes = self.pkg.changes;
bugs = changes["closes"].keys();
# Maintainer upload: really close the bugs.
if not self.nmu.is_an_nmu(self.pkg):
summary = summary + "Closing bugs: ";
summary = summary + "%s " % (bug);
Subst["__BUG_NUMBER__"] = bug;
if changes["distribution"].has_key("stable"):
Subst["__STABLE_WARNING__"] = """
Note that this package is not part of the released stable Debian
distribution. It may have dependencies on other unreleased software,
or other instabilities. Please take care if you wish to install it.
The update will eventually make its way into the next released Debian
Subst["__STABLE_WARNING__"] = "";
mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.bug-close");
utils.send_mail (mail_message, "");
self.Logger.log(["closing bugs"]+bugs);
# NMU: don't close, just tag the bugs fixed via a control message.
summary = summary + "Setting bugs to severity fixed: ";
control_message = "";
summary = summary + "%s " % (bug);
control_message = control_message + "tag %s + fixed\n" % (bug);
if action and control_message != "":
Subst["__CONTROL_MESSAGE__"] = control_message;
mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.bug-nmu-fixed");
utils.send_mail (mail_message, "");
self.Logger.log(["setting bugs to fixed"]+bugs);
summary = summary + "\n";
330 ###########################################################################
# Announce the upload to the per-suite announcement lists (deduplicated
# across suites) and trigger bug closing if enabled. Mail is only sent
# when `action` is true; returns the accumulated summary text.
def announce (self, short_summary, action):
changes = self.pkg.changes;
# Only do announcements for source uploads with a recent dpkg-dev installed
if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):
Subst["__SHORT_SUMMARY__"] = short_summary;
for dist in changes["distribution"].keys():
list = Cnf.Find("Suite::%s::Announce" % (dist));
# Skip suites with no list configured or lists already announced to.
if list == "" or lists_done.has_key(list):
lists_done[list] = 1;
summary = summary + "Announcing to %s\n" % (list);
Subst["__ANNOUNCE_LIST_ADDRESS__"] = list;
if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"]);
mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.announce");
utils.send_mail (mail_message, "");
if Cnf.FindB("Dinstall::CloseBugs"):
summary = self.close_bugs(summary, action);
364 ###########################################################################
# Accept an upload: move the .changes and all its files into the
# accepted queue, send the accepted mail / announcements, and maintain
# the accepted_autobuild table (symlinks or copies) for auto-building.
# NOTE(review): several guard/continue/else lines are not visible in
# this view of the file.
def accept (self, summary, short_summary):
files = self.pkg.files;
self.Logger.log(["Accepting changes",self.pkg.changes_file]);
# Save state so lisa/katie can pick the package back up later.
self.dump_vars(Cnf["Dir::Queue::Accepted"]);
# Move all the files into the accepted directory
utils.move(self.pkg.changes_file, Cnf["Dir::Queue::Accepted"]);
file_keys = files.keys();
for file in file_keys:
utils.move(file, Cnf["Dir::Queue::Accepted"]);
self.accept_bytes = self.accept_bytes + float(files[file]["size"])
self.accept_count = self.accept_count + 1;
# Send accept mail, announce to lists, close bugs and check for
# override disparities
if not Cnf["Dinstall::Options::No-Mail"]:
Subst["__SUITE__"] = "";
Subst["__SUMMARY__"] = summary;
mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.accepted");
utils.send_mail(mail_message, "")
self.announce(short_summary, 1)
# Special support to enable clean auto-building of accepted packages
self.projectB.query("BEGIN WORK");
for suite in self.pkg.changes["distribution"].keys():
if suite not in Cnf.ValueList("Dinstall::AcceptedAutoBuildSuites"):
suite_id = db_access.get_suite_id(suite);
dest_dir = Cnf["Dir::AcceptedAutoBuild"];
if Cnf.FindB("Dinstall::SecurityAcceptedAutoBuild"):
dest_dir = os.path.join(dest_dir, suite);
for file in file_keys:
src = os.path.join(Cnf["Dir::Queue::Accepted"], file);
dest = os.path.join(dest_dir, file);
if Cnf.FindB("Dinstall::SecurityAcceptedAutoBuild"):
# Copy it since the original won't be readable by www-data
utils.copy(src, dest);
# Create a symlink to it
os.symlink(src, dest);
# Add it to the list of packages for later processing by apt-ftparchive
self.projectB.query("INSERT INTO accepted_autobuild (suite, filename, in_accepted) VALUES (%s, '%s', 't')" % (suite_id, dest));
# If the .orig.tar.gz is in the pool, create a symlink to
# it (if one doesn't already exist)
if self.pkg.orig_tar_id:
# Determine the .orig.tar.gz file name
for dsc_file in self.pkg.dsc_files.keys():
if dsc_file[-12:] == ".orig.tar.gz":
dest = os.path.join(dest_dir, filename);
# If it doesn't exist, create a symlink
if not os.path.exists(dest):
# Find the .orig.tar.gz in the pool
q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id));
utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id));
src = os.path.join(ql[0][0], ql[0][1]);
os.symlink(src, dest);
# Add it to the list of packages for later processing by apt-ftparchive
self.projectB.query("INSERT INTO accepted_autobuild (suite, filename, in_accepted) VALUES (%s, '%s', 't')" % (suite_id, dest));
# if it does, update things to ensure it's not removed prematurely
self.projectB.query("UPDATE accepted_autobuild SET in_accepted = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id));
self.projectB.query("COMMIT WORK");
438 ###########################################################################
# Compare each .deb's section/priority against the override table and
# mail the maintainer about any disparities.
def check_override (self):
changes = self.pkg.changes;
files = self.pkg.files;
# Abandon the check if:
# a) it's a non-sourceful upload
# b) override disparity checks have been disabled
# c) we're not sending mail
if not changes["architecture"].has_key("source") or \
not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
Cnf["Dinstall::Options::No-Mail"]:
for file in files.keys():
# NEW packages have no override yet; only established .debs count.
if not files[file].has_key("new") and files[file]["type"] == "deb":
section = files[file]["section"];
override_section = files[file]["override section"];
# "-" means the uploader supplied no value, so there's no disparity.
if lower(section) != lower(override_section) and section != "-":
# Ignore this; it's a common mistake and not worth whining about
if lower(section) == "non-us/main" and lower(override_section) == "non-us":
summary = summary + "%s: section is overridden from %s to %s.\n" % (file, section, override_section);
priority = files[file]["priority"];
override_priority = files[file]["override priority"];
if priority != override_priority and priority != "-":
summary = summary + "%s: priority is overridden from %s to %s.\n" % (file, priority, override_priority);
Subst["__SUMMARY__"] = summary;
mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/jennifer.override-disparity");
utils.send_mail (mail_message, "");
477 ###########################################################################
# NOTE(review): the loop header over `files` and the try: lines around
# the os.open / morgue handling are not visible in this view of the file.
def force_move (self, files):
"""Forcefully move files from the current directory to the reject
directory. If any file already exists it will be moved to the
morgue to make way for the new file."""
# Skip any files which don't exist or which we don't have permission to copy.
if os.access(file,os.R_OK) == 0:
dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file);
# O_EXCL: claim the destination atomically so a racing writer fails.
os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);
# File exists? Let's try and move it to the morgue
if errno.errorcode[e.errno] == 'EEXIST':
morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file);
morgue_file = utils.find_next_free(morgue_file);
except utils.tried_too_hard_exc:
# Something's either gone badly Pete Tong, or
# someone is trying to exploit us.
utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file));
utils.move(dest_file, morgue_file, perms=0660);
# Retry the exclusive create now that the old file is out of the way.
os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);
utils.warn("**WARNING** failed to claim %s in the reject directory." % (file));
# If we got here, we own the destination file, so we can
# safely overwrite it.
utils.move(file, dest_file, 1, perms=0660);
517 ###########################################################################
# Reject the current upload: optionally let the operator compose the
# rejection message in $EDITOR, move all files to the reject queue,
# write a <base>.reason file, and mail the rejection.
# NOTE(review): several lines (prompt loop body, try: lines, template
# branch structure) are not visible in this view of the file.
def do_reject (self, manual = 0, reject_message = ""):
# If we weren't given a manual rejection message, spawn an
# editor so the user can add one in...
if manual and not reject_message:
temp_filename = tempfile.mktemp();
# 0700 + O_EXCL: private temp file, fail rather than follow a race.
fd = os.open(temp_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0700);
editor = os.environ.get("EDITOR","vi")
os.system("%s %s" % (editor, temp_filename))
file = utils.open_file(temp_filename);
reject_message = string.join(file.readlines());
print "Reject message:";
print utils.prefix_multi_line_string(reject_message," ");
prompt = "[R]eject, Edit, Abandon, Quit ?"
while string.find(prompt, answer) == -1:
answer = utils.our_raw_input(prompt);
m = re_default_answer.search(prompt);
answer = string.upper(answer[:1]);
os.unlink(temp_filename);
reason_filename = pkg.changes_file[:-8] + ".reason";
reject_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename;
# Move all the files into the reject directory
reject_files = pkg.files.keys() + [pkg.changes_file];
self.force_move(reject_files);
# If we fail here someone is probably trying to exploit the race
# so let's just raise an exception ...
if os.path.exists(reject_filename):
os.unlink(reject_filename);
fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);
# Automatic rejection: katie speaks for itself.
Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"];
Subst["__MANUAL_REJECT_MESSAGE__"] = "";
Subst["__CC__"] = "X-Katie-Rejection: automatic (moo)";
os.write(fd, reject_message);
reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/katie.rejected");
# Build up the rejection email
user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"]);
Subst["__REJECTOR_ADDRESS__"] = user_email_address;
Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message;
Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"];
reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/katie.rejected");
# Write the rejection email out as the <foo>.reason file
os.write(fd, reject_mail_message);
# Send the rejection mail if appropriate
if not Cnf["Dinstall::Options::No-Mail"]:
utils.send_mail (reject_mail_message, "");
self.Logger.log(["rejected", pkg.changes_file]);
595 ################################################################################
# Ensure that source exists somewhere in the archive for the binary
# upload being processed.
# (1) exact match => 1.0-3
# (2) Bin-only NMU of an MU => 1.0-3.0.1
# (3) Bin-only NMU of a sourceful-NMU => 1.0-3.1.1
# NOTE(review): the return statements for each case are not visible in
# this view of the file.
def source_exists (self, package, source_version):
q = self.projectB.query("SELECT s.version FROM source s WHERE s.source = '%s'" % (package));
# Reduce the query results to a list of version numbers
ql = map(lambda x: x[0], q.getresult());
# (1) exact match
if ql.count(source_version):
# (2) strip the binNMU suffix ".N.N" and look for that source version
orig_source_version = re_bin_only_nmu_of_mu.sub('', source_version);
if ql.count(orig_source_version):
# (3) strip the binNMU-of-NMU suffix ".N" and try again
orig_source_version = re_bin_only_nmu_of_nmu.sub('', source_version);
if ql.count(orig_source_version):
627 ################################################################################
# Look the package up in the override table for the given suite and
# component, remembering the override section/priority on files[file]
# so check_override() can compare them later.
# NOTE(review): the lines binding `type` for the source/binary cases and
# the return statements are not visible in this view of the file.
def in_override_p (self, package, component, suite, binary_type, file):
files = self.pkg.files;
if binary_type == "": # must be source
# Override suite name; used for example with proposed-updates
if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)];
# Avoid <undef> on unknown distributions
suite_id = db_access.get_suite_id(suite);
component_id = db_access.get_component_id(component);
type_id = db_access.get_override_type_id(type);
# FIXME: nasty non-US speficic hack
if lower(component[:7]) == "non-us/":
component = component[7:];
q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
% (package, suite_id, component_id, type_id));
result = q.getresult();
# If checking for a source package fall back on the binary override type
if type == "dsc" and not result:
deb_type_id = db_access.get_override_type_id("deb");
udeb_type_id = db_access.get_override_type_id("udeb");
q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
% (package, suite_id, component_id, deb_type_id, udeb_type_id));
result = q.getresult();
# Remember the section and priority so we can check them later if appropriate
files[file]["override section"] = result[0][0];
files[file]["override priority"] = result[0][1];
670 ################################################################################
def reject (self, str, prefix="Rejected: "):
    """Append `prefix + str` as a new line of self.reject_message.

    The newline separator is inserted *before* the new entry, so the
    accumulated message never carries a trailing newline when it is
    passed back up to a caller."""
    separator = "";
    if self.reject_message:
        separator = "\n";
    self.reject_message = self.reject_message + separator + prefix + str;
680 ################################################################################
# NOTE(review): the line binding `suite` from each query-result entry is
# not visible in this view of the file.
def cross_suite_version_check(self, query_result, file, new_version):
"""Ensure versions are newer than existing packages in target
suites and that cross-suite version checking rules as
set out in the conf file are satisfied."""
# Check versions for each target suite
for target_suite in self.pkg.changes["distribution"].keys():
must_be_newer_than = map(lower, self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)));
must_be_older_than = map(lower, self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)));
# Enforce "must be newer than target suite" even if conffile omits it
if target_suite not in must_be_newer_than:
must_be_newer_than.append(target_suite);
for entry in query_result:
existent_version = entry[0];
# VersionCompare: 1 => new > old, -1 => new < old.
if suite in must_be_newer_than and \
apt_pkg.VersionCompare(new_version, existent_version) != 1:
self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite));
if suite in must_be_older_than and \
apt_pkg.VersionCompare(new_version, existent_version) != -1:
self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite));
704 ################################################################################
# Validate a binary upload against the database: version sanity across
# suites, then refusal to overwrite an existing identical file.
# Returns the accumulated reject message ("" if all checks passed).
# NOTE(review): part of the first SQL FROM clause and the result-check
# line before the final reject are not visible in this view of the file.
def check_binary_against_db(self, file):
self.reject_message = "";
files = self.pkg.files;
# Ensure version is sane
q = self.projectB.query("""
SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
% (files[file]["package"],
files[file]["architecture"]));
self.cross_suite_version_check(q.getresult(), file, files[file]["version"]);
# Check for any existing copies of the file
q = self.projectB.query("""
SELECT b.id FROM binaries b, architecture a
WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
AND a.id = b.architecture"""
% (files[file]["package"],
files[file]["version"],
files[file]["architecture"]))
self.reject("can not overwrite existing copy of '%s' already in the archive." % (file));
return self.reject_message;
733 ################################################################################
# Validate a source upload's version against all suites it already
# exists in; returns the accumulated reject message.
# NOTE(review): the line binding `dsc` (presumably self.pkg.dsc) is not
# visible in this view of the file.
def check_source_against_db(self, file):
self.reject_message = "";
# Ensure version is sane
q = self.projectB.query("""
SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")));
self.cross_suite_version_check(q.getresult(), file, dsc.get("version"));
return self.reject_message;
747 ################################################################################
# Verify every file listed in the .dsc: find it in incoming, the
# archive pool, or a queue directory, and check its md5sum and size
# against the .dsc. Handles the special multi-location case of a
# reused .orig.tar.gz. Returns (reject_message, orig_tar_gz path).
# NOTE(review): many structural lines (else:/continue branches, the
# loop over query results, the `found`/`suite_type` bindings and the
# initial `orig_tar_gz` setup) are not visible in this view of the file.
def check_dsc_against_db(self, file):
self.reject_message = "";
files = self.pkg.files;
dsc_files = self.pkg.dsc_files;
legacy_source_untouchable = self.pkg.legacy_source_untouchable;
# Try and find all files mentioned in the .dsc. This has
# to work harder to cope with the multiple possible
# locations of an .orig.tar.gz.
for dsc_file in dsc_files.keys():
if files.has_key(dsc_file):
actual_md5 = files[dsc_file]["md5sum"];
actual_size = int(files[dsc_file]["size"]);
found = "%s in incoming" % (dsc_file)
# Check the file does not already exist in the archive
q = self.projectB.query("SELECT f.size, f.md5sum FROM files f, location l WHERE (f.filename ~ '/%s$' OR f.filename = '%s') AND l.id = f.location" % (utils.regex_safe(dsc_file), dsc_file));
# "It has not broken them. It has fixed a
# brokenness. Your crappy hack exploited a bug in
# "(Come on! I thought it was always obvious that
# one just doesn't release different files with
# the same name and version.)"
# -- ajk@ on d-devel@l.d.o
# Ignore exact matches for .orig.tar.gz
if dsc_file[-12:] == ".orig.tar.gz":
# Same name, size and md5sum as the archived copy => harmless reuse.
if files.has_key(dsc_file) and \
int(files[dsc_file]["size"]) == int(i[0]) and \
files[dsc_file]["md5sum"] == i[1]:
self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ");
self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file));
elif dsc_file[-12:] == ".orig.tar.gz":
# Not in incoming: hunt for the .orig.tar.gz in the pool.
q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE (f.filename ~ '/%s$' OR f.filename = '%s') AND l.id = f.location" % (utils.regex_safe(dsc_file), dsc_file));
# Unfortunately, we make get more than one
# match here if, for example, the package was
# in potato but had a -sa upload in woody. So
# we need to choose the right one.
x = ql[0]; # default to something sane in case we don't match any or have only one
old_file = i[0] + i[1];
actual_md5 = apt_pkg.md5sum(utils.open_file(old_file));
actual_size = os.stat(old_file)[stat.ST_SIZE];
if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
legacy_source_untouchable[i[3]] = "";
old_file = x[0] + x[1];
actual_md5 = apt_pkg.md5sum(utils.open_file(old_file));
actual_size = os.stat(old_file)[stat.ST_SIZE];
dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
# See install() in katie...
self.pkg.orig_tar_id = x[3];
if suite_type == "legacy" or suite_type == "legacy-mixed":
self.pkg.orig_tar_location = "legacy";
self.pkg.orig_tar_location = x[4];
# Not there? Check the queue directories...
in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file);
# See process_it() in jennifer for explanation of this
if os.path.exists(in_unchecked):
return (self.reject_message, in_unchecked);
for dir in [ "Accepted", "New", "Byhand" ]:
in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (dir)],dsc_file);
if os.path.exists(in_otherdir):
actual_md5 = apt_pkg.md5sum(utils.open_file(in_otherdir));
actual_size = os.stat(in_otherdir)[stat.ST_SIZE];
self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file));
self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file));
# Finally, check the found file's checksum/size against the .dsc.
if actual_md5 != dsc_files[dsc_file]["md5sum"]:
self.reject("md5sum for %s doesn't match %s." % (found, file));
if actual_size != int(dsc_files[dsc_file]["size"]):
self.reject("size for %s doesn't match %s." % (found, file));
return (self.reject_message, orig_tar_gz);
# Debug wrapper around projectB.query: log the SQL and its wall-clock
# duration to stderr.
# NOTE(review): the return of the query result is not visible in this
# view of the file.
def do_query(self, q):
sys.stderr.write("query: \"%s\" ... " % (q));
before = time.time();
r = self.projectB.query(q);
time_diff = time.time()-before;
sys.stderr.write("took %.3f seconds.\n" % (time_diff));