3 # Utility functions for katie
4 # Copyright (C) 2001, 2002 James Troup <james@nocrew.org>
5 # $Id: katie.py,v 1.19 2002-05-10 00:24:33 troup Exp $
7 # This program is free software; you can redistribute it and/or modify
8 # it under the terms of the GNU General Public License as published by
9 # the Free Software Foundation; either version 2 of the License, or
10 # (at your option) any later version.
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU General Public License for more details.
17 # You should have received a copy of the GNU General Public License
18 # along with this program; if not, write to the Free Software
19 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 ###############################################################################
23 import cPickle, errno, os, pg, re, stat, string, sys, tempfile, time;
24 import utils, db_access;
25 import apt_inst, apt_pkg;
28 from string import lower;
30 ###############################################################################
# Pre-compiled regexps used throughout.  Raw strings are used so the
# patterns do not depend on Python passing unknown backslash escapes
# (e.g. "\d", "\.") through to the regex engine unchanged.
re_isanum = re.compile(r"^\d+$");
re_default_answer = re.compile(r"\[(.*)\]");
# Blank line separating paragraphs in a changes blurb (used to reformat
# the Changes: field for direport).
re_fdnic = re.compile(r"\n\n");
# Version suffixes identifying binary-only NMUs:
#   ".X.Y" => bin-only NMU of a maintainer upload,
#   ".X"   => bin-only NMU of a sourceful NMU.
re_bin_only_nmu_of_mu = re.compile(r"\.\d+\.\d+$");
re_bin_only_nmu_of_nmu = re.compile(r"\.\d+$");
38 ###############################################################################
40 # Convenience wrapper to carry around all the package information in
def __init__(self, **kwds):
    """Create a package-information holder; every keyword argument
    becomes an attribute of the new object."""
    for attr, value in kwds.items():
        setattr(self, attr, value);
def update(self, **kwds):
    """Merge the given keyword arguments into this object's attributes,
    overwriting any that already exist."""
    for attr, value in kwds.items():
        setattr(self, attr, value);
49 ###############################################################################
52 # Read in the group maintainer override file
def __init__ (self, Cnf):
    """Load the group-maintainer override file (if one is configured).

    Populates self.group_maint, a set-like dict keyed by the lower-cased
    entries of the file; used by is_an_nmu() to exempt group-maintained
    packages (e.g. Debian QA) from NMU handling.
    """
    self.group_maint = {};
    if Cnf.get("Dinstall::GroupOverrideFilename"):
        # The override file lives in the override directory.
        filename = Cnf["Dir::Override"] + Cnf["Dinstall::GroupOverrideFilename"];
        file = utils.open_file(filename);
        for line in file.readlines():
            # Drop comments, strip whitespace, normalise case.
            line = lower(string.strip(utils.re_comments.sub('', line)));
            # NOTE(review): no guard against empty lines is visible here —
            # presumably one exists (or blank keys are harmless); confirm.
            self.group_maint[line] = 1;
def is_an_nmu (self, pkg):
    """Guess whether the upload in `pkg' is a non-maintainer upload.

    Compares the .dsc Maintainer (and Uploaders) fields against the
    .changes Changed-By/Maintainer fields; group-maintained packages
    are never treated as NMUs.  NOTE(review): several lines (the
    binding of `dsc', the loop over uploaders and the return
    statements) are not visible in this view.
    """
    changes = pkg.changes;
    # Canonicalised (rfc822, name, email) of the .dsc maintainer,
    # defaulting to our own address when the field is absent.
    (dsc_rfc822, dsc_name, dsc_email) = utils.fix_maintainer (lower(dsc.get("maintainer",Cnf["Dinstall::MyEmailAddress"])));
    # changes["changedbyname"] == dsc_name is probably never true, but better safe than sorry
    if dsc_name == lower(changes["maintainername"]) and \
       (changes["changedby822"] == "" or lower(changes["changedbyname"]) == dsc_name):
        # Changed-By matches the maintainer: not an NMU.
        if dsc.has_key("uploaders"):
            # Anyone listed in Uploaders also counts as a maintainer.
            uploaders = string.split(lower(dsc["uploaders"]), ",");
            (rfc822, name, email) = utils.fix_maintainer (string.strip(i));
            uploadernames[name] = "";
            if uploadernames.has_key(lower(changes["changedbyname"])):
    # Some group maintained packages (e.g. Debian QA) are never NMU's
    if self.group_maint.has_key(lower(changes["maintaineremail"])):
91 ###############################################################################
def __init__(self, Cnf):
    """Set up shared state: the NMU detector, accept statistics, the
    per-package state holder, the template substitution map and the
    projectB database connection."""
    # Read in the group-maint override file
    self.nmu = nmu_p(Cnf);
    # Running totals reported after a batch of accepts.
    self.accept_count = 0;
    self.accept_bytes = 0L;
    # Per-upload state; the dicts are cleared and refilled between
    # packages (see init_vars/update_vars).
    self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
                   legacy_source_untouchable = {});
    # Initialize the substitution template mapping global
    Subst = self.Subst = {};
    Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"];
    Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"];
    Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"];
    Subst["__KATIE_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"];
    # NOTE(review): self.Cnf is read elsewhere in this class but its
    # assignment is not visible in this view — presumably set just
    # above; confirm.
    self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]));
    db_access.init(Cnf, self.projectB);
115 ###########################################################################
def init_vars (self):
    """Reset all per-package state ready for processing a new upload.

    The per-package dictionaries are cleared in place (rather than
    rebound) so that any outside references to them stay valid.
    """
    for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
        # getattr instead of exec-ing a built string: same effect,
        # no runtime code evaluation.
        getattr(self.pkg, i).clear();
    self.pkg.orig_tar_id = None;
    self.pkg.orig_tar_location = "";
123 ###########################################################################
def update_vars (self):
    """Restore per-package state from the <changesfile>.katie dump.

    Reads back, in the order dump_vars() wrote them, the five state
    dictionaries (updated in place) and the two orig.tar.gz scalars
    (rebound).  NOTE: unpickling is only safe because the dump was
    written by katie itself, not by untrusted parties.
    """
    dump_filename = self.pkg.changes_file[:-8]+".katie";
    dump_file = utils.open_file(dump_filename);
    p = cPickle.Unpickler(dump_file);
    # getattr/setattr instead of exec-ing built strings: same effect,
    # no runtime code evaluation.
    for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
        getattr(self.pkg, i).update(p.load());
    for i in [ "orig_tar_id", "orig_tar_location" ]:
        setattr(self.pkg, i, p.load());
    # Don't leak the file descriptor.
    dump_file.close();
135 ###########################################################################
# This could just dump the dictionaries as is, but I'd like to avoid
# this so there's some idea of what katie & lisa use from jennifer
def dump_vars(self, dest_dir):
    """Pickle the per-package state into <changesfile>.katie in dest_dir.

    Only a whitelisted subset of each dictionary is written, so the
    lists below document exactly which fields downstream tools rely on.
    NOTE(review): several connective lines (the `d_*' filtered-dict
    initialisations, the dsc copy loop body and the final p.dump()
    call) are not visible in this view.
    """
    # Bind each self.pkg attribute to a same-named local.
    for i in [ "changes", "dsc", "files", "dsc_files",
               "legacy_source_untouchable", "orig_tar_id", "orig_tar_location" ]:
        exec "%s = self.pkg.%s;" % (i,i);
    dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".katie");
    dump_file = utils.open_file(dump_filename, 'w');
    # Protocol 1 (binary) pickle.
    p = cPickle.Pickler(dump_file, 1);
    # Filtered copies that actually get pickled.
    for i in [ "d_changes", "d_dsc", "d_files", "d_dsc_files" ]:
    # Per-file fields worth keeping (mandatory and optional mixed).
    for file in files.keys():
        for i in [ "package", "version", "architecture", "type", "size",
                   "md5sum", "component", "location id", "source package",
                   "source version", "maintainer", "dbtype", "files id",
                   "new", "section", "priority", "oldfiles", "othercomponents",
            if files[file].has_key(i):
                d_files[file][i] = files[file][i];
    # Mandatory changes fields
    for i in [ "distribution", "source", "architecture", "version", "maintainer",
               "urgency", "fingerprint", "changedby822", "changedbyname",
               "maintainername", "maintaineremail", "closes" ]:
        d_changes[i] = changes[i];
    # Optional changes fields
    # FIXME: changes should be mandatory
    for i in [ "changed-by", "maintainer822", "filecontents", "format", "changes" ]:
        if changes.has_key(i):
            d_changes[i] = changes[i];
    # .dsc fields worth keeping.
    for i in [ "source", "version", "maintainer", "fingerprint", "uploaders" ]:
    for file in dsc_files.keys():
        d_dsc_files[file] = {};
        # Mandatory dsc_files fields
        for i in [ "size", "md5sum" ]:
            d_dsc_files[file][i] = dsc_files[file][i];
        # Optional dsc_files fields
        for i in [ "files id" ]:
            if dsc_files[file].has_key(i):
                d_dsc_files[file][i] = dsc_files[file][i];
    # Dump everything in a fixed, known order (update_vars() reads
    # them back in the same order).
    for i in [ d_changes, d_dsc, d_files, d_dsc_files,
               legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
190 ###########################################################################
# Set up the per-package template substitution mappings
def update_subst (self, reject_message = ""):
    """Refresh the Subst template map with the current package's
    architecture, filenames, maintainer addressing and reject message.

    Defensive about partially-parsed .changes files left behind by a
    crashed jennifer.  NOTE(review): the local bindings of `Subst'
    (presumably self.Subst) are not visible in this view.
    """
    changes = self.pkg.changes;
    # If jennifer crashed out in the right place, architecture may still be a string.
    if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
        changes["architecture"] = { "Unknown" : "" };
    # and maintainer822 may not exist.
    if not changes.has_key("maintainer822"):
        changes["maintainer822"] = self.Cnf["Dinstall::MyEmailAddress"];
    Subst["__ARCHITECTURE__"] = string.join(changes["architecture"].keys(), ' ' );
    Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file);
    Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "");
    # For source uploads the Changed-By field wins; otherwise Maintainer wins.
    if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
        Subst["__MAINTAINER_FROM__"] = changes["changedby822"];
        Subst["__MAINTAINER_TO__"] = changes["changedby822"] + ", " + changes["maintainer822"];
        Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown");
        # NOTE(review): an `else:' (the non-source / maintainer case)
        # appears to belong before these three lines; it is not
        # visible in this view.
        Subst["__MAINTAINER_FROM__"] = changes["maintainer822"];
        Subst["__MAINTAINER_TO__"] = changes["maintainer822"];
        Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown");
    # Bcc the package tracking system so it sees all mails.
    if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
        Subst["__MAINTAINER_TO__"] = Subst["__MAINTAINER_TO__"] + "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])
    # Apply any global override of the Maintainer field
    if self.Cnf.get("Dinstall::OverrideMaintainer"):
        Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"];
        Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"];
    Subst["__REJECT_MESSAGE__"] = reject_message;
    Subst["__SOURCE__"] = changes.get("source", "Unknown");
    Subst["__VERSION__"] = changes.get("version", "Unknown");
229 ###########################################################################
def build_summaries(self):
    """Build the long and short human-readable summaries of this upload.

    Returns (summary, short_summary); the long form additionally carries
    the Changes: text and any announcement information.  NOTE(review):
    guard lines (e.g. the byhand/new accounting and an `else:' for the
    normal-file case) are not visible in this view.
    """
    changes = self.pkg.changes;
    files = self.pkg.files;
    byhand = summary = new = "";
    # changes["distribution"] may not exist in corner cases
    # (e.g. unreadable changes files)
    if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
        changes["distribution"] = {};
    file_keys = files.keys();
    for file in file_keys:
        if files[file].has_key("byhand"):
            summary = summary + file + " byhand\n"
        elif files[file].has_key("new"):
            # NEW files show their proposed priority/section.
            summary = summary + "(new) %s %s %s\n" % (file, files[file]["priority"], files[file]["section"])
            if files[file].has_key("othercomponents"):
                summary = summary + "WARNING: Already present in %s distribution.\n" % (files[file]["othercomponents"])
            if files[file]["type"] == "deb":
                # Pull the Description straight out of the .deb's control file.
                summary = summary + apt_pkg.ParseSection(apt_inst.debExtractControl(utils.open_file(file)))["Description"] + '\n';
        # NOTE(review): an `else:' (the normal, already-known file case)
        # appears to be missing from this view before these lines.
        files[file]["pool name"] = utils.poolify (changes["source"], files[file]["component"])
        destination = self.Cnf["Dir::PoolRoot"] + files[file]["pool name"] + file
        summary = summary + file + "\n to " + destination + "\n"
    short_summary = summary;
    # This is for direport's benefit...
    f = re_fdnic.sub("\n .\n", changes.get("changes",""));
    summary = summary + "Changes: " + f;
    summary = summary + self.announce(short_summary, 0)
    return (summary, short_summary);
272 ###########################################################################
def close_bugs (self, summary, action):
    """Close the bugs listed in the .changes Closes: field, or — for
    NMUs — only tag them fixed.

    Appends a description of what was done to `summary' and returns it;
    mails/control messages are only sent when `action' is true.
    NOTE(review): the `for bug in bugs:' loop headers, `if action:'
    guards, `else:' branches and the return are not visible in this
    view.
    """
    changes = self.pkg.changes;
    bugs = changes["closes"].keys();
    if not self.nmu.is_an_nmu(self.pkg):
        # Maintainer upload: really close the bugs.
        summary = summary + "Closing bugs: ";
        summary = summary + "%s " % (bug);
        Subst["__BUG_NUMBER__"] = bug;
        if changes["distribution"].has_key("stable"):
            # Stable uploads get an extra caveat in the bug-closing mail.
            Subst["__STABLE_WARNING__"] = """
Note that this package is not part of the released stable Debian
distribution.  It may have dependencies on other unreleased software,
or other instabilities.  Please take care if you wish to install it.
The update will eventually make its way into the next released Debian
        # NOTE(review): the closing triple-quote of the warning string
        # and the matching `else:' are not visible in this view.
        Subst["__STABLE_WARNING__"] = "";
        mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.bug-close");
        utils.send_mail (mail_message, "");
        self.Logger.log(["closing bugs"]+bugs);
    # NMU: don't close, just downgrade to severity fixed via control@.
    summary = summary + "Setting bugs to severity fixed: ";
    control_message = "";
    summary = summary + "%s " % (bug);
    control_message = control_message + "tag %s + fixed\n" % (bug);
    if action and control_message != "":
        Subst["__CONTROL_MESSAGE__"] = control_message;
        mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.bug-nmu-fixed");
        utils.send_mail (mail_message, "");
    self.Logger.log(["setting bugs to fixed"]+bugs);
    summary = summary + "\n";
319 ###########################################################################
def announce (self, short_summary, action):
    """Send announcement mails to each target distribution's Announce
    list, then optionally close bugs.

    Returns text describing what was (or would be) announced; mails
    are only sent when `action' is true.  NOTE(review): the early
    return for non-announceable uploads, the `lists_done'/`summary'
    initialisation, the `continue' and the `if action:' guard are not
    visible in this view.
    """
    changes = self.pkg.changes;
    # Only do announcements for source uploads with a recent dpkg-dev installed
    if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):
    Subst["__SHORT_SUMMARY__"] = short_summary;
    for dist in changes["distribution"].keys():
        list = Cnf.Find("Suite::%s::Announce" % (dist));
        # Announce to each list only once even if several suites share it.
        if list == "" or lists_done.has_key(list):
        lists_done[list] = 1;
        summary = summary + "Announcing to %s\n" % (list);
        Subst["__ANNOUNCE_LIST_ADDRESS__"] = list;
        # Bcc the package tracking system so it sees the announcement too.
        if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
            Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"]);
        mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.announce");
        utils.send_mail (mail_message, "");
    if Cnf.get("Dinstall::CloseBugs"):
        summary = self.close_bugs(summary, action);
354 ###########################################################################
def accept (self, summary, short_summary):
    """Accept the current upload.

    Moves the .changes and all its files into the accepted queue,
    dumps katie's state next to them, mails the maintainer, announces
    to lists, and (optionally) symlinks everything into the accepted
    auto-build area.  NOTE(review): the local bindings of `Cnf'/`Subst'
    and the `filename =' assignment in the orig.tar.gz branch are not
    visible in this view.
    """
    files = self.pkg.files;
    self.Logger.log(["Accepting changes",self.pkg.changes_file]);
    # Dump our state next to the files so later tools (katie/lisa)
    # can reload it.
    self.dump_vars(Cnf["Dir::Queue::Accepted"]);
    # Move all the files into the accepted directory
    utils.move(self.pkg.changes_file, Cnf["Dir::Queue::Accepted"]);
    file_keys = files.keys();
    for file in file_keys:
        utils.move(file, Cnf["Dir::Queue::Accepted"]);
        self.accept_bytes = self.accept_bytes + float(files[file]["size"])
    self.accept_count = self.accept_count + 1;
    # Send accept mail, announce to lists, close bugs and check for
    # override disparities
    if not Cnf["Dinstall::Options::No-Mail"]:
        Subst["__SUITE__"] = "";
        Subst["__SUMMARY__"] = summary;
        mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.accepted");
        utils.send_mail(mail_message, "")
        self.announce(short_summary, 1)
    # Special support to enable clean auto-building of accepted packages
    if Cnf.FindB("Dinstall::SpecialAcceptedAutoBuild") and \
       self.pkg.changes["distribution"].has_key("unstable"):
        self.projectB.query("BEGIN WORK");
        for file in file_keys:
            src = os.path.join(Cnf["Dir::Queue::Accepted"], file);
            dest = os.path.join(Cnf["Dir::AcceptedAutoBuild"], file);
            # Create a symlink to it
            os.symlink(src, dest);
            # Add it to the list of packages for later processing by apt-ftparchive
            self.projectB.query("INSERT INTO unstable_accepted (filename, in_accepted) VALUES ('%s', 't')" % (dest));
        # If the .orig.tar.gz is in the pool, create a symlink to
        # it (if one doesn't already exist)
        if self.pkg.orig_tar_id:
            # Determine the .orig.tar.gz file name
            for dsc_file in self.pkg.dsc_files.keys():
                if dsc_file[-12:] == ".orig.tar.gz":
            dest = os.path.join(Cnf["Dir::AcceptedAutoBuild"],filename);
            # If it doesn't exist, create a symlink
            if not os.path.exists(dest):
                # Find the .orig.tar.gz in the pool
                q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id));
                # NOTE(review): the `ql = q.getresult()' and empty-result
                # check framing this fubar() are not visible in this view.
                utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id));
                src = os.path.join(ql[0][0], ql[0][1]);
                os.symlink(src, dest);
                # Add it to the list of packages for later processing by apt-ftparchive
                self.projectB.query("INSERT INTO unstable_accepted (filename, in_accepted) VALUES ('%s', 't')" % (dest));
        self.projectB.query("COMMIT WORK");
416 ###########################################################################
def check_override (self):
    """Mail the maintainer when a .deb's section or priority differs
    from the override table.

    Only runs for sourceful, mail-enabled uploads with the disparity
    check enabled.  NOTE(review): the early `return', the `summary'
    initialisation, the `continue' for the non-US special case and the
    empty-summary guard are not visible in this view.
    """
    changes = self.pkg.changes;
    files = self.pkg.files;
    # Abandon the check if:
    # a) it's a non-sourceful upload
    # b) override disparity checks have been disabled
    # c) we're not sending mail
    if not changes["architecture"].has_key("source") or \
       not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
       Cnf["Dinstall::Options::No-Mail"]:
    for file in files.keys():
        # Only already-known .debs have override data to compare.
        if not files[file].has_key("new") and files[file]["type"] == "deb":
            section = files[file]["section"];
            override_section = files[file]["override section"];
            if lower(section) != lower(override_section) and section != "-":
                # Ignore this; it's a common mistake and not worth whining about
                if lower(section) == "non-us/main" and lower(override_section) == "non-us":
                summary = summary + "%s: section is overridden from %s to %s.\n" % (file, section, override_section);
            priority = files[file]["priority"];
            override_priority = files[file]["override priority"];
            if priority != override_priority and priority != "-":
                summary = summary + "%s: priority is overridden from %s to %s.\n" % (file, priority, override_priority);
    Subst["__SUMMARY__"] = summary;
    mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/jennifer.override-disparity");
    utils.send_mail (mail_message, "");
455 ###########################################################################
def force_move (self, files):
    """Forcefully move files from the current directory to the reject
    directory.  If any file already exists it will be moved to the
    morgue to make way for the new file.

    NOTE(review): the `for file in files:' loop header, the `continue'
    after the access check and the try/except framing around the
    os.open() claims are not visible in this view.
    """
    # Skip any files which don't exist or which we don't have permission to copy.
    if os.access(file,os.R_OK) == 0:
    dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file);
    # Claim the destination with O_EXCL so a concurrent writer can't
    # sneak a file in under us.
    os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);
    # File exists?  Let's try and move it to the morgue
    if errno.errorcode[e.errno] == 'EEXIST':
        morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file);
        morgue_file = utils.find_next_free(morgue_file);
    except utils.tried_too_hard_exc:
        # Something's either gone badly Pete Tong, or
        # someone is trying to exploit us.
        utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file));
    utils.move(dest_file, morgue_file, perms=0660);
    # Try the exclusive claim again now the old file is out of the way.
    os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);
    utils.warn("**WARNING** failed to claim %s in the reject directory." % (file));
    # If we got here, we own the destination file, so we can
    # safely overwrite it.
    utils.move(file, dest_file, 1, perms=0660);
495 ###########################################################################
def do_reject (self, manual = 0, reject_message = ""):
    """Reject the current upload.

    Optionally spawns an editor so the operator can compose a manual
    rejection message, moves all the upload's files into the reject
    directory, writes a <changes>.reason file and mails the uploader.
    NOTE(review): several framing lines (the edit/answer loop body,
    the `Cnf'/`Subst'/`pkg' local bindings and the automatic-vs-manual
    branch structure) are not visible in this view.
    """
    # If we weren't given a manual rejection message, spawn an
    # editor so the user can add one in...
    if manual and not reject_message:
        # NOTE(review): tempfile.mktemp() is race-prone; the O_EXCL
        # open below mitigates, but tempfile.mkstemp() would be safer.
        temp_filename = tempfile.mktemp();
        fd = os.open(temp_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0700);
        editor = os.environ.get("EDITOR","vi")
        os.system("%s %s" % (editor, temp_filename))
        file = utils.open_file(temp_filename);
        reject_message = string.join(file.readlines());
        print "Reject message:";
        print utils.prefix_multi_line_string(reject_message," ");
        prompt = "[R]eject, Edit, Abandon, Quit ?"
        # Keep prompting until we get one of the valid single letters.
        while string.find(prompt, answer) == -1:
            answer = utils.our_raw_input(prompt);
            m = re_default_answer.search(prompt);
            answer = string.upper(answer[:1]);
        os.unlink(temp_filename);
    reason_filename = pkg.changes_file[:-8] + ".reason";
    reject_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename;
    # Move all the files into the reject directory
    reject_files = pkg.files.keys() + [pkg.changes_file];
    self.force_move(reject_files);
    # If we fail here someone is probably trying to exploit the race
    # so let's just raise an exception ...
    if os.path.exists(reject_filename):
        os.unlink(reject_filename);
    fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);
    # Automatic rejection: stock rejector address, tagged header.
    Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"];
    Subst["__MANUAL_REJECT_MESSAGE__"] = "";
    Subst["__CC__"] = "X-Katie-Rejection: automatic (moo)";
    os.write(fd, reject_message);
    reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/katie.rejected");
    # Build up the rejection email
    user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"]);
    Subst["__REJECTOR_ADDRESS__"] = user_email_address;
    Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message;
    Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"];
    reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/katie.rejected");
    # Write the rejection email out as the <foo>.reason file
    os.write(fd, reject_mail_message);
    # Send the rejection mail if appropriate
    if not Cnf["Dinstall::Options::No-Mail"]:
        utils.send_mail (reject_mail_message, "");
    self.Logger.log(["rejected", pkg.changes_file]);
573 ################################################################################
# Ensure that source exists somewhere in the archive for the binary
# upload being processed.
#
# (1) exact match              => 1.0-3
# (2) Bin-only NMU of an MU    => 1.0-3.0.1
# (3) Bin-only NMU of a sourceful-NMU => 1.0-3.1.1
def source_exists (self, package, source_version):
    """Check that source for `package' exists for `source_version',
    allowing for the binary-only-NMU version encodings listed above.

    NOTE(review): the return statements after each successful match
    are not visible in this view.
    """
    q = self.projectB.query("SELECT s.version FROM source s WHERE s.source = '%s'" % (package));
    # Reduce the query results to a list of version numbers
    ql = map(lambda x: x[0], q.getresult());
    # (1) exact match.
    if ql.count(source_version):
    # (2) strip the ".X.Y" suffix of a bin-only NMU of an MU.
    orig_source_version = re_bin_only_nmu_of_mu.sub('', source_version);
    if ql.count(orig_source_version):
    # (3) strip the ".X" suffix of a bin-only NMU of a sourceful NMU.
    orig_source_version = re_bin_only_nmu_of_nmu.sub('', source_version);
    if ql.count(orig_source_version):
605 ################################################################################
def in_override_p (self, package, component, suite, binary_type, file):
    """Look `package' up in the override table for `suite'/`component'.

    On success, records the override section and priority in
    files[file] for later disparity checking.  NOTE(review): the
    assignment of `type' (from binary_type), the `else:' branch and
    the return statements are not visible in this view.
    """
    files = self.pkg.files;
    if binary_type == "": # must be source
    # Override suite name; used for example with proposed-updates
    if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
        suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)];
    # Avoid <undef> on unknown distributions
    suite_id = db_access.get_suite_id(suite);
    component_id = db_access.get_component_id(component);
    type_id = db_access.get_override_type_id(type);
    # FIXME: nasty non-US specific hack
    if lower(component[:7]) == "non-us/":
        component = component[7:];
    q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
                            % (package, suite_id, component_id, type_id));
    result = q.getresult();
    # If checking for a source package fall back on the binary override type
    if type == "dsc" and not result:
        type_id = db_access.get_override_type_id("deb");
        q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
                                % (package, suite_id, component_id, type_id));
        result = q.getresult();
    # Remember the section and priority so we can check them later if appropriate
    files[file]["override section"] = result[0][0];
    files[file]["override priority"] = result[0][1];
647 ################################################################################
def reject (self, str, prefix="Rejected: "):
    """Append a prefixed line to the accumulated rejection message.

    NOTE(review): a guard (presumably `if str:') between the def and
    the body appears to be missing from this view.
    """
    # Unlike other rejects we add new lines first to avoid trailing
    # new lines when this message is passed back up to a caller.
    if self.reject_message:
        self.reject_message = self.reject_message + "\n";
    self.reject_message = self.reject_message + prefix + str;
def check_binaries_against_db(self, file, suite):
    """Check an incoming binary against what the archive already holds.

    Records any existing versions of the package in `suite' under
    files[file]["oldfiles"][suite], rejects when the upload would not
    be a version upgrade within that suite, and rejects when an
    identical (package, version, architecture) binary already exists
    anywhere in the archive.  Returns the accumulated rejection
    message ("" when everything is fine).
    """
    self.reject_message = "";
    entry = self.pkg.files[file];
    new_version = entry["version"];
    # All binaries of this package name (same arch, or arch 'all')
    # currently associated with the target suite.
    q = self.projectB.query("SELECT b.id, b.version, f.filename, l.path, c.name FROM binaries b, bin_associations ba, suite s, location l, component c, architecture a, files f WHERE b.package = '%s' AND s.suite_name = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all') AND ba.bin = b.id AND ba.suite = s.id AND b.architecture = a.id AND f.location = l.id AND l.component = c.id AND b.file = f.id"
                            % (entry["package"], suite, entry["architecture"]))
    for oldfile in q.dictresult():
        entry["oldfiles"][suite] = oldfile;
        # Per-suite version check only; no cross-suite checking done (yet).
        # VersionCompare != 1  <=>  new_version <= old version.
        if apt_pkg.VersionCompare(new_version, oldfile["version"]) != 1:
            self.reject("%s: old version (%s) >= new version (%s)." % (file, oldfile["version"], new_version));
    # An exact (package, version, architecture) match anywhere in the
    # archive can never be overwritten.
    q = self.projectB.query("SELECT b.id FROM binaries b, architecture a WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s' AND a.id = b.architecture" % (entry["package"], entry["version"], entry["architecture"]))
    if q.getresult() != []:
        self.reject("can not overwrite existing copy of '%s' already in the archive." % (file));
    return self.reject_message;
676 ################################################################################
def check_source_against_db(self, file):
    """Ensure source is newer than existing source in target suites.

    Compares the .dsc's version against every version of the same
    source package already in each target suite, accumulating a
    rejection for any that would not be an upgrade.
    """
    self.reject_message = "";
    changes = self.pkg.changes;
    # NOTE(review): `dsc' is read below but its binding (presumably
    # self.pkg.dsc) is not visible in this view.
    package = dsc.get("source");
    new_version = dsc.get("version");
    for suite in changes["distribution"].keys():
        # NOTE(review): the query's "%"-argument line (package, suite)
        # is not visible in this view.
        q = self.projectB.query("SELECT s.version FROM source s, src_associations sa, suite su WHERE s.source = '%s' AND su.suite_name = '%s' AND sa.source = s.id AND sa.suite = su.id"
        ql = map(lambda x: x[0], q.getresult());
        for old_version in ql:
            # VersionCompare != 1  <=>  new_version <= old_version.
            if apt_pkg.VersionCompare(new_version, old_version) != 1:
                self.reject("%s: Old version `%s' >= new version `%s'." % (file, old_version, new_version));
    return self.reject_message;
695 ################################################################################
def check_dsc_against_db(self, file):
    """Cross-check every file listed in the .dsc against the incoming
    upload, the archive pool and the queue directories.

    Locates the .orig.tar.gz (recording its files id and location for
    install() via self.pkg.orig_tar_id/orig_tar_location) and rejects
    on md5sum/size mismatches or attempts to overwrite existing archive
    files.  Returns (reject_message, orig_tar_gz).  NOTE(review): many
    connective lines (result-set iteration headers, `else:' framing,
    `found'/`suite_type' updates) are not visible in this view;
    indentation below is best-effort.
    """
    self.reject_message = "";
    files = self.pkg.files;
    dsc_files = self.pkg.dsc_files;
    legacy_source_untouchable = self.pkg.legacy_source_untouchable;
    # Try and find all files mentioned in the .dsc.  This has
    # to work harder to cope with the multiple possible
    # locations of an .orig.tar.gz.
    for dsc_file in dsc_files.keys():
        if files.has_key(dsc_file):
            # The file is part of this upload.
            actual_md5 = files[dsc_file]["md5sum"];
            actual_size = int(files[dsc_file]["size"]);
            found = "%s in incoming" % (dsc_file)
            # Check the file does not already exist in the archive
            q = self.projectB.query("SELECT f.size, f.md5sum FROM files f, location l WHERE (f.filename ~ '/%s$' OR f.filename = '%s') AND l.id = f.location" % (utils.regex_safe(dsc_file), dsc_file));
            # "It has not broken them.  It has fixed a
            # brokenness.  Your crappy hack exploited a bug in
            #
            # "(Come on!  I thought it was always obvious that
            # one just doesn't release different files with
            # the same name and version.)"
            #                        -- ajk@ on d-devel@l.d.o
            # Ignore exact matches for .orig.tar.gz
            if dsc_file[-12:] == ".orig.tar.gz":
                if files.has_key(dsc_file) and \
                   int(files[dsc_file]["size"]) == int(i[0]) and \
                   files[dsc_file]["md5sum"] == i[1]:
                    self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ");
            self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file));
        elif dsc_file[-12:] == ".orig.tar.gz":
            # Not part of the upload: hunt for the .orig.tar.gz in the pool.
            q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE (f.filename ~ '/%s$' OR f.filename = '%s') AND l.id = f.location" % (utils.regex_safe(dsc_file), dsc_file));
            # Unfortunately, we make get more than one
            # match here if, for example, the package was
            # in potato but had a -sa upload in woody.  So
            # we need to choose the right one.
            x = ql[0]; # default to something sane in case we don't match any or have only one
            old_file = i[0] + i[1];
            actual_md5 = apt_pkg.md5sum(utils.open_file(old_file));
            actual_size = os.stat(old_file)[stat.ST_SIZE];
            if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
            # Other matching pool copies must never be touched by install().
            legacy_source_untouchable[i[3]] = "";
            old_file = x[0] + x[1];
            actual_md5 = apt_pkg.md5sum(utils.open_file(old_file));
            actual_size = os.stat(old_file)[stat.ST_SIZE];
            dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
            # See install() in katie...
            self.pkg.orig_tar_id = x[3];
            if suite_type == "legacy" or suite_type == "legacy-mixed":
                self.pkg.orig_tar_location = "legacy";
            self.pkg.orig_tar_location = x[4];
        # Not there? Check the queue directories...
        in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file);
        # See process_it() in jennifer for explanation of this
        if os.path.exists(in_unchecked):
            return (self.reject_message, in_unchecked);
        for dir in [ "Accepted", "New", "Byhand" ]:
            in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (dir)],dsc_file);
            if os.path.exists(in_otherdir):
                actual_md5 = apt_pkg.md5sum(utils.open_file(in_otherdir));
                actual_size = os.stat(in_otherdir)[stat.ST_SIZE];
        # Nowhere to be found: reject (message depends on file kind).
        self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file));
        self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file));
        # Whatever copy we settled on must match the .dsc's checksums.
        if actual_md5 != dsc_files[dsc_file]["md5sum"]:
            self.reject("md5sum for %s doesn't match %s." % (found, file));
        if actual_size != int(dsc_files[dsc_file]["size"]):
            self.reject("size for %s doesn't match %s." % (found, file));
    return (self.reject_message, orig_tar_gz);
def do_query(self, q):
    """Run a query through projectB, logging it and its wall-clock
    duration to stderr (debugging aid), and return the result.

    The visible code computed the result but never returned it, so the
    wrapper was useless to callers; `return r' fixes that.
    """
    sys.stderr.write("query: \"%s\" ... " % (q));
    before = time.time();
    r = self.projectB.query(q);
    time_diff = time.time()-before;
    sys.stderr.write("took %.3f seconds.\n" % (time_diff));
    return r;