3 # Utility functions for katie
4 # Copyright (C) 2001 James Troup <james@nocrew.org>
5 # $Id: katie.py,v 1.16 2002-04-24 01:56:24 troup Exp $
7 # This program is free software; you can redistribute it and/or modify
8 # it under the terms of the GNU General Public License as published by
9 # the Free Software Foundation; either version 2 of the License, or
10 # (at your option) any later version.
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU General Public License for more details.
17 # You should have received a copy of the GNU General Public License
18 # along with this program; if not, write to the Free Software
19 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 ###############################################################################
23 import cPickle, errno, os, pg, re, stat, string, sys, tempfile, time;
24 import utils, db_access;
25 import apt_inst, apt_pkg;
28 from string import lower;
30 ###############################################################################
# Pre-compiled regular expressions used throughout this module.
# NB: patterns containing regex escapes like \d and \. are raw strings;
# the originals relied on undefined string escapes (a warning/error in
# modern Python).
re_isanum = re.compile(r"^\d+$");
re_default_answer = re.compile(r"\[(.*)\]");
# Blank line separating paragraphs (a real "\n\n", so a plain string is fine).
re_fdnic = re.compile("\n\n");
# Bin-only NMU of a maintainer upload: version ends in ".X.Y".
re_bin_only_nmu_of_mu = re.compile(r"\.\d+\.\d+$");
# Bin-only NMU of a sourceful NMU: version ends in ".X".
re_bin_only_nmu_of_nmu = re.compile(r"\.\d+$");
38 ###############################################################################
40 # Convenience wrapper to carry around all the package information in
43 def __init__(self, **kwds):
44 self.__dict__.update(kwds);
def update(self, **kwds):
    """Merge the supplied keyword arguments into this object's
    attributes, overwriting any that already exist."""
    for key in kwds.keys():
        setattr(self, key, kwds[key]);
49 ###############################################################################
52 # Read in the group maintainer override file
def __init__ (self, Cnf):
    """Read the group-maintainer override file (named by
    Dinstall::GroupOverrideFilename under Dir::OverrideDir, if
    configured) into self.group_maint."""
    # Maps a lower-cased maintainer address to 1; used as a set.
    self.group_maint = {};

    if Cnf.get("Dinstall::GroupOverrideFilename"):
        filename = Cnf["Dir::OverrideDir"] + Cnf["Dinstall::GroupOverrideFilename"];
        file = utils.open_file(filename);
        for line in file.readlines():
            # Strip comments and surrounding whitespace, then normalise case.
            line = lower(string.strip(utils.re_comments.sub('', line)));
            # NOTE(review): a guard (probably `if line != "":') appears to be
            # missing from this excerpt between the two statements here.
            self.group_maint[line] = 1;
def is_an_nmu (self, pkg):
    """Decide whether the upload described by `pkg' is an NMU
    (non-maintainer upload).
    NOTE(review): several lines of this method are missing from this
    excerpt (the `dsc'/`Cnf' local bindings, the uploaders loop header,
    the `uploadernames' initialisation and all return statements); the
    visible code is reproduced as-is."""
    changes = pkg.changes;
    # NOTE(review): `dsc' and `Cnf' are used below but their bindings are
    # not visible here.
    (dsc_rfc822, dsc_name, dsc_email) = utils.fix_maintainer (lower(dsc.get("maintainer",Cnf["Dinstall::MyEmailAddress"])));
    # changes["changedbyname"] == dsc_name is probably never true, but better safe than sorry
    if dsc_name == lower(changes["maintainername"]) and \
       (changes["changedby822"] == "" or lower(changes["changedbyname"]) == dsc_name):
    # An upload by someone listed in the .dsc Uploaders field is not an NMU.
    if dsc.has_key("uploaders"):
        uploaders = string.split(lower(dsc["uploaders"]), ",");
        # NOTE(review): the `for i in uploaders:' loop header and the
        # `uploadernames = {}' initialisation appear to be missing here.
        (rfc822, name, email) = utils.fix_maintainer (string.strip(i));
        uploadernames[name] = "";
    if uploadernames.has_key(lower(changes["changedbyname"])):
    # Some group maintained packages (e.g. Debian QA) are never NMU's
    if self.group_maint.has_key(lower(changes["maintaineremail"])):
91 ###############################################################################
def __init__(self, Cnf):
    """Initialise per-run state: the NMU checker, accept counters, the
    per-upload package state holder, the mail template substitution map
    and the projectB database connection."""
    # NOTE(review): the line(s) binding self.Cnf = Cnf appear to be missing
    # from this excerpt; self.Cnf is used by other methods below.
    # Read in the group-maint override file
    self.nmu = nmu_p(Cnf);
    # Running totals for accepted uploads.
    self.accept_count = 0;
    self.accept_bytes = 0L;
    # Fresh, empty per-upload package state.
    self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
                   legacy_source_untouchable = {});

    # Initialize the substitution template mapping global
    Subst = self.Subst = {};
    Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"];
    Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"];
    Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"];
    Subst["__KATIE_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"];

    # Open the projectB database connection used by all queries below.
    self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]));
    db_access.init(Cnf, self.projectB);
115 ###########################################################################
def init_vars (self):
    """Reset all per-upload package state on self.pkg.

    The five state dictionaries are cleared in place (so any outside
    references to them remain valid), and the .orig.tar.gz bookkeeping
    fields are reset to their defaults."""
    # getattr() replaces the original string-built `exec`, which was
    # fragile and unnecessary for plain attribute access.
    for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
        getattr(self.pkg, i).clear();
    self.pkg.orig_tar_id = None;
    self.pkg.orig_tar_location = "";
123 ###########################################################################
def update_vars (self):
    """Reload the package state previously written by dump_vars().

    Reads the pickled <changes-basename>.katie file and merges the five
    state dictionaries into self.pkg, then restores orig_tar_id and
    orig_tar_location.  The load order must match the dump order used
    by dump_vars()."""
    dump_filename = self.pkg.changes_file[:-8]+".katie";
    dump_file = utils.open_file(dump_filename);
    # NOTE: unpickling is only safe because the .katie file is produced
    # by our own dump_vars(); never point this at untrusted input.
    p = cPickle.Unpickler(dump_file);
    # getattr()/setattr() replace the original string-built `exec`.
    for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
        getattr(self.pkg, i).update(p.load());
    for i in [ "orig_tar_id", "orig_tar_location" ]:
        setattr(self.pkg, i, p.load());
    # Close the dump file; the original left this descriptor open.
    dump_file.close();
135 ###########################################################################
# This could just dump the dictionaries as is, but I'd like to avoid
# this so there's some idea of what katie & lisa use from jennifer
def dump_vars(self, dest_dir):
    """Pickle the interesting parts of the package state out to a
    <changes-basename>.katie file in `dest_dir' for later reloading by
    update_vars().  Only a whitelisted subset of each dictionary's keys
    is written.
    NOTE(review): several lines are missing from this excerpt (the d_*
    dict initialisers, per-file dict creation, the dsc copy body and the
    final p.dump()/close calls); the visible code is reproduced as-is."""
    # Bind local aliases (changes, dsc, files, ...) to self.pkg attributes.
    for i in [ "changes", "dsc", "files", "dsc_files",
               "legacy_source_untouchable", "orig_tar_id", "orig_tar_location" ]:
        exec "%s = self.pkg.%s;" % (i,i);
    dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".katie");
    dump_file = utils.open_file(dump_filename, 'w');
    # Protocol 1 (binary) pickler.
    p = cPickle.Pickler(dump_file, 1);
    # NOTE(review): the loop body initialising these d_* names (probably
    # `exec "%s = {};" % (i)') is missing from this excerpt.
    for i in [ "d_changes", "d_dsc", "d_files", "d_dsc_files" ]:
    for file in files.keys():
        # NOTE(review): `d_files[file] = {};' appears to be missing here.
        # Whitelisted per-file fields.
        for i in [ "package", "version", "architecture", "type", "size",
                   "md5sum", "component", "location id", "source package",
                   "source version", "maintainer", "dbtype", "files id",
                   "new", "section", "priority", "oldfiles", "othercomponents" ]:
            if files[file].has_key(i):
                d_files[file][i] = files[file][i];
    # Mandatory changes fields
    for i in [ "distribution", "source", "architecture", "version", "maintainer",
               "urgency", "fingerprint", "changedby822", "changedbyname",
               "maintainername", "maintaineremail", "closes" ]:
        d_changes[i] = changes[i];
    # Optional changes fields
    # FIXME: changes should be mandatory
    for i in [ "changed-by", "maintainer822", "filecontents", "format", "changes" ]:
        if changes.has_key(i):
            d_changes[i] = changes[i];
    # Whitelisted .dsc fields.
    # NOTE(review): the copy body of this loop is missing from this excerpt.
    for i in [ "source", "version", "maintainer", "fingerprint", "uploaders" ]:
    for file in dsc_files.keys():
        d_dsc_files[file] = {};
        # Mandatory dsc_files fields
        for i in [ "size", "md5sum" ]:
            d_dsc_files[file][i] = dsc_files[file][i];
        # Optional dsc_files fields
        for i in [ "files id" ]:
            if dsc_files[file].has_key(i):
                d_dsc_files[file][i] = dsc_files[file][i];
    # NOTE(review): the `p.dump(i)' loop body and dump_file.close() are
    # missing from this excerpt.
    for i in [ d_changes, d_dsc, d_files, d_dsc_files,
               legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
189 ###########################################################################
# Set up the per-package template substitution mappings
def update_subst (self, reject_message = ""):
    """Refresh self.Subst with per-package values (maintainer addresses,
    architecture list, version, reject message, ...) used when expanding
    mail templates.
    NOTE(review): the local binding of `Subst' (presumably
    `Subst = self.Subst;') is missing from this excerpt."""
    changes = self.pkg.changes;
    # If jennifer crashed out in the right place, architecture may still be a string.
    # NOTE(review): DictType presumably comes from the `types' module; its
    # import is not visible in this excerpt.
    if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
        changes["architecture"] = { "Unknown" : "" };
    # and maintainer822 may not exist.
    if not changes.has_key("maintainer822"):
        changes["maintainer822"] = self.Cnf["Dinstall::MyEmailAddress"];

    Subst["__ARCHITECTURE__"] = string.join(changes["architecture"].keys(), ' ' );
    Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file);
    Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "");

    # For source uploads the Changed-By field wins; otherwise Maintainer wins.
    if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
        Subst["__MAINTAINER_FROM__"] = changes["changedby822"];
        Subst["__MAINTAINER_TO__"] = changes["changedby822"] + ", " + changes["maintainer822"];
        Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown");
    # NOTE(review): the `else:' introducing the next three assignments
    # appears to be missing from this excerpt.
        Subst["__MAINTAINER_FROM__"] = changes["maintainer822"];
        Subst["__MAINTAINER_TO__"] = changes["maintainer822"];
        Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown");
    if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
        # Bcc the package-tracking address for sourceful uploads.
        Subst["__MAINTAINER_TO__"] = Subst["__MAINTAINER_TO__"] + "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])

    Subst["__REJECT_MESSAGE__"] = reject_message;
    Subst["__SOURCE__"] = changes.get("source", "Unknown");
    Subst["__VERSION__"] = changes.get("version", "Unknown");
223 ###########################################################################
def build_summaries(self):
    """Build the long and short human-readable summaries of this upload
    (byhand/new markers, pool destinations, changelog text) and return
    them as (summary, short_summary).
    NOTE(review): a few lines are missing from this excerpt (the
    byhand/new flag assignments and an `else:' branch header)."""
    changes = self.pkg.changes;
    files = self.pkg.files;

    byhand = summary = new = "";

    # changes["distribution"] may not exist in corner cases
    # (e.g. unreadable changes files)
    if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
        changes["distribution"] = {};

    file_keys = files.keys();
    for file in file_keys:
        if files[file].has_key("byhand"):
            # NOTE(review): a `byhand = 1' assignment appears to be missing here.
            summary = summary + file + " byhand\n"
        elif files[file].has_key("new"):
            # NOTE(review): a `new = 1' assignment appears to be missing here.
            summary = summary + "(new) %s %s %s\n" % (file, files[file]["priority"], files[file]["section"])
            if files[file].has_key("othercomponents"):
                summary = summary + "WARNING: Already present in %s distribution.\n" % (files[file]["othercomponents"])
            if files[file]["type"] == "deb":
                # Pull the Description straight out of the .deb's control file.
                summary = summary + apt_pkg.ParseSection(apt_inst.debExtractControl(utils.open_file(file)))["Description"] + '\n';
        # NOTE(review): the branch header (`else:') for ordinary known files
        # appears to be missing from this excerpt.
            files[file]["pool name"] = utils.poolify (changes["source"], files[file]["component"])
            destination = self.Cnf["Dir::PoolRoot"] + files[file]["pool name"] + file
            summary = summary + file + "\n to " + destination + "\n"

    short_summary = summary;

    # This is for direport's benefit...
    f = re_fdnic.sub("\n .\n", changes.get("changes",""));

    summary = summary + "Changes: " + f;

    # announce() with action=0 only builds text; it sends no mail.
    summary = summary + self.announce(short_summary, 0)

    return (summary, short_summary);
266 ###########################################################################
def announce (self, short_summary, action):
    """Announce the upload to each suite's announce list and handle bug
    closures: maintainer uploads close their bugs, NMUs tag them fixed.
    When `action' is false, only the summary text is built.
    NOTE(review): many lines are missing from this excerpt (Subst/Cnf
    local bindings, `lists_done'/`summary' initialisation, several `if
    action:'/`else:' headers, loop headers, the closing quotes of the
    stable-warning string and the final return)."""
    changes = self.pkg.changes;

    # Only do announcements for source uploads with a recent dpkg-dev installed
    if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):

    Subst["__SHORT_SUMMARY__"] = short_summary;

    for dist in changes["distribution"].keys():
        list = Cnf.Find("Suite::%s::Announce" % (dist))
        # Skip suites with no list, and lists already announced to.
        if list == "" or lists_done.has_key(list):
        summary = summary + "Announcing to %s\n" % (list)
        Subst["__ANNOUNCE_LIST_ADDRESS__"] = list;
        mail_message = utils.TemplateSubst(Subst,open(Cnf["Dir::TemplatesDir"]+"/jennifer.announce","r").read());
        utils.send_mail (mail_message, "")

    bugs = changes["closes"].keys()
    # Maintainer uploads close their bugs ...
    if not self.nmu.is_an_nmu(self.pkg):
        summary = summary + "Closing bugs: "
        summary = summary + "%s " % (bug)
        Subst["__BUG_NUMBER__"] = bug;
        # NOTE(review): the closing `""";' of the string below and the
        # `else:' before the empty-warning assignment are missing from
        # this excerpt.
        if changes["distribution"].has_key("stable"):
            Subst["__STABLE_WARNING__"] = """
Note that this package is not part of the released stable Debian
distribution. It may have dependencies on other unreleased software,
or other instabilities. Please take care if you wish to install it.
The update will eventually make its way into the next released Debian
            Subst["__STABLE_WARNING__"] = "";
        mail_message = utils.TemplateSubst(Subst,open(Cnf["Dir::TemplatesDir"]+"/jennifer.bug-close","r").read());
        utils.send_mail (mail_message, "")
        self.Logger.log(["closing bugs"]+bugs);
    # ... NMUs only tag the bugs fixed.
    summary = summary + "Setting bugs to severity fixed: "
    summary = summary + "%s " % (bug)
    control_message = control_message + "tag %s + fixed\n" % (bug)
    if action and control_message != "":
        Subst["__CONTROL_MESSAGE__"] = control_message;
        mail_message = utils.TemplateSubst(Subst,open(Cnf["Dir::TemplatesDir"]+"/jennifer.bug-nmu-fixed","r").read());
        utils.send_mail (mail_message, "")
        self.Logger.log(["setting bugs to fixed"]+bugs);
    summary = summary + "\n"
331 ###########################################################################
def accept (self, summary, short_summary):
    """Accept the current upload: dump state, move the .changes and all
    its files into the accepted queue, send the accepted mail/announce,
    and optionally feed the unstable auto-build queue.
    NOTE(review): the local Cnf/Subst bindings and a few other lines are
    missing from this excerpt; `Cnf' and `Subst' are used unqualified
    below."""
    files = self.pkg.files;

    self.Logger.log(["Accepting changes",self.pkg.changes_file]);

    # Preserve the per-upload state alongside the accepted files.
    self.dump_vars(Cnf["Dir::QueueAcceptedDir"]);

    # Move all the files into the accepted directory
    utils.move(self.pkg.changes_file, Cnf["Dir::QueueAcceptedDir"]);
    file_keys = files.keys();
    for file in file_keys:
        utils.move(file, Cnf["Dir::QueueAcceptedDir"]);
        self.accept_bytes = self.accept_bytes + float(files[file]["size"])
    self.accept_count = self.accept_count + 1;

    # Send accept mail, announce to lists, close bugs and check for
    # override disparities
    if not Cnf["Dinstall::Options::No-Mail"]:
        Subst["__SUITE__"] = "";
        Subst["__SUMMARY__"] = summary;
        mail_message = utils.TemplateSubst(Subst,open(Cnf["Dir::TemplatesDir"]+"/jennifer.accepted","r").read());
        utils.send_mail(mail_message, "")
        self.announce(short_summary, 1)

    # Special support to enable clean auto-building of accepted packages
    if Cnf.get("Dinstall::SpecialAcceptedAutoBuild") and \
       self.pkg.changes["distribution"].has_key("unstable"):
        self.projectB.query("BEGIN WORK");
        for file in file_keys:
            src = os.path.join(Cnf["Dir::QueueAcceptedDir"], file);
            dest = os.path.join(Cnf["Dir::AcceptedAutoBuild"], file);
            # Create a symlink to it
            os.symlink(src, dest);
            # Add it to the list of packages for later processing by apt-ftparchive
            self.projectB.query("INSERT INTO unstable_accepted (filename, in_accepted) VALUES ('%s', 't')" % (dest));
        # If the .orig.tar.gz is in the pool, create a symlink to
        # it (if one doesn't already exist)
        if self.pkg.orig_tar_id:
            # Determine the .orig.tar.gz file name
            # NOTE(review): the loop body (probably `filename = dsc_file;')
            # is missing from this excerpt.
            for dsc_file in self.pkg.dsc_files.keys():
                if dsc_file[-12:] == ".orig.tar.gz":
            dest = os.path.join(Cnf["Dir::AcceptedAutoBuild"],filename);
            # If it doesn't exist, create a symlink
            if not os.path.exists(dest):
                # Find the .orig.tar.gz in the pool
                q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id));
                # NOTE(review): the `ql = q.getresult()' binding and the
                # empty-result guard are missing from this excerpt.
                utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id));
                src = os.path.join(ql[0][0], ql[0][1]);
                os.symlink(src, dest);
                # Add it to the list of packages for later processing by apt-ftparchive
                self.projectB.query("INSERT INTO unstable_accepted (filename, in_accepted) VALUES ('%s', 't')" % (dest));
        self.projectB.query("COMMIT WORK");
393 ###########################################################################
def check_override (self):
    """Mail the maintainer about any mismatch between the upload's
    section/priority and the override database values recorded by
    in_override_p().
    NOTE(review): a few lines are missing from this excerpt (the early
    return for binary-only uploads, `summary'/`Subst' initialisation,
    an empty-summary guard and a `continue')."""
    changes = self.pkg.changes;
    files = self.pkg.files;

    # Only check section & priority on sourceful uploads
    if not changes["architecture"].has_key("source"):

    for file in files.keys():
        # NEW packages have no override entry to disagree with.
        if not files[file].has_key("new") and files[file]["type"] == "deb":
            section = files[file]["section"];
            override_section = files[file]["override section"];
            if lower(section) != lower(override_section) and section != "-":
                # Ignore this; it's a common mistake and not worth whining about
                if lower(section) == "non-us/main" and lower(override_section) == "non-us":
                summary = summary + "%s: section is overridden from %s to %s.\n" % (file, section, override_section);
            priority = files[file]["priority"];
            override_priority = files[file]["override priority"];
            if priority != override_priority and priority != "-":
                summary = summary + "%s: priority is overridden from %s to %s.\n" % (file, priority, override_priority);

    Subst["__SUMMARY__"] = summary;
    mail_message = utils.TemplateSubst(Subst,utils.open_file(self.Cnf["Dir::TemplatesDir"]+"/jennifer.override-disparity").read());
    utils.send_mail (mail_message, "");
426 ###########################################################################
def force_move (self, files):
    """Forcefully move files from the current directory to the reject
    directory. If any file already exists it will be moved to the
    morgue to make way for the new file."""
    # NOTE(review): the `for file in files:' loop header, the Cnf local
    # binding and the try/except scaffolding around both O_EXCL opens are
    # missing from this excerpt; the visible code is reproduced as-is.

    # Skip any files which don't exist or which we don't have permission to copy.
    if os.access(file,os.R_OK) == 0:
    dest_file = os.path.join(Cnf["Dir::QueueRejectDir"], file);
    # Claim the destination atomically: O_EXCL makes this open fail if the
    # file already exists.
    os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);
    # File exists? Let's try and move it to the morgue
    if errno.errorcode[e.errno] == 'EEXIST':
        morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueRejectDir"],file);
        # Find a non-clashing name in the morgue.
        morgue_file = utils.find_next_free(morgue_file);
    except utils.tried_too_hard_exc:
        # Something's either gone badly Pete Tong, or
        # someone is trying to exploit us.
        utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file));
    utils.move(dest_file, morgue_file, perms=0660);
    # Second attempt to claim the destination after evicting the old file.
    os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);
    utils.warn("**WARNING** failed to claim %s in the reject directory." % (file));

    # If we got here, we own the destination file, so we can
    # safely overwrite it.
    utils.move(file, dest_file, 1, perms=0660);
466 ###########################################################################
def do_reject (self, manual = 0, reject_message = ""):
    """Reject the current upload: optionally let the operator compose and
    confirm the rejection text in $EDITOR, move everything into the
    reject queue, write the <changes>.reason file and (normally) send
    the rejection mail.
    NOTE(review): numerous lines are missing from this excerpt (the
    edit/confirm loop scaffolding, the R/E/A/Q answer handling, local
    Cnf/Subst/pkg bindings, the `if not manual:'/`else:' branch headers
    and the os.close() calls); the visible code is reproduced as-is."""
    # If we weren't given a manual rejection message, spawn an
    # editor so the user can add one in...
    if manual and not reject_message:
        temp_filename = tempfile.mktemp();
        # Mode 0700: the rejection text may quote the upload; keep private.
        fd = os.open(temp_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0700);
        editor = os.environ.get("EDITOR","vi")
        os.system("%s %s" % (editor, temp_filename))
        file = utils.open_file(temp_filename);
        reject_message = string.join(file.readlines());
        print "Reject message:";
        print utils.prefix_multi_line_string(reject_message," ");
        prompt = "[R]eject, Edit, Abandon, Quit ?"
        # Loop until the operator gives one of the prompt's letters.
        while string.find(prompt, answer) == -1:
            answer = utils.our_raw_input(prompt);
            # An empty answer falls back to the [bracketed] default.
            m = re_default_answer.search(prompt);
            answer = string.upper(answer[:1]);
        os.unlink(temp_filename);

    reason_filename = pkg.changes_file[:-8] + ".reason";
    reject_filename = Cnf["Dir::QueueRejectDir"] + '/' + reason_filename;

    # Move all the files into the reject directory
    reject_files = pkg.files.keys() + [pkg.changes_file];
    self.force_move(reject_files);

    # If we fail here someone is probably trying to exploit the race
    # so let's just raise an exception ...
    if os.path.exists(reject_filename):
        os.unlink(reject_filename);
    fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);

    # Automatic-rejection branch (its `if not manual:' header is missing
    # from this excerpt).
    Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"];
    Subst["__MANUAL_REJECT_MESSAGE__"] = "";
    Subst["__CC__"] = "X-Katie-Rejection: automatic (moo)";
    os.write(fd, reject_message);
    reject_mail_message = utils.TemplateSubst(Subst,utils.open_file(Cnf["Dir::TemplatesDir"]+"/katie.rejected").read());

    # Build up the rejection email
    user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"]);
    Subst["__REJECTOR_ADDRESS__"] = user_email_address;
    Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message;
    Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"];
    reject_mail_message = utils.TemplateSubst(Subst,utils.open_file(Cnf["Dir::TemplatesDir"]+"/katie.rejected").read());

    # Write the rejection email out as the <foo>.reason file
    os.write(fd, reject_mail_message);

    # Send the rejection mail if appropriate
    if not Cnf["Dinstall::Options::No-Mail"]:
        utils.send_mail (reject_mail_message, "");

    self.Logger.log(["rejected", pkg.changes_file]);
544 ################################################################################
546 # Ensure that source exists somewhere in the archive for the binary
547 # upload being processed.
549 # (1) exact match => 1.0-3
550 # (2) Bin-only NMU of an MU => 1.0-3.0.1
551 # (3) Bin-only NMU of a sourceful-NMU => 1.0-3.1.1
def source_exists (self, package, source_version):
    """Check that a source package/version matching this binary upload
    exists in the database, trying the three patterns described in the
    comment above (exact match, bin-only NMU of an MU, bin-only NMU of
    a sourceful NMU).
    NOTE(review): the `return 1'/`return 0' statements are missing from
    this excerpt; the visible code is reproduced as-is."""
    q = self.projectB.query("SELECT s.version FROM source s WHERE s.source = '%s'" % (package));

    # Reduce the query results to a list of version numbers
    ql = map(lambda x: x[0], q.getresult());
    # (1) exact match
    if ql.count(source_version):
    # (2) bin-only NMU of a maintainer upload: strip the trailing ".X.Y"
    orig_source_version = re_bin_only_nmu_of_mu.sub('', source_version);
    if ql.count(orig_source_version):
    # (3) bin-only NMU of a sourceful NMU: strip the trailing ".X"
    orig_source_version = re_bin_only_nmu_of_nmu.sub('', source_version);
    if ql.count(orig_source_version):
576 ################################################################################
def in_override_p (self, package, component, suite, binary_type, file):
    """Look up `package' in the override table for `suite', record the
    override section/priority on the file's entry in self.pkg.files.
    NOTE(review): several lines are missing from this excerpt (the
    `type' local binding derived from binary_type, the unknown-suite
    guard and the return statements); the visible code is reproduced
    as-is."""
    files = self.pkg.files;

    if binary_type == "": # must be source

    # Override suite name; used for example with proposed-updates
    if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
        suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)];

    # Avoid <undef> on unknown distributions
    suite_id = db_access.get_suite_id(suite);
    component_id = db_access.get_component_id(component);
    type_id = db_access.get_override_type_id(type);

    # FIXME: nasty non-US specific hack
    if lower(component[:7]) == "non-us/":
        component = component[7:];

    q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
                            % (package, suite_id, component_id, type_id));
    result = q.getresult();
    # If checking for a source package fall back on the binary override type
    if type == "dsc" and not result:
        type_id = db_access.get_override_type_id("deb");
        q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
                                % (package, suite_id, component_id, type_id));
        result = q.getresult();

    # Remember the section and priority so we can check them later if appropriate
    files[file]["override section"] = result[0][0];
    files[file]["override priority"] = result[0][1];
618 ################################################################################
def reject (self, str, prefix="Rejected: "):
    """Append `prefix' + `str' as a new entry on self.reject_message.

    Entries are newline-separated; the separator is added in front of
    each new entry so the accumulated message never carries a trailing
    newline when handed back to a caller."""
    if self.reject_message:
        self.reject_message += "\n";
    self.reject_message += prefix + str;
def check_binaries_against_db(self, file, suite):
    """Check the binary `file' against the database for `suite'.

    Records any existing binaries under the file's "oldfiles" entry,
    rejects uploads that do not strictly increase the version within
    the suite, and rejects exact duplicates already in the archive.
    Returns the (possibly empty) accumulated rejection message."""
    self.reject_message = "";
    file_entry = self.pkg.files[file];

    # Find any old binary packages
    old_query = "SELECT b.id, b.version, f.filename, l.path, c.name FROM binaries b, bin_associations ba, suite s, location l, component c, architecture a, files f WHERE b.package = '%s' AND s.suite_name = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all') AND ba.bin = b.id AND ba.suite = s.id AND b.architecture = a.id AND f.location = l.id AND l.component = c.id AND b.file = f.id" % (file_entry["package"], suite, file_entry["architecture"]);
    for previous in self.projectB.query(old_query).dictresult():
        file_entry["oldfiles"][suite] = previous;
        # Check versions [NB: per-suite only; no cross-suite checking done (yet)]
        if apt_pkg.VersionCompare(file_entry["version"], previous["version"]) != 1:
            self.reject("%s: old version (%s) >= new version (%s)." % (file, previous["version"], file_entry["version"]));

    # Check for any existing copies of the file
    dup_query = "SELECT b.id FROM binaries b, architecture a WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s' AND a.id = b.architecture" % (file_entry["package"], file_entry["version"], file_entry["architecture"]);
    if self.projectB.query(dup_query).getresult() != []:
        self.reject("can not overwrite existing copy of '%s' already in the archive." % (file));

    return self.reject_message;
647 ################################################################################
def check_source_against_db(self, file):
    """Ensure source is newer than existing source in target suites.

    Rejects (via self.reject) whenever an equal-or-newer version of the
    same source package is already associated with any target suite.
    Returns the accumulated rejection message.
    NOTE(review): the `dsc = self.pkg.dsc;' local binding and the
    `% (package, suite));' continuation of the query call are missing
    from this excerpt; the visible code is reproduced as-is."""
    self.reject_message = "";
    changes = self.pkg.changes;

    package = dsc.get("source");
    new_version = dsc.get("version");
    for suite in changes["distribution"].keys():
        q = self.projectB.query("SELECT s.version FROM source s, src_associations sa, suite su WHERE s.source = '%s' AND su.suite_name = '%s' AND sa.source = s.id AND sa.suite = su.id"
        # Reduce the query results to a list of version numbers.
        ql = map(lambda x: x[0], q.getresult());
        for old_version in ql:
            # VersionCompare == 1 means strictly newer; anything else rejects.
            if apt_pkg.VersionCompare(new_version, old_version) != 1:
                self.reject("%s: Old version `%s' >= new version `%s'." % (file, old_version, new_version));
    return self.reject_message;
666 ################################################################################
def check_dsc_against_db(self, file):
    """Verify every file listed in the .dsc: reject clashes with files
    already in the archive, locate any pre-existing .orig.tar.gz (in the
    pool or in the queue directories) and check md5sum/size against the
    .dsc's values.  Returns (reject_message, orig_tar_gz).
    NOTE(review): many lines are missing from this excerpt (the
    `found'/`orig_tar_gz' initialisation, several if/else headers, the
    `ql' bindings, `continue' statements and loop headers); the visible
    code is reproduced as-is."""
    self.reject_message = "";
    files = self.pkg.files;
    dsc_files = self.pkg.dsc_files;
    legacy_source_untouchable = self.pkg.legacy_source_untouchable;

    # Try and find all files mentioned in the .dsc. This has
    # to work harder to cope with the multiple possible
    # locations of an .orig.tar.gz.
    for dsc_file in dsc_files.keys():
        if files.has_key(dsc_file):
            actual_md5 = files[dsc_file]["md5sum"];
            actual_size = int(files[dsc_file]["size"]);
            found = "%s in incoming" % (dsc_file)
            # Check the file does not already exist in the archive
            q = self.projectB.query("SELECT f.id FROM files f, location l WHERE (f.filename ~ '/%s$' OR f.filename = '%s') AND l.id = f.location" % (utils.regex_safe(dsc_file), dsc_file));

            # "It has not broken them. It has fixed a
            # brokenness. Your crappy hack exploited a bug in
            # "(Come on! I thought it was always obvious that
            # one just doesn't release different files with
            # the same name and version.)"
            # -- ajk@ on d-devel@l.d.o

            if q.getresult() != []:
                self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file));
        elif dsc_file[-12:] == ".orig.tar.gz":
            # Look for the .orig.tar.gz in the pool.
            q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE (f.filename ~ '/%s$' OR f.filename = '%s') AND l.id = f.location" % (utils.regex_safe(dsc_file), dsc_file));
            # Unfortunately, we may get more than one
            # match here if, for example, the package was
            # in potato but had a -sa upload in woody. So
            # we need to choose the right one.
            x = ql[0]; # default to something sane in case we don't match any or have only one
            # NOTE(review): the `if len(ql) > 1:' / `for i in ql:' headers
            # are missing from this excerpt.
            old_file = i[0] + i[1];
            actual_md5 = apt_pkg.md5sum(utils.open_file(old_file));
            actual_size = os.stat(old_file)[stat.ST_SIZE];
            if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
            # Old files in legacy locations must not be touched by install().
            legacy_source_untouchable[i[3]] = "";

            old_file = x[0] + x[1];
            actual_md5 = apt_pkg.md5sum(utils.open_file(old_file));
            actual_size = os.stat(old_file)[stat.ST_SIZE];
            dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
            # See install() in katie...
            self.pkg.orig_tar_id = x[3];
            if suite_type == "legacy" or suite_type == "legacy-mixed":
                self.pkg.orig_tar_location = "legacy";
            self.pkg.orig_tar_location = x[4];
            # Not there? Check the queue directories...
            in_unchecked = os.path.join(self.Cnf["Dir::QueueUncheckedDir"],dsc_file);
            # See process_it() in jennifer for explanation of this
            if os.path.exists(in_unchecked):
                return (self.reject_message, in_unchecked);
            for dir in [ "Accepted", "New", "Byhand" ]:
                in_otherdir = os.path.join(self.Cnf["Dir::Queue%sDir" % (dir)],dsc_file);
                if os.path.exists(in_otherdir):
                    actual_md5 = apt_pkg.md5sum(utils.open_file(in_otherdir));
                    actual_size = os.stat(in_otherdir)[stat.ST_SIZE];
            self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file));
            self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file));
        if actual_md5 != dsc_files[dsc_file]["md5sum"]:
            self.reject("md5sum for %s doesn't match %s." % (found, file));
        if actual_size != int(dsc_files[dsc_file]["size"]):
            self.reject("size for %s doesn't match %s." % (found, file));

    return (self.reject_message, orig_tar_gz);
def do_query(self, q):
    """Run query `q' against projectB, logging the query text and its
    wall-clock duration to stderr.
    NOTE(review): this excerpt ends here; any trailing statements of
    this method (e.g. `return r;') are beyond the visible lines."""
    sys.stderr.write("query: \"%s\" ... " % (q));
    before = time.time();
    r = self.projectB.query(q);
    time_diff = time.time()-before;
    sys.stderr.write("took %.3f seconds.\n" % (time_diff));