3 # Utility functions for katie
4 # Copyright (C) 2001 James Troup <james@nocrew.org>
5 # $Id: katie.py,v 1.3 2002-02-15 02:54:22 troup Exp $
7 # This program is free software; you can redistribute it and/or modify
8 # it under the terms of the GNU General Public License as published by
9 # the Free Software Foundation; either version 2 of the License, or
10 # (at your option) any later version.
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU General Public License for more details.
17 # You should have received a copy of the GNU General Public License
18 # along with this program; if not, write to the Free Software
19 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 ###############################################################################
23 import cPickle, errno, os, pg, re, stat, string;
24 import utils, db_access;
25 import apt_inst, apt_pkg;
28 from string import lower;
30 ###############################################################################
32 re_isanum = re.compile (r"^\d+$");
33 re_default_answer = re.compile(r"\[(.*)\]");
34 re_fdnic = re.compile("\n\n");
35 re_bin_only_nmu_of_mu = re.compile("\.\d+\.\d+$");
36 re_bin_only_nmu_of_nmu = re.compile("\.\d+$");
38 ###############################################################################
40 # Convenience wrapper to carry around all the package information in one place
def __init__(self, **kwds):
    # Store every keyword argument as an attribute of this instance.
    for key, value in kwds.items():
        setattr(self, key, value);
def update(self, **kwds):
    # Merge the given keyword arguments into the instance's attributes.
    for key, value in kwds.items():
        setattr(self, key, value);
49 ###############################################################################
52 # Read in the group maintainer override file
# Load the group-maintainer override file (if configured) into a lookup
# table; used by is_an_nmu() to decide that group-maintained packages
# are never NMUs.
# NOTE(review): the embedded original line numbers jump here (55 and 61
# are absent), so this excerpt is missing one or more lines — verify
# against the complete file before trusting the control flow below.
53 def __init__ (self, Cnf):
# group_maint: lower-cased entries from the override file, mapped to 1.
54 self.group_maint = {};
56 if Cnf.get("Dinstall::GroupOverrideFilename"):
57 filename = Cnf["Dir::OverrideDir"] + Cnf["Dinstall::GroupOverrideFilename"];
58 file = utils.open_file(filename);
59 for line in file.readlines():
# Strip comments and surrounding whitespace, then lower-case the entry.
60 line = lower(string.strip(utils.re_comments.sub('', line)));
62 self.group_maint[line] = 1;
# Decide whether the upload in `pkg' is a non-maintainer upload (NMU).
# NOTE(review): this excerpt is incomplete — the original lines holding
# the `dsc'/`Cnf' bindings, the return statements, the uploaders loop
# header and the `uploadernames' initialisation (66, 68-69, 74-75,
# 78-79, 83-84, 87+) are missing, so the flow below is partial.
65 def is_an_nmu (self, pkg):
67 changes = pkg.changes;
# Canonicalise the .dsc Maintainer field for comparison.
70 (dsc_rfc822, dsc_name, dsc_email) = utils.fix_maintainer (lower(dsc.get("maintainer",Cnf["Dinstall::MyEmailAddress"])));
71 # changes["changedbyname"] == dsc_name is probably never true, but better safe than sorry
# Not an NMU when Changed-By is empty or matches the Maintainer.
72 if dsc_name == lower(changes["maintainername"]) and \
73 (changes["changedby822"] == "" or lower(changes["changedbyname"]) == dsc_name):
# An upload by someone listed in the .dsc Uploaders: field is not an
# NMU either.
76 if dsc.has_key("uploaders"):
77 uploaders = string.split(lower(dsc["uploaders"]), ",");
80 (rfc822, name, email) = utils.fix_maintainer (string.strip(i));
81 uploadernames[name] = "";
82 if uploadernames.has_key(lower(changes["changedbyname"])):
85 # Some group maintained packages (e.g. Debian QA) are never NMU's
86 if self.group_maint.has_key(lower(changes["maintaineremail"])):
91 ###############################################################################
# Main Katie state object: holds per-package working data, the template
# substitution map and the projectB database connection.
# NOTE(review): original lines 96-97 are absent from this excerpt
# (presumably the `self.Cnf = Cnf' binding — self.Cnf is used by other
# methods below); verify against the complete file.
95 def __init__(self, Cnf):
98 # Read in the group-maint override file
99 self.nmu = nmu_p(Cnf);
# Running totals for accepted uploads (updated in accept()).
100 self.accept_count = 0;
101 self.accept_bytes = 0L;
# Per-upload package state; reset by init_vars() between packages.
102 self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
103 legacy_source_untouchable = {});
105 # Initialize the substitution template mapping global
106 Subst = self.Subst = {};
107 Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"];
108 Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"];
109 Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"];
110 Subst["__KATIE_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"];
# Open the projectB database connection used by the *_against_db checks.
112 self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]));
113 db_access.init(Cnf, self.projectB);
115 ###########################################################################
def init_vars (self):
    """Reset all per-package state, ready to process a new upload."""
    # Clear each package dictionary in place (other code may hold
    # references to these dicts), rather than rebinding new empty ones.
    # Uses getattr() instead of the original exec-on-a-format-string,
    # which is equivalent but safer and easier to read.
    for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
        getattr(self.pkg, i).clear();
    self.pkg.orig_tar_id = None;
    self.pkg.orig_tar_location = "";
123 ###########################################################################
def update_vars (self):
    """Restore the per-package state previously saved by dump_vars().

    Reads <changes-basename>.katie next to the .changes file and loads
    the pickled structures back into self.pkg.  The load order must
    match the dump order used by dump_vars()."""
    dump_filename = self.pkg.changes_file[:-8]+".katie";
    dump_file = utils.open_file(dump_filename);
    p = cPickle.Unpickler(dump_file);
    # The dictionaries are updated in place; the scalars are rebound.
    # getattr()/setattr() replace the original exec-on-a-format-string.
    for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
        getattr(self.pkg, i).update(p.load());
    for i in [ "orig_tar_id", "orig_tar_location" ]:
        setattr(self.pkg, i, p.load());
    # Close the dump file rather than leaking the file object.
    dump_file.close();
135 ###########################################################################
137 # This could just dump the dictionaries as is, but I'd like to avoid
138 # this so there's some idea of what katie & lisa use from jennifer
# Serialise a whitelisted subset of the per-package state to
# <changes-basename>.katie in dest_dir so katie/lisa can reload it later
# with update_vars().
# NOTE(review): several original lines are absent from this excerpt
# (148-149 where the d_* dicts are presumably created, 151 and 158,
# 167 and 169-171 for the d_dsc copy, and 184-186 where the pickler
# presumably dumps and the file is closed); verify against the full file.
140 def dump_vars(self, dest_dir):
# Bind local names (changes, dsc, files, ...) to the self.pkg attributes.
141 for i in [ "changes", "dsc", "files", "dsc_files",
142 "legacy_source_untouchable", "orig_tar_id", "orig_tar_location" ]:
143 exec "%s = self.pkg.%s;" % (i,i);
144 dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".katie");
145 dump_file = utils.open_file(dump_filename, 'w');
# Binary pickle protocol 1.
146 p = cPickle.Pickler(dump_file, 1);
147 for i in [ "d_changes", "d_dsc", "d_files", "d_dsc_files" ]:
# Copy only the known keys of each file entry.
150 for file in files.keys():
152 for i in [ "package", "version", "architecture", "type", "size",
153 "md5sum", "component", "location id", "source package",
154 "source version", "maintainer", "dbtype", "files id",
155 "new", "section", "priority", "oldfiles", "othercomponents" ]:
156 if files[file].has_key(i):
157 d_files[file][i] = files[file][i];
159 # Mandatory changes fields
160 for i in [ "distribution", "source", "architecture", "version", "maintainer",
161 "urgency", "fingerprint" ]:
162 d_changes[i] = changes[i];
163 # Optional changes fields
# NOTE(review): checking d_changes rather than changes here looks
# suspect — an optional field present in changes but not yet copied
# into d_changes would never be written out; confirm intent.
164 for i in [ "changed-by", "changedby822", "maintainer822", "filecontents" ]:
165 if d_changes.has_key(i):
166 d_changes[i] = changes[i];
168 for i in [ "source", "version", "maintainer", "fingerprint" ]:
172 for file in dsc_files.keys():
173 d_dsc_files[file] = {};
174 # Mandatory dsc_files fields
175 for i in [ "size", "md5sum" ]:
176 d_dsc_files[file][i] = dsc_files[file][i];
177 # Optional dsc_files fields
178 for i in [ "files id" ]:
179 if dsc_files[file].has_key(i):
180 d_dsc_files[file][i] = dsc_files[file][i];
# The structures are written in a fixed order which update_vars()
# relies on when reloading.
182 for i in [ d_changes, d_dsc, d_files, d_dsc_files,
183 legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
187 ###########################################################################
189 # Set up the per-package template substitution mappings
# Refresh the per-package entries of the Subst template-substitution
# map (maintainer, source, version, reject message, ...).
# NOTE(review): original lines 192 (presumably `Subst = self.Subst;')
# and 210 (presumably the `else:' for the branch below) are absent from
# this excerpt; lines 211-213 only make sense under that else branch.
191 def update_subst (self, reject_message = ""):
193 changes = self.pkg.changes;
194 # If jennifer crashed out in the right place, architecture may still be a string.
195 if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
196 changes["architecture"] = { "Unknown" : "" };
197 # and maintainer822 may not exist.
198 if not changes.has_key("maintainer822"):
199 changes["maintainer822"] = self.Cnf["Dinstall::MyEmailAddress"];
201 Subst["__ARCHITECTURE__"] = string.join(changes["architecture"].keys(), ' ' );
202 Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file);
203 Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "");
205 # For source uploads the Changed-By field wins; otherwise Maintainer wins.
206 if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
207 Subst["__MAINTAINER_FROM__"] = changes["changedby822"];
208 Subst["__MAINTAINER_TO__"] = changes["changedby822"] + ", " + changes["maintainer822"];
209 Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown");
211 Subst["__MAINTAINER_FROM__"] = changes["maintainer822"];
212 Subst["__MAINTAINER_TO__"] = changes["maintainer822"];
213 Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown");
# Bcc a per-source package-tracking address if one is configured.
214 if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
215 Subst["__MAINTAINER_TO__"] = Subst["__MAINTAINER_TO__"] + "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])
217 Subst["__REJECT_MESSAGE__"] = reject_message;
218 Subst["__SOURCE__"] = changes.get("source", "Unknown");
219 Subst["__VERSION__"] = changes.get("version", "Unknown");
221 ###########################################################################
# Build the long and short textual summaries of the upload used in the
# accepted/announce mails.  Returns (summary, short_summary).
# NOTE(review): this excerpt skips several original lines (226, 228,
# 233, 235, 238 and 241 where the byhand/new flags are presumably set,
# 247 `else:', 251, 253, 256-257, 259 and 261); the flow below is
# partial — verify against the complete file.
223 def build_summaries(self):
224 changes = self.pkg.changes;
225 files = self.pkg.files;
227 byhand = summary = new = "";
229 # changes["distribution"] may not exist in corner cases
230 # (e.g. unreadable changes files)
231 if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
232 changes["distribution"] = {};
234 file_keys = files.keys();
236 for file in file_keys:
237 if files[file].has_key("byhand"):
239 summary = summary + file + " byhand\n"
240 elif files[file].has_key("new"):
242 summary = summary + "(new) %s %s %s\n" % (file, files[file]["priority"], files[file]["section"])
243 if files[file].has_key("othercomponents"):
244 summary = summary + "WARNING: Already present in %s distribution.\n" % (files[file]["othercomponents"])
# For binary packages, include the control-file Description in the summary.
245 if files[file]["type"] == "deb":
246 summary = summary + apt_pkg.ParseSection(apt_inst.debExtractControl(utils.open_file(file)))["Description"] + '\n';
# Normal files: record and show where in the pool they will land.
248 files[file]["pool name"] = utils.poolify (changes["source"], files[file]["component"])
249 destination = self.Cnf["Dir::PoolRoot"] + files[file]["pool name"] + file
250 summary = summary + file + "\n to " + destination + "\n"
252 short_summary = summary;
254 # This is for direport's benefit...
# Reformat blank lines in the changelog entry as " ." continuation lines.
255 f = re_fdnic.sub("\n .\n", changes.get("changes",""));
258 summary = summary + "Changes: " + f;
# Dry-run announce (action=0) only contributes text to the summary.
260 summary = summary + self.announce(short_summary, 0)
262 return (summary, short_summary);
264 ###########################################################################
# Announce a source upload to the per-suite announce lists and handle
# bug closures (close outright for maintainer uploads, tag fixed for
# NMUs).  `action' == 0 builds summary text only; non-zero also sends
# the mails.
# NOTE(review): many original lines are absent from this excerpt (the
# Cnf/Subst/lists_done/summary bindings, several if/return lines, the
# bug loop headers near 296 and 316, and lines 306-307 — including the
# closing quotes of the triple-quoted string below, which is therefore
# unterminated here).  Verify against the complete file.
266 def announce (self, short_summary, action):
269 changes = self.pkg.changes;
272 # Only do announcements for source uploads with a recent dpkg-dev installed
273 if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):
278 Subst["__SHORT_SUMMARY__"] = short_summary;
280 for dist in changes["distribution"].keys():
281 list = Cnf.Find("Suite::%s::Announce" % (dist))
# Skip suites with no announce list, or lists already announced to.
282 if list == "" or lists_done.has_key(list):
285 summary = summary + "Announcing to %s\n" % (list)
288 Subst["__ANNOUNCE_LIST_ADDRESS__"] = list;
289 mail_message = utils.TemplateSubst(Subst,open(Cnf["Dir::TemplatesDir"]+"/jennifer.announce","r").read());
290 utils.send_mail (mail_message, "")
292 bugs = changes["closes"].keys()
# Non-NMU: close the bugs outright; NMU: only tag them fixed (below).
294 if not self.nmu.is_an_nmu(self.pkg):
295 summary = summary + "Closing bugs: "
297 summary = summary + "%s " % (bug)
299 Subst["__BUG_NUMBER__"] = bug;
# Uploads to stable get an extra warning paragraph in the bug-close mail.
300 if changes["distribution"].has_key("stable"):
301 Subst["__STABLE_WARNING__"] = """
302 Note that this package is not part of the released stable Debian
303 distribution. It may have dependencies on other unreleased software,
304 or other instabilities. Please take care if you wish to install it.
305 The update will eventually make its way into the next released Debian
308 Subst["__STABLE_WARNING__"] = "";
309 mail_message = utils.TemplateSubst(Subst,open(Cnf["Dir::TemplatesDir"]+"/jennifer.bug-close","r").read());
310 utils.send_mail (mail_message, "")
312 self.Logger.log(["closing bugs"]+bugs);
314 summary = summary + "Setting bugs to severity fixed: "
317 summary = summary + "%s " % (bug)
318 control_message = control_message + "tag %s + fixed\n" % (bug)
319 if action and control_message != "":
320 Subst["__CONTROL_MESSAGE__"] = control_message;
321 mail_message = utils.TemplateSubst(Subst,open(Cnf["Dir::TemplatesDir"]+"/jennifer.bug-nmu-fixed","r").read());
322 utils.send_mail (mail_message, "")
324 self.Logger.log(["setting bugs to fixed"]+bugs);
325 summary = summary + "\n"
329 ###########################################################################
# Accept the upload: save the .katie state file, move the .changes and
# all its files into the accepted queue, bump the running counters and
# (unless No-Mail) send the accepted mail and announcements.
# NOTE(review): original lines 332-336 are only partially present; the
# local `Cnf' and `Subst' bindings they presumably contained are not
# visible in this excerpt.
331 def accept (self, summary, short_summary):
334 files = self.pkg.files;
337 self.Logger.log(["Accepting changes",self.pkg.changes_file]);
# Save state so katie/lisa can reconstruct it from the accepted queue.
339 self.dump_vars(Cnf["Dir::QueueAcceptedDir"]);
341 # Move all the files into the accepted directory
342 utils.move(self.pkg.changes_file, Cnf["Dir::QueueAcceptedDir"]);
343 file_keys = files.keys();
344 for file in file_keys:
345 utils.move(file, Cnf["Dir::QueueAcceptedDir"]);
346 self.accept_bytes = self.accept_bytes + float(files[file]["size"])
347 self.accept_count = self.accept_count + 1;
349 # Send accept mail, announce to lists, close bugs and check for
350 # override disparities
351 if not Cnf["Dinstall::Options::No-Mail"]:
352 Subst["__SUITE__"] = "";
353 Subst["__SUMMARY__"] = summary;
354 mail_message = utils.TemplateSubst(Subst,open(Cnf["Dir::TemplatesDir"]+"/jennifer.accepted","r").read());
355 utils.send_mail(mail_message, "")
# action=1: actually send the announcements / bug mails.
356 self.announce(short_summary, 1)
358 ###########################################################################
# Compare each known binary's section/priority against the override
# table values recorded by in_override_p() and mail the maintainer
# about any disparities.
# NOTE(review): original lines 361, 364, 367-369 (presumably the early
# return for sourceless uploads and the `summary' initialisation), 377
# (presumably `continue') and 383-386 are absent from this excerpt.
360 def check_override (self):
362 changes = self.pkg.changes;
363 files = self.pkg.files;
365 # Only check section & priority on sourceful uploads
366 if not changes["architecture"].has_key("source"):
370 for file in files.keys():
# Only already-known (non-NEW) .deb packages are checked.
371 if not files[file].has_key("new") and files[file]["type"] == "deb":
372 section = files[file]["section"];
373 override_section = files[file]["override section"];
# "-" means no section was given in the upload; nothing to compare.
374 if lower(section) != lower(override_section) and section != "-":
375 # Ignore this; it's a common mistake and not worth whining about
376 if lower(section) == "non-us/main" and lower(override_section) == "non-us":
378 summary = summary + "%s: section is overridden from %s to %s.\n" % (file, section, override_section);
379 priority = files[file]["priority"];
380 override_priority = files[file]["override priority"];
381 if priority != override_priority and priority != "-":
382 summary = summary + "%s: priority is overridden from %s to %s.\n" % (file, priority, override_priority);
387 Subst["__SUMMARY__"] = summary;
388 mail_message = utils.TemplateSubst(Subst,utils.open_file(self.Cnf["Dir::TemplatesDir"]+"/jennifer.override-disparity").read());
389 utils.send_mail (mail_message, "");
391 ###########################################################################
393 def force_move (self, files):
394 """Forcefully move files from the current directory to the reject
395 directory. If any file already exists it will be moved to the
396 morgue to make way for the new file."""
# NOTE(review): this excerpt is missing the loop header over `files'
# and the try/except framing around the os.open() calls (original lines
# 397-400, 403, 405, 407, 411, 417, 419, 421-422 and 424-426); the
# EEXIST handling below only makes sense inside those try/excepts.
401 # Skip any files which don't exist or which we don't have permission to copy.
402 if os.access(file,os.R_OK) == 0:
404 dest_file = os.path.join(Cnf["Dir::QueueRejectDir"], file);
# O_EXCL: the open fails with EEXIST if a file of that name is already
# sitting in the reject directory.
406 os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);
408 # File exists? Let's try and move it to the morgue
409 if errno.errorcode[e.errno] == 'EEXIST':
410 morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueRejectDir"],file);
412 morgue_file = utils.find_next_free(morgue_file);
413 except utils.tried_too_hard_exc:
414 # Something's either gone badly Pete Tong, or
415 # someone is trying to exploit us.
416 utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file));
418 utils.move(dest_file, morgue_file);
# Retry claiming the now-free reject slot.
420 os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);
423 utils.warn("**WARNING** failed to claim %s in the reject directory." % (file));
427 # If we got here, we own the destination file, so we can
428 # safely overwrite it.
429 utils.move(file, dest_file, 1);
431 ###########################################################################
# Reject the upload: move everything into the reject queue, write a
# <changes-basename>.reason file (automatic message, or one composed in
# an editor for manual rejections) and mail the rejection.
# NOTE(review): several original lines are absent from this excerpt
# (434-439, 442, 446, 452-453, 458-459, 462, 467, 470-471, 477, 479 and
# 484 — including the if/else that separates the automatic and manual
# paths and the close of `fd'); the flow below is partial.
433 def do_reject (self, manual = 0, reject_message = ""):
440 reason_filename = pkg.changes_file[:-8] + ".reason";
441 reject_filename = Cnf["Dir::QueueRejectDir"] + '/' + reason_filename;
443 # Move all the files into the reject directory
444 reject_files = pkg.files.keys() + [pkg.changes_file];
445 self.force_move(reject_files);
447 # If we fail here someone is probably trying to exploit the race
448 # so let's just raise an exception ...
# O_EXCL after the unlink: creation fails if someone recreates the file.
449 if os.path.exists(reject_filename):
450 os.unlink(reject_filename);
451 fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);
# Automatic rejection: canned rejector address and X-Katie-Rejection CC.
454 Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"];
455 Subst["__MANUAL_REJECT_MESSAGE__"] = "";
456 Subst["__CC__"] = "X-Katie-Rejection: automatic (moo)";
457 os.write(fd, reject_message);
460 # Build up the rejection email
# Manual rejection: attribute it to the invoking admin.
461 user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"]);
463 Subst["__REJECTOR_ADDRESS__"] = user_email_address;
464 Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message;
465 Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"];
466 reject_mail_message = utils.TemplateSubst(Subst,utils.open_file(Cnf["Dir::TemplatesDir"]+"/katie.rejected").read());
468 # Write the rejection email out as the <foo>.reason file
469 os.write(fd, reject_mail_message);
472 # If we weren't given a manual rejection message, spawn an
473 # editor so the user can add one in...
474 if reject_message == "":
475 editor = os.environ.get("EDITOR","vi")
476 result = os.system("%s +6 %s" % (editor, reject_filename))
478 utils.fubar("editor invocation failed for '%s'!" % (reject_filename), result);
480 # Send the rejection mail if appropriate
481 if not Cnf["Dinstall::Options::No-Mail"]:
# Re-read the reason file: it may have been edited above.
482 reject_mail_message = utils.TemplateSubst(Subst,utils.open_file(Cnf["Dir::TemplatesDir"]+"/katie.rejected").read());
483 utils.send_mail (reject_mail_message, "");
485 self.Logger.log(["rejected", pkg.changes_file]);
487 ################################################################################
489 # Ensure that source exists somewhere in the archive for the binary
490 # upload being processed.
492 # (1) exact match => 1.0-3
493 # (2) Bin-only NMU of an MU => 1.0-3.0.1
494 # (3) Bin-only NMU of a sourceful-NMU => 1.0-3.1.1
# Ensure source exists in the archive for a binary upload; see the
# comment block above for the three version forms tried: (1) exact
# match, (2) bin-only NMU of an MU, (3) bin-only NMU of a sourceful NMU.
# NOTE(review): the return statements (original lines ~504, 509, 514+)
# are absent from this excerpt; presumably each successful count()
# returns true and the fall-through returns false.
496 def source_exists (self, package, source_version):
497 q = self.projectB.query("SELECT s.version FROM source s WHERE s.source = '%s'" % (package));
499 # Reduce the query results to a list of version numbers
500 ql = map(lambda x: x[0], q.getresult());
# (1) exact match
503 if ql.count(source_version):
# (2) strip the trailing ".x.y" added by a bin-only NMU of an MU
507 orig_source_version = re_bin_only_nmu_of_mu.sub('', source_version);
508 if ql.count(orig_source_version):
# (3) strip the trailing ".y" added by a bin-only NMU of a sourceful NMU
512 orig_source_version = re_bin_only_nmu_of_nmu.sub('', source_version);
513 if ql.count(orig_source_version):
519 ################################################################################
# Look up the override entry (section, priority) for a package in a
# suite and record it in files[file] for later disparity checking.
# NOTE(review): original lines 523, 525-528 (presumably where `type' is
# set to "dsc" for source or to binary_type otherwise), 532, 535-536
# (presumably a guard on suite_id == -1), 543, 553, 555 (presumably
# `if result != []:') and the 558-560 returns are absent from this
# excerpt; `type' is used below but never assigned in what is visible.
521 def in_override_p (self, package, component, suite, binary_type, file):
522 files = self.pkg.files;
524 if binary_type == "": # must be source
529 # Override suite name; used for example with proposed-updates
530 if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
531 suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)];
533 # Avoid <undef> on unknown distributions
534 suite_id = db_access.get_suite_id(suite);
537 component_id = db_access.get_component_id(component);
538 type_id = db_access.get_override_type_id(type);
540 # FIXME: nasty non-US specific hack
# NOTE(review): this strip happens after component_id was looked up
# from the unstripped name — confirm that is intended.
541 if lower(component[:7]) == "non-us/":
542 component = component[7:];
544 q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
545 % (package, suite_id, component_id, type_id));
546 result = q.getresult();
547 # If checking for a source package fall back on the binary override type
548 if type == "dsc" and not result:
549 type_id = db_access.get_override_type_id("deb");
550 q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
551 % (package, suite_id, component_id, type_id));
552 result = q.getresult();
554 # Remember the section and priority so we can check them later if appropriate
556 files[file]["override section"] = result[0][0];
557 files[file]["override priority"] = result[0][1];
561 ################################################################################
def reject (self, str, prefix="Rejected: "):
    # Accumulate one rejection line: each call appends "<prefix><str>\n"
    # to self.reject_message.  (The parameter is named `str' in the
    # existing interface and is kept, although it shadows the builtin.)
    self.reject_message += "%s%s\n" % (prefix, str);
def check_binaries_against_db(self, file, suite):
    """Check an uploaded binary against what is already in the archive.

    Rejects when the uploaded version is not strictly newer than the
    version already in the target suite, and when an identical
    package/version/architecture binary already exists anywhere in the
    archive.  Returns the accumulated rejection message ("" if OK)."""
    self.reject_message = "";
    files = self.pkg.files;

    # Find any old binary packages
    q = self.projectB.query("SELECT b.id, b.version, f.filename, l.path, c.name FROM binaries b, bin_associations ba, suite s, location l, component c, architecture a, files f WHERE b.package = '%s' AND s.suite_name = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all') AND ba.bin = b.id AND ba.suite = s.id AND b.architecture = a.id AND f.location = l.id AND l.component = c.id AND b.file = f.id"
                            % (files[file]["package"], suite, files[file]["architecture"]))
    for oldfile in q.dictresult():
        files[file]["oldfiles"][suite] = oldfile;
        # Check versions [NB: per-suite only; no cross-suite checking done (yet)]
        # FIX(review): this was a bare call to reject(), which is not in
        # scope at module level; route it through self.reject() like the
        # rest of this class does.
        if apt_pkg.VersionCompare(files[file]["version"], oldfile["version"]) != 1:
            self.reject("%s Old version `%s' >= new version `%s'." % (file, oldfile["version"], files[file]["version"]));
    # Check for any existing copies of the file
    q = self.projectB.query("SELECT b.id FROM binaries b, architecture a WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s' AND a.id = b.architecture" % (files[file]["package"], files[file]["version"], files[file]["architecture"]))
    if q.getresult() != []:
        self.reject("can not overwrite existing copy of '%s' already in the archive." % (file));

    return self.reject_message;
586 ################################################################################
def check_source_against_db(self, file):
    """Ensure source is newer than existing source in target suites.

    Compares the .dsc version against every version of the same source
    package already associated with each target suite; rejects unless
    the new version is strictly greater.  Returns the accumulated
    rejection message ("" if OK)."""
    self.reject_message = "";
    changes = self.pkg.changes;
    # NOTE(review): this binding was missing in the excerpted original
    # (dsc was referenced but never assigned); restored here.
    dsc = self.pkg.dsc;

    package = dsc.get("source");
    new_version = dsc.get("version");
    for suite in changes["distribution"].keys():
        q = self.projectB.query("SELECT s.version FROM source s, src_associations sa, suite su WHERE s.source = '%s' AND su.suite_name = '%s' AND sa.source = s.id AND sa.suite = su.id"
                                % (package, suite));
        # Reduce the query results to a plain list of version strings.
        ql = map(lambda x: x[0], q.getresult());
        for old_version in ql:
            # FIX(review): was a bare call to reject(), which is not in
            # scope; route it through self.reject().
            if apt_pkg.VersionCompare(new_version, old_version) != 1:
                self.reject("%s: Old version `%s' >= new version `%s'." % (file, old_version, new_version));
    return self.reject_message;
605 ################################################################################
# Verify every file listed in the .dsc: find it in incoming or (for an
# .orig.tar.gz) in the pool, check it does not clash with an existing
# archive file, and check its md5sum/size against the .dsc.  Returns
# (reject_message, orig_tar_gz).
# NOTE(review): this excerpt is heavily incomplete — missing original
# lines include 612-613, 624, 627-628, 633, 637, 639-641 (presumably
# the ql binding for the pool query), 646, 648-650 (presumably the
# multi-match loop header over ql), 655-656, 658, 662-663, 669
# (presumably `else:'), 671-672, 679, 681-682, 684 and 689 — several
# branches below are orphaned as a result.  Also note the bare reject()
# calls: elsewhere in this class rejections go through self.reject();
# confirm against the full file.
607 def check_dsc_against_db(self, file):
608 self.reject_message = "";
609 files = self.pkg.files;
610 dsc_files = self.pkg.dsc_files;
611 legacy_source_untouchable = self.pkg.legacy_source_untouchable;
614 # Try and find all files mentioned in the .dsc. This has
615 # to work harder to cope with the multiple possible
616 # locations of an .orig.tar.gz.
617 for dsc_file in dsc_files.keys():
# Case 1: the file was uploaded alongside the .dsc.
618 if files.has_key(dsc_file):
619 actual_md5 = files[dsc_file]["md5sum"];
620 actual_size = int(files[dsc_file]["size"]);
621 found = "%s in incoming" % (dsc_file)
622 # Check the file does not already exist in the archive
623 q = self.projectB.query("SELECT f.id FROM files f, location l WHERE (f.filename ~ '/%s$' OR f.filename = '%s') AND l.id = f.location" % (utils.regex_safe(dsc_file), dsc_file));
625 # "It has not broken them. It has fixed a
626 # brokenness. Your crappy hack exploited a bug in
629 # "(Come on! I thought it was always obvious that
630 # one just doesn't release different files with
631 # the same name and version.)"
632 # -- ajk@ on d-devel@l.d.o
634 if q.getresult() != []:
635 reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file));
# Case 2: an .orig.tar.gz not in incoming -- look for it in the pool.
636 elif dsc_file[-12:] == ".orig.tar.gz":
638 q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE (f.filename ~ '/%s$' OR f.filename = '%s') AND l.id = f.location" % (utils.regex_safe(dsc_file), dsc_file));
642 # Unfortunately, we may get more than one
643 # match here if, for example, the package was
644 # in potato but had a -sa upload in woody. So
645 # we need to choose the right one.
647 x = ql[0]; # default to something sane in case we don't match any or have only one
# Pick the candidate whose md5sum and size match the .dsc entry.
651 old_file = i[0] + i[1];
652 actual_md5 = apt_pkg.md5sum(utils.open_file(old_file));
653 actual_size = os.stat(old_file)[stat.ST_SIZE];
654 if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
# Mark the other candidates as untouchable legacy source.
657 legacy_source_untouchable[i[3]] = "";
659 old_file = x[0] + x[1];
660 actual_md5 = apt_pkg.md5sum(utils.open_file(old_file));
661 actual_size = os.stat(old_file)[stat.ST_SIZE];
664 dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
665 # See install() in katie...
666 self.pkg.orig_tar_id = x[3];
# NOTE(review): `suite_type' is used below but its binding (presumably
# from x[2], the location type) is not visible in this excerpt.
667 if suite_type == "legacy" or suite_type == "legacy-mixed":
668 self.pkg.orig_tar_location = "legacy";
670 self.pkg.orig_tar_location = x[4];
673 # Not there? Check in Incoming...
674 # [See comment above jennifer's process_it() for
675 # explanation of why this is necessary...]
676 orig_tar_gz = self.pkg.directory + '/' + dsc_file;
677 if os.path.exists(orig_tar_gz):
678 return (self.reject_message, orig_tar_gz);
680 reject("%s refers to %s, but I can't find it in Incoming or in the pool." % (file, dsc_file));
683 reject("%s refers to %s, but I can't find it in Incoming." % (file, dsc_file));
# Finally verify the found file's checksum and size against the .dsc.
685 if actual_md5 != dsc_files[dsc_file]["md5sum"]:
686 reject("md5sum for %s doesn't match %s." % (found, file));
687 if actual_size != int(dsc_files[dsc_file]["size"]):
688 reject("size for %s doesn't match %s." % (found, file));
690 return (self.reject_message, orig_tar_gz);