3 # Utility functions for katie
4 # Copyright (C) 2001 James Troup <james@nocrew.org>
5 # $Id: katie.py,v 1.15 2002-04-21 15:38:29 troup Exp $
7 # This program is free software; you can redistribute it and/or modify
8 # it under the terms of the GNU General Public License as published by
9 # the Free Software Foundation; either version 2 of the License, or
10 # (at your option) any later version.
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU General Public License for more details.
17 # You should have received a copy of the GNU General Public License
18 # along with this program; if not, write to the Free Software
19 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 ###############################################################################
23 import cPickle, errno, os, pg, re, stat, string, sys, tempfile, time;
24 import utils, db_access;
25 import apt_inst, apt_pkg;
28 from string import lower;
30 ###############################################################################
# Pre-compiled regular expressions used throughout this module.
# All patterns are raw strings: the previous non-raw forms relied on
# "\." and "\d" passing through the string literal unchanged, which is
# fragile (and an invalid-escape warning/error in later Pythons).
re_isanum = re.compile(r"^\d+$");            # entirely-numeric string
re_default_answer = re.compile(r"\[(.*)\]"); # default choice in a "[R]eject ..." prompt
re_fdnic = re.compile(r"\n\n");              # blank line (changelog reformatting)
# Version suffixes identifying binary-only NMUs:
re_bin_only_nmu_of_mu = re.compile(r"\.\d+\.\d+$");  # e.g. 1.0-3.0.1 -> NMU of a maintainer upload
re_bin_only_nmu_of_nmu = re.compile(r"\.\d+$");      # e.g. 1.0-3.1.1 -> NMU of a sourceful NMU
38 ###############################################################################
40 # Convenience wrapper to carry around all the package information in
def __init__(self, **kwds):
    """Create a package-state bundle.

    Every keyword argument becomes an attribute of the new instance.
    """
    vars(self).update(kwds)
def update(self, **kwds):
    """Merge the given keyword arguments into this object's attributes."""
    vars(self).update(kwds)
49 ###############################################################################
52 # Read in the group maintainer override file
# Load the group-maintainer override list: a file of maintainer addresses
# (normalised to lower case) whose packages are never considered NMUs.
53 def __init__ (self, Cnf):
54 self.group_maint = {};
# Only read the override file if one is configured.
56 if Cnf.get("Dinstall::GroupOverrideFilename"):
57 filename = Cnf["Dir::OverrideDir"] + Cnf["Dinstall::GroupOverrideFilename"];
58 file = utils.open_file(filename);
59 for line in file.readlines():
# Strip comments and surrounding whitespace, then lower-case.
60 line = lower(string.strip(utils.re_comments.sub('', line)));
# NOTE(review): a guard line (original line 61) is missing from this view;
# presumably it skipped empty lines before recording the entry.
62 self.group_maint[line] = 1;
# Decide whether the upload in 'pkg' is a Non-Maintainer Upload (NMU).
# NOTE(review): several lines (including the 'dsc'/'uploadernames'
# bindings and the return statements) are missing from this view; the
# checks below are documented from what is visible only.
65 def is_an_nmu (self, pkg):
67 changes = pkg.changes;
# Normalise the .dsc Maintainer field for comparison, falling back to the
# configured address when the field is absent.
70 (dsc_rfc822, dsc_name, dsc_email) = utils.fix_maintainer (lower(dsc.get("maintainer",Cnf["Dinstall::MyEmailAddress"])));
71 # changes["changedbyname"] == dsc_name is probably never true, but better safe than sorry
72 if dsc_name == lower(changes["maintainername"]) and \
73 (changes["changedby822"] == "" or lower(changes["changedbyname"]) == dsc_name):
# Changed-By matching an entry in the .dsc Uploaders field also means the
# upload is not an NMU.
76 if dsc.has_key("uploaders"):
77 uploaders = string.split(lower(dsc["uploaders"]), ",");
80 (rfc822, name, email) = utils.fix_maintainer (string.strip(i));
81 uploadernames[name] = "";
82 if uploadernames.has_key(lower(changes["changedbyname"])):
85 # Some group maintained packages (e.g. Debian QA) are never NMU's
86 if self.group_maint.has_key(lower(changes["maintaineremail"])):
91 ###############################################################################
# Set up per-run state: the NMU checker, accept counters, the per-upload
# package bundle, the mail-template substitution map and the projectB
# database connection.
95 def __init__(self, Cnf):
# NOTE(review): lines 96-97 (presumably storing Cnf and a Logger on self)
# are missing from this view; later methods read self.Cnf and self.Logger.
98 # Read in the group-maint override file
99 self.nmu = nmu_p(Cnf);
# Running totals used for the end-of-run accept summary.
100 self.accept_count = 0;
101 self.accept_bytes = 0L;
102 self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
103 legacy_source_untouchable = {});
105 # Initialize the substitution template mapping global
106 Subst = self.Subst = {};
107 Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"];
108 Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"];
109 Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"];
110 Subst["__KATIE_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"];
# Open the database connection and initialise db_access's id caches.
112 self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]));
113 db_access.init(Cnf, self.projectB);
115 ###########################################################################
def init_vars (self):
    """Reset all per-upload state held on self.pkg.

    Clears each of the tracked dictionaries and resets the .orig.tar.gz
    bookkeeping fields to their defaults, ready for the next package.
    """
    # getattr() replaces the historical exec-a-string idiom: same effect,
    # but no runtime code generation and no string quoting pitfalls.
    for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
        getattr(self.pkg, i).clear();
    self.pkg.orig_tar_id = None;
    self.pkg.orig_tar_location = "";
123 ###########################################################################
# Restore the per-upload state previously written by dump_vars() from the
# .katie file accompanying the current .changes file.
125 def update_vars (self):
126 dump_filename = self.pkg.changes_file[:-8]+".katie";
127 dump_file = utils.open_file(dump_filename);
128 p = cPickle.Unpickler(dump_file);
# Values must be read back in exactly the order dump_vars() wrote them:
# the five dictionaries first, then the two scalar fields.
129 for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
130 exec "self.pkg.%s.update(p.load());" % (i);
131 for i in [ "orig_tar_id", "orig_tar_location" ]:
132 exec "self.pkg.%s = p.load();" % (i);
# NOTE(review): the original presumably closed dump_file here (lines
# 133-134 are missing from this view).
135 ###########################################################################
137 # This could just dump the dictionaries as is, but I'd like to avoid
138 # this so there's some idea of what katie & lisa use from jennifer
# Serialise the interesting subset of per-upload state to
# <changes-file-stem>.katie in dest_dir, for later consumption by
# update_vars() (i.e. by katie/lisa after jennifer has run).
140 def dump_vars(self, dest_dir):
# Bind each self.pkg attribute to a local of the same name.
141 for i in [ "changes", "dsc", "files", "dsc_files",
142 "legacy_source_untouchable", "orig_tar_id", "orig_tar_location" ]:
143 exec "%s = self.pkg.%s;" % (i,i);
144 dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".katie");
145 dump_file = utils.open_file(dump_filename, 'w');
146 p = cPickle.Pickler(dump_file, 1);
# Filtered copies ("d_*") are dumped rather than the raw dictionaries so
# only a known, fixed set of keys is persisted.
147 for i in [ "d_changes", "d_dsc", "d_files", "d_dsc_files" ]:
# NOTE(review): lines 148-151 are missing from this view; presumably they
# initialise the d_* dictionaries and set up the per-file loop below.
150 for file in files.keys():
# Copy only the recognised per-file keys.
152 for i in [ "package", "version", "architecture", "type", "size",
153 "md5sum", "component", "location id", "source package",
154 "source version", "maintainer", "dbtype", "files id",
155 "new", "section", "priority", "oldfiles", "othercomponents" ]:
156 if files[file].has_key(i):
157 d_files[file][i] = files[file][i];
159 # Mandatory changes fields
160 for i in [ "distribution", "source", "architecture", "version", "maintainer",
161 "urgency", "fingerprint", "changedby822", "changedbyname",
162 "maintainername", "maintaineremail", "closes" ]:
163 d_changes[i] = changes[i];
164 # Optional changes fields
165 # FIXME: changes should be mandatory
166 for i in [ "changed-by", "maintainer822", "filecontents", "format", "changes" ]:
167 if changes.has_key(i):
168 d_changes[i] = changes[i];
# Copy the selected .dsc fields.
# NOTE(review): the loop body and d_dsc handling (lines 171-173) are
# missing from this view.
170 for i in [ "source", "version", "maintainer", "fingerprint", "uploaders" ]:
174 for file in dsc_files.keys():
175 d_dsc_files[file] = {};
176 # Mandatory dsc_files fields
177 for i in [ "size", "md5sum" ]:
178 d_dsc_files[file][i] = dsc_files[file][i];
179 # Optional dsc_files fields
180 for i in [ "files id" ]:
181 if dsc_files[file].has_key(i):
182 d_dsc_files[file][i] = dsc_files[file][i];
# Pickle each structure in a fixed order; update_vars() relies on this
# ordering when loading them back.
184 for i in [ d_changes, d_dsc, d_files, d_dsc_files,
185 legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
# NOTE(review): the p.dump(i) call and the file close (lines 186-188) are
# missing from this view.
189 ###########################################################################
191 # Set up the per-package template substitution mappings
# Refresh self.Subst with the per-package values used when filling in
# mail templates; reject_message is exposed as __REJECT_MESSAGE__.
193 def update_subst (self, reject_message = ""):
195 changes = self.pkg.changes;
196 # If jennifer crashed out in the right place, architecture may still be a string.
197 if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
198 changes["architecture"] = { "Unknown" : "" };
199 # and maintainer822 may not exist.
200 if not changes.has_key("maintainer822"):
201 changes["maintainer822"] = self.Cnf["Dinstall::MyEmailAddress"];
# NOTE(review): 'Subst' is used unqualified below; the 'Subst = self.Subst'
# binding (around missing lines 194/202) is not visible in this view.
203 Subst["__ARCHITECTURE__"] = string.join(changes["architecture"].keys(), ' ' );
204 Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file);
205 Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "");
207 # For source uploads the Changed-By field wins; otherwise Maintainer wins.
208 if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
209 Subst["__MAINTAINER_FROM__"] = changes["changedby822"];
210 Subst["__MAINTAINER_TO__"] = changes["changedby822"] + ", " + changes["maintainer822"];
211 Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown");
# NOTE(review): the 'else:' introducing this branch (line 212) is missing
# from this view.
213 Subst["__MAINTAINER_FROM__"] = changes["maintainer822"];
214 Subst["__MAINTAINER_TO__"] = changes["maintainer822"];
215 Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown");
# Optionally Bcc a per-source package-tracking address.
216 if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
217 Subst["__MAINTAINER_TO__"] = Subst["__MAINTAINER_TO__"] + "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])
219 Subst["__REJECT_MESSAGE__"] = reject_message;
220 Subst["__SOURCE__"] = changes.get("source", "Unknown");
221 Subst["__VERSION__"] = changes.get("version", "Unknown");
223 ###########################################################################
# Build the long and short human-readable summaries of the upload used in
# mails and on-screen reports; returns (summary, short_summary).
# NOTE(review): several lines are missing from this view (e.g. the
# byhand/new flag bookkeeping around 240/243 and an 'else:' at 249).
225 def build_summaries(self):
226 changes = self.pkg.changes;
227 files = self.pkg.files;
229 byhand = summary = new = "";
231 # changes["distribution"] may not exist in corner cases
232 # (e.g. unreadable changes files)
233 if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
234 changes["distribution"] = {};
236 file_keys = files.keys();
238 for file in file_keys:
239 if files[file].has_key("byhand"):
241 summary = summary + file + " byhand\n"
242 elif files[file].has_key("new"):
244 summary = summary + "(new) %s %s %s\n" % (file, files[file]["priority"], files[file]["section"])
245 if files[file].has_key("othercomponents"):
246 summary = summary + "WARNING: Already present in %s distribution.\n" % (files[file]["othercomponents"])
# For new binary packages, include the Description from the control file.
247 if files[file]["type"] == "deb":
248 summary = summary + apt_pkg.ParseSection(apt_inst.debExtractControl(utils.open_file(file)))["Description"] + '\n';
# Ordinary files: show where in the pool they will end up.
250 files[file]["pool name"] = utils.poolify (changes["source"], files[file]["component"])
251 destination = self.Cnf["Dir::PoolRoot"] + files[file]["pool name"] + file
252 summary = summary + file + "\n to " + destination + "\n"
254 short_summary = summary;
256 # This is for direport's benefit...
257 f = re_fdnic.sub("\n .\n", changes.get("changes",""));
260 summary = summary + "Changes: " + f;
262 summary = summary + self.announce(short_summary, 0)
264 return (summary, short_summary);
266 ###########################################################################
# Announce the upload to the per-suite mailing lists and deal with bug
# closures; 'action' selects whether mails are actually sent or the text
# is only accumulated into the returned summary.
268 def announce (self, short_summary, action):
271 changes = self.pkg.changes;
274 # Only do announcements for source uploads with a recent dpkg-dev installed
275 if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):
# NOTE(review): the early-return body for the test above and the set-up
# of Cnf/Subst/lists_done/summary (lines 276-281) are missing from this
# view.
280 Subst["__SHORT_SUMMARY__"] = short_summary;
282 for dist in changes["distribution"].keys():
283 list = Cnf.Find("Suite::%s::Announce" % (dist))
# Skip suites with no announce list or lists already mailed.
284 if list == "" or lists_done.has_key(list):
287 summary = summary + "Announcing to %s\n" % (list)
290 Subst["__ANNOUNCE_LIST_ADDRESS__"] = list;
291 mail_message = utils.TemplateSubst(Subst,open(Cnf["Dir::TemplatesDir"]+"/jennifer.announce","r").read());
292 utils.send_mail (mail_message, "")
294 bugs = changes["closes"].keys()
# Maintainer uploads close bugs outright; NMUs only tag them fixed.
296 if not self.nmu.is_an_nmu(self.pkg):
297 summary = summary + "Closing bugs: "
299 summary = summary + "%s " % (bug)
301 Subst["__BUG_NUMBER__"] = bug;
302 if changes["distribution"].has_key("stable"):
303 Subst["__STABLE_WARNING__"] = """
304 Note that this package is not part of the released stable Debian
305 distribution. It may have dependencies on other unreleased software,
306 or other instabilities. Please take care if you wish to install it.
307 The update will eventually make its way into the next released Debian
310 Subst["__STABLE_WARNING__"] = "";
311 mail_message = utils.TemplateSubst(Subst,open(Cnf["Dir::TemplatesDir"]+"/jennifer.bug-close","r").read());
312 utils.send_mail (mail_message, "")
314 self.Logger.log(["closing bugs"]+bugs);
316 summary = summary + "Setting bugs to severity fixed: "
319 summary = summary + "%s " % (bug)
320 control_message = control_message + "tag %s + fixed\n" % (bug)
# Mail the accumulated control@ commands in one message, if any.
321 if action and control_message != "":
322 Subst["__CONTROL_MESSAGE__"] = control_message;
323 mail_message = utils.TemplateSubst(Subst,open(Cnf["Dir::TemplatesDir"]+"/jennifer.bug-nmu-fixed","r").read());
324 utils.send_mail (mail_message, "")
326 self.Logger.log(["setting bugs to fixed"]+bugs);
327 summary = summary + "\n"
331 ###########################################################################
# Install the upload into the accepted queue: dump state, move the files,
# send the accepted mail / announcements, and set up auto-building
# bookkeeping for unstable uploads.
333 def accept (self, summary, short_summary):
# NOTE(review): local bindings such as Cnf/Subst (lines 334-338) are
# partly missing from this view.
336 files = self.pkg.files;
339 self.Logger.log(["Accepting changes",self.pkg.changes_file]);
# Save state so later queue daemons can pick the upload back up.
341 self.dump_vars(Cnf["Dir::QueueAcceptedDir"]);
343 # Move all the files into the accepted directory
344 utils.move(self.pkg.changes_file, Cnf["Dir::QueueAcceptedDir"]);
345 file_keys = files.keys();
346 for file in file_keys:
347 utils.move(file, Cnf["Dir::QueueAcceptedDir"]);
348 self.accept_bytes = self.accept_bytes + float(files[file]["size"])
349 self.accept_count = self.accept_count + 1;
351 # Send accept mail, announce to lists, close bugs and check for
352 # override disparities
353 if not Cnf["Dinstall::Options::No-Mail"]:
354 Subst["__SUITE__"] = "";
355 Subst["__SUMMARY__"] = summary;
356 mail_message = utils.TemplateSubst(Subst,open(Cnf["Dir::TemplatesDir"]+"/jennifer.accepted","r").read());
357 utils.send_mail(mail_message, "")
358 self.announce(short_summary, 1)
360 ## FIXME: this should go away to some Debian specific file
361 # If we're accepting something for unstable do extra work to
362 # make it auto-buildable from accepted
363 if self.pkg.changes["distribution"].has_key("unstable"):
364 self.projectB.query("BEGIN WORK");
365 # Add it to the list of packages for later processing by apt-ftparchive
366 for file in file_keys:
367 if files[file]["type"] == "dsc" or files[file]["type"] == "deb":
368 filename = os.path.join(Cnf["Dir::QueueAcceptedDir"], file);
369 self.projectB.query("INSERT INTO unstable_accepted (filename) VALUES ('%s')" % (filename));
370 # If the .orig.tar.gz is in the pool, create a symlink (if
371 # one doesn't already exist)
372 if self.pkg.orig_tar_id:
373 # Determine the .orig.tar.gz file name
374 for dsc_file in self.pkg.dsc_files.keys():
375 if dsc_file[-12:] == ".orig.tar.gz":
# NOTE(review): line 376 (presumably 'filename = dsc_file;') is missing
# from this view.
377 dest = os.path.join(Cnf["Dir::QueueAcceptedDir"],filename);
378 # If it doesn't exist, create a symlink
379 if not os.path.exists(dest):
380 # Find the .orig.tar.gz in the pool
381 q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id));
# NOTE(review): the result fetch / emptiness check (lines 382-383) is
# missing from this view; utils.fubar below is the no-match error path.
384 utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id));
385 src = os.path.join(ql[0][0], ql[0][1]);
386 os.symlink(src, dest);
387 self.projectB.query("COMMIT WORK");
389 ###########################################################################
# Compare each binary's section/priority against the override database
# values recorded by in_override_p() and mail the maintainer about any
# disparities (sourceful uploads only).
391 def check_override (self):
393 changes = self.pkg.changes;
394 files = self.pkg.files;
396 # Only check section & priority on sourceful uploads
397 if not changes["architecture"].has_key("source"):
# NOTE(review): the early return and the summary/Subst set-up (lines
# 398-400) are missing from this view.
401 for file in files.keys():
402 if not files[file].has_key("new") and files[file]["type"] == "deb":
403 section = files[file]["section"];
404 override_section = files[file]["override section"];
405 if lower(section) != lower(override_section) and section != "-":
406 # Ignore this; it's a common mistake and not worth whining about
407 if lower(section) == "non-us/main" and lower(override_section) == "non-us":
409 summary = summary + "%s: section is overridden from %s to %s.\n" % (file, section, override_section);
410 priority = files[file]["priority"];
411 override_priority = files[file]["override priority"];
412 if priority != override_priority and priority != "-":
413 summary = summary + "%s: priority is overridden from %s to %s.\n" % (file, priority, override_priority);
# NOTE(review): the empty-summary early exit and remaining Subst set-up
# (lines 414-417) are missing from this view.
418 Subst["__SUMMARY__"] = summary;
419 mail_message = utils.TemplateSubst(Subst,utils.open_file(self.Cnf["Dir::TemplatesDir"]+"/jennifer.override-disparity").read());
420 utils.send_mail (mail_message, "");
422 ###########################################################################
424 def force_move (self, files):
425 """Forcefully move files from the current directory to the reject
426 directory. If any file already exists it will be moved to the
427 morgue to make way for the new file."""
# NOTE(review): the loop header and the try/except framing (original
# lines 428-431, 434, 436, 438, 442, 448, 450, 452-453, 455-457) are
# missing from this view; the statements below are the visible bodies of
# those constructs.
432 # Skip any files which don't exist or which we don't have permission to copy.
433 if os.access(file,os.R_OK) == 0:
435 dest_file = os.path.join(Cnf["Dir::QueueRejectDir"], file);
# O_EXCL claims the destination atomically so a racing process cannot
# swap the file underneath us.
437 os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);
439 # File exists? Let's try and move it to the morgue
440 if errno.errorcode[e.errno] == 'EEXIST':
441 morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueRejectDir"],file);
443 morgue_file = utils.find_next_free(morgue_file);
444 except utils.tried_too_hard_exc:
445 # Something's either gone badly Pete Tong, or
446 # someone is trying to exploit us.
447 utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file));
449 utils.move(dest_file, morgue_file, perms=0660);
# Retry the exclusive claim after clearing the old file away.
451 os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);
454 utils.warn("**WARNING** failed to claim %s in the reject directory." % (file));
458 # If we got here, we own the destination file, so we can
459 # safely overwrite it.
460 utils.move(file, dest_file, 1, perms=0660);
463 ###########################################################################
# Reject the current upload: optionally let the operator compose a manual
# rejection message in $EDITOR, move everything to the reject queue,
# write the <foo>.reason file and mail the uploader.
465 def do_reject (self, manual = 0, reject_message = ""):
466 # If we weren't given a manual rejection message, spawn an
467 # editor so the user can add one in...
468 if manual and not reject_message:
469 temp_filename = tempfile.mktemp();
470 fd = os.open(temp_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0700);
472 editor = os.environ.get("EDITOR","vi")
# Loop until the operator settles on a rejection message.
475 os.system("%s %s" % (editor, temp_filename))
476 file = utils.open_file(temp_filename);
477 reject_message = string.join(file.readlines());
479 print "Reject message:";
480 print utils.prefix_multi_line_string(reject_message," ");
481 prompt = "[R]eject, Edit, Abandon, Quit ?"
483 while string.find(prompt, answer) == -1:
484 answer = utils.our_raw_input(prompt);
485 m = re_default_answer.search(prompt);
488 answer = string.upper(answer[:1]);
489 os.unlink(temp_filename);
# NOTE(review): lines 490-500 are missing from this view; presumably they
# handled the Edit/Abandon/Quit answers and bound pkg/Cnf/Subst locals
# used below.
501 reason_filename = pkg.changes_file[:-8] + ".reason";
502 reject_filename = Cnf["Dir::QueueRejectDir"] + '/' + reason_filename;
504 # Move all the files into the reject directory
505 reject_files = pkg.files.keys() + [pkg.changes_file];
506 self.force_move(reject_files);
508 # If we fail here someone is probably trying to exploit the race
509 # so let's just raise an exception ...
510 if os.path.exists(reject_filename):
511 os.unlink(reject_filename);
512 fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);
# Automatic rejections get a canned header ...
515 Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"];
516 Subst["__MANUAL_REJECT_MESSAGE__"] = "";
517 Subst["__CC__"] = "X-Katie-Rejection: automatic (moo)";
518 os.write(fd, reject_message);
520 reject_mail_message = utils.TemplateSubst(Subst,utils.open_file(Cnf["Dir::TemplatesDir"]+"/katie.rejected").read());
522 # Build up the rejection email
523 user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"]);
# ... whereas manual rejections are attributed to the operator.
525 Subst["__REJECTOR_ADDRESS__"] = user_email_address;
526 Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message;
527 Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"];
528 reject_mail_message = utils.TemplateSubst(Subst,utils.open_file(Cnf["Dir::TemplatesDir"]+"/katie.rejected").read());
530 # Write the rejection email out as the <foo>.reason file
531 os.write(fd, reject_mail_message);
534 # Send the rejection mail if appropriate
535 if not Cnf["Dinstall::Options::No-Mail"]:
536 utils.send_mail (reject_mail_message, "");
538 self.Logger.log(["rejected", pkg.changes_file]);
541 ################################################################################
543 # Ensure that source exists somewhere in the archive for the binary
544 # upload being processed.
546 # (1) exact match => 1.0-3
547 # (2) Bin-only NMU of an MU => 1.0-3.0.1
548 # (3) Bin-only NMU of a sourceful-NMU => 1.0-3.1.1
# Return whether source for (package, source_version) exists in the
# archive, accepting the binary-only-NMU version manglings described in
# the comment block above.
550 def source_exists (self, package, source_version):
551 q = self.projectB.query("SELECT s.version FROM source s WHERE s.source = '%s'" % (package));
553 # Reduce the query results to a list of version numbers
554 ql = map(lambda x: x[0], q.getresult());
# (1) Exact version match.
557 if ql.count(source_version):
# (2) Strip a bin-only-NMU-of-MU suffix (e.g. 1.0-3.0.1 -> 1.0-3).
561 orig_source_version = re_bin_only_nmu_of_mu.sub('', source_version);
562 if ql.count(orig_source_version):
# (3) Strip a bin-only-NMU-of-NMU suffix (e.g. 1.0-3.1.1 -> 1.0-3.1).
566 orig_source_version = re_bin_only_nmu_of_nmu.sub('', source_version);
567 if ql.count(orig_source_version):
# NOTE(review): the return statements for each branch and the final
# failure return (lines 555-560, 563-565, 568-572) are missing from this
# view.
573 ################################################################################
# Look up (package, component, suite, type) in the override database and
# record the override section/priority on files[file] so check_override()
# can report disparities later.
575 def in_override_p (self, package, component, suite, binary_type, file):
576 files = self.pkg.files;
578 if binary_type == "": # must be source
# NOTE(review): the assignments of 'type' for the source and binary cases
# (lines 579-582) are missing from this view.
583 # Override suite name; used for example with proposed-updates
584 if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
585 suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)];
587 # Avoid <undef> on unknown distributions
588 suite_id = db_access.get_suite_id(suite);
# NOTE(review): the suite_id validity check (lines 589-590) is missing
# from this view.
591 component_id = db_access.get_component_id(component);
592 type_id = db_access.get_override_type_id(type);
594 # FIXME: nasty non-US specific hack
595 if lower(component[:7]) == "non-us/":
596 component = component[7:];
598 q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
599 % (package, suite_id, component_id, type_id));
600 result = q.getresult();
601 # If checking for a source package fall back on the binary override type
602 if type == "dsc" and not result:
603 type_id = db_access.get_override_type_id("deb");
604 q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
605 % (package, suite_id, component_id, type_id));
606 result = q.getresult();
608 # Remember the section and priority so we can check them later if appropriate
# NOTE(review): the guard on 'result' being non-empty (line 609) is
# missing from this view.
610 files[file]["override section"] = result[0][0];
611 files[file]["override priority"] = result[0][1];
615 ################################################################################
# Append 'str' to the accumulated rejection message on self, prefixed
# (by default) with "Rejected: ".
617 def reject (self, str, prefix="Rejected: "):
# NOTE(review): a guard (original line 618, presumably 'if str:') is
# missing from this view.
619 # Unlike other rejects we add new lines first to avoid trailing
620 # new lines when this message is passed back up to a caller.
621 if self.reject_message:
622 self.reject_message = self.reject_message + "\n";
623 self.reject_message = self.reject_message + prefix + str;
# Check an uploaded binary against the target suite in the database:
# reject version regressions and attempts to overwrite files already in
# the archive. Returns the accumulated rejection message ("" if fine).
# NOTE(review): both queries are built by string interpolation from
# upload-supplied values (package/version/architecture) — this is
# SQL-injection-prone and should use escaping/parameters.
625 def check_binaries_against_db(self, file, suite):
626 self.reject_message = "";
627 files = self.pkg.files;
629 # Find any old binary packages
630 q = self.projectB.query("SELECT b.id, b.version, f.filename, l.path, c.name FROM binaries b, bin_associations ba, suite s, location l, component c, architecture a, files f WHERE b.package = '%s' AND s.suite_name = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all') AND ba.bin = b.id AND ba.suite = s.id AND b.architecture = a.id AND f.location = l.id AND l.component = c.id AND b.file = f.id"
631 % (files[file]["package"], suite, files[file]["architecture"]))
632 for oldfile in q.dictresult():
633 files[file]["oldfiles"][suite] = oldfile;
634 # Check versions [NB: per-suite only; no cross-suite checking done (yet)]
# VersionCompare > 0 means strictly newer; anything else is a regression
# or a re-upload of the same version.
635 if apt_pkg.VersionCompare(files[file]["version"], oldfile["version"]) != 1:
636 self.reject("%s: old version (%s) >= new version (%s)." % (file, oldfile["version"], files[file]["version"]));
637 # Check for any existing copies of the file
638 q = self.projectB.query("SELECT b.id FROM binaries b, architecture a WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s' AND a.id = b.architecture" % (files[file]["package"], files[file]["version"], files[file]["architecture"]))
639 if q.getresult() != []:
640 self.reject("can not overwrite existing copy of '%s' already in the archive." % (file));
642 return self.reject_message;
644 ################################################################################
646 def check_source_against_db(self, file):
647 """Ensure source is newer than existing source in target suites."""
648 self.reject_message = "";
649 changes = self.pkg.changes;
# NOTE(review): the binding of 'dsc' (presumably 'dsc = self.pkg.dsc',
# original lines 650-651) is missing from this view.
652 package = dsc.get("source");
653 new_version = dsc.get("version");
# Compare the proposed version against every target suite.
654 for suite in changes["distribution"].keys():
655 q = self.projectB.query("SELECT s.version FROM source s, src_associations sa, suite su WHERE s.source = '%s' AND su.suite_name = '%s' AND sa.source = s.id AND sa.suite = su.id"
# NOTE(review): the query's parameter tuple (original line 656) is
# missing from this view.
657 ql = map(lambda x: x[0], q.getresult());
658 for old_version in ql:
# Reject unless the new version is strictly greater than every existing one.
659 if apt_pkg.VersionCompare(new_version, old_version) != 1:
660 self.reject("%s: Old version `%s' >= new version `%s'." % (file, old_version, new_version));
661 return self.reject_message;
663 ################################################################################
# Verify every file listed in the .dsc: check md5sum/size against what is
# already in the archive or queue directories, and work out where any
# .orig.tar.gz lives (pool, legacy location or a queue directory).
# Returns (reject_message, path-of-orig-tar-gz-or-queue-file).
665 def check_dsc_against_db(self, file):
666 self.reject_message = "";
667 files = self.pkg.files;
668 dsc_files = self.pkg.dsc_files;
669 legacy_source_untouchable = self.pkg.legacy_source_untouchable;
672 # Try and find all files mentioned in the .dsc. This has
673 # to work harder to cope with the multiple possible
674 # locations of an .orig.tar.gz.
675 for dsc_file in dsc_files.keys():
677 if files.has_key(dsc_file):
# The file is part of this upload: take its checksum/size from there.
678 actual_md5 = files[dsc_file]["md5sum"];
679 actual_size = int(files[dsc_file]["size"]);
680 found = "%s in incoming" % (dsc_file)
681 # Check the file does not already exist in the archive
682 q = self.projectB.query("SELECT f.id FROM files f, location l WHERE (f.filename ~ '/%s$' OR f.filename = '%s') AND l.id = f.location" % (utils.regex_safe(dsc_file), dsc_file));
684 # "It has not broken them. It has fixed a
685 # brokenness. Your crappy hack exploited a bug in
# NOTE(review): part of this quoted justification (original lines
# 686-687) is missing from this view.
688 # "(Come on! I thought it was always obvious that
689 # one just doesn't release different files with
690 # the same name and version.)"
691 # -- ajk@ on d-devel@l.d.o
693 if q.getresult() != []:
694 self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file));
695 elif dsc_file[-12:] == ".orig.tar.gz":
# Not part of the upload: look for the .orig.tar.gz in the archive.
697 q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE (f.filename ~ '/%s$' OR f.filename = '%s') AND l.id = f.location" % (utils.regex_safe(dsc_file), dsc_file));
701 # Unfortunately, we may get more than one
702 # match here if, for example, the package was
703 # in potato but had a -sa upload in woody. So
704 # we need to choose the right one.
706 x = ql[0]; # default to something sane in case we don't match any or have only one
# Prefer the candidate whose on-disk md5sum/size match the .dsc entry.
# NOTE(review): the surrounding loop/guard lines (705, 707-709, 714-715,
# 717, 721-722, 728) are missing from this view.
710 old_file = i[0] + i[1];
711 actual_md5 = apt_pkg.md5sum(utils.open_file(old_file));
712 actual_size = os.stat(old_file)[stat.ST_SIZE];
713 if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
# Non-matching legacy copies are recorded as untouchable.
716 legacy_source_untouchable[i[3]] = "";
718 old_file = x[0] + x[1];
719 actual_md5 = apt_pkg.md5sum(utils.open_file(old_file));
720 actual_size = os.stat(old_file)[stat.ST_SIZE];
723 dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
724 # See install() in katie...
725 self.pkg.orig_tar_id = x[3];
726 if suite_type == "legacy" or suite_type == "legacy-mixed":
727 self.pkg.orig_tar_location = "legacy";
729 self.pkg.orig_tar_location = x[4];
731 # Not there? Check the queue directories...
733 in_unchecked = os.path.join(self.Cnf["Dir::QueueUncheckedDir"],dsc_file);
734 # See process_it() in jennifer for explanation of this
735 if os.path.exists(in_unchecked):
736 return (self.reject_message, in_unchecked);
738 for dir in [ "Accepted", "New", "Byhand" ]:
739 in_otherdir = os.path.join(self.Cnf["Dir::Queue%sDir" % (dir)],dsc_file);
740 if os.path.exists(in_otherdir):
741 actual_md5 = apt_pkg.md5sum(utils.open_file(in_otherdir));
742 actual_size = os.stat(in_otherdir)[stat.ST_SIZE];
# Neither in the pool nor any queue directory: reject.
746 self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file));
749 self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file));
# Finally verify checksum and size against what the .dsc claims.
751 if actual_md5 != dsc_files[dsc_file]["md5sum"]:
752 self.reject("md5sum for %s doesn't match %s." % (found, file));
753 if actual_size != int(dsc_files[dsc_file]["size"]):
754 self.reject("size for %s doesn't match %s." % (found, file));
756 return (self.reject_message, orig_tar_gz);
# Debug helper: run a query against projectB, reporting the query text
# and its wall-clock duration on stderr.
758 def do_query(self, q):
759 sys.stderr.write("query: \"%s\" ... " % (q));
760 before = time.time();
761 r = self.projectB.query(q);
762 time_diff = time.time()-before;
763 sys.stderr.write("took %.3f seconds.\n" % (time_diff));
# NOTE(review): the function may continue past this view; a 'return r'
# would be expected here since the result is bound.