3 # Utility functions for katie
4 # Copyright (C) 2001 James Troup <james@nocrew.org>
5 # $Id: katie.py,v 1.17 2002-04-29 22:00:44 troup Exp $
7 # This program is free software; you can redistribute it and/or modify
8 # it under the terms of the GNU General Public License as published by
9 # the Free Software Foundation; either version 2 of the License, or
10 # (at your option) any later version.
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU General Public License for more details.
17 # You should have received a copy of the GNU General Public License
18 # along with this program; if not, write to the Free Software
19 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 ###############################################################################
23 import cPickle, errno, os, pg, re, stat, string, sys, tempfile, time;
24 import utils, db_access;
25 import apt_inst, apt_pkg;
28 from string import lower;
30 ###############################################################################
# Pre-compiled regular expressions used throughout this module.
# All patterns are raw strings so backslash escapes (\d, \., \[) reach
# the regex engine verbatim instead of relying on Python passing
# unknown string escapes through unchanged.
re_isanum = re.compile (r"^\d+$");
# Extracts the default answer from a "[R]eject, Edit, ..." style prompt.
re_default_answer = re.compile(r"\[(.*)\]");
# Blank line separating paragraphs in a changes description.
re_fdnic = re.compile(r"\n\n");
# Bin-only NMU of a maintainer upload: version ends in ".X.Y" (e.g. 1.0-3.0.1).
re_bin_only_nmu_of_mu = re.compile(r"\.\d+\.\d+$");
# Bin-only NMU of a sourceful NMU: version ends in ".X" (e.g. 1.0-3.1.1).
re_bin_only_nmu_of_nmu = re.compile(r"\.\d+$");
38 ###############################################################################
40 # Convenience wrapper to carry around all the package information in
43 def __init__(self, **kwds):
44 self.__dict__.update(kwds);
def update(self, **kwds):
    """Merge the given keyword arguments into this object's attributes,
    overwriting any that already exist."""
    for key, value in kwds.items():
        self.__dict__[key] = value;
49 ###############################################################################
# Read in the group maintainer override file
def __init__ (self, Cnf):
    """Build the set of group-maintainer addresses; membership in
    self.group_maint means "group maintained, hence never an NMU"."""
    # Keys are lower-cased maintainer e-mail addresses; values are a dummy 1.
    self.group_maint = {};
    # NOTE(review): line(s) missing from this chunk here -- is_an_nmu()
    # below uses self.Cnf, presumably bound here; verify.
    if Cnf.get("Dinstall::GroupOverrideFilename"):
        filename = Cnf["Dir::OverrideDir"] + Cnf["Dinstall::GroupOverrideFilename"];
        file = utils.open_file(filename);
        for line in file.readlines():
            # Strip comments/whitespace and normalise case before storing.
            line = lower(string.strip(utils.re_comments.sub('', line)));
            # NOTE(review): a line is missing from the source here
            # (probably a guard skipping empty lines); verify.
            self.group_maint[line] = 1;
def is_an_nmu (self, pkg):
    """Return whether the upload in 'pkg' looks like an NMU
    (non-maintainer upload).

    NOTE(review): several lines are missing from this chunk (variable
    bindings, loop headers and return statements); the indentation
    below is a best-effort reconstruction -- verify against full source."""
    # NOTE(review): missing line(s) -- 'Cnf' and 'dsc' are used below,
    # presumably bound via "Cnf = self.Cnf" and "dsc = pkg.dsc".
    changes = pkg.changes;
    (dsc_rfc822, dsc_name, dsc_email) = utils.fix_maintainer (lower(dsc.get("maintainer",Cnf["Dinstall::MyEmailAddress"])));
    # changes["changedbyname"] == dsc_name is probably never true, but better safe than sorry
    if dsc_name == lower(changes["maintainername"]) and \
       (changes["changedby822"] == "" or lower(changes["changedbyname"]) == dsc_name):
        # NOTE(review): missing body -- presumably "return 0;" (not an NMU).
    if dsc.has_key("uploaders"):
        uploaders = string.split(lower(dsc["uploaders"]), ",");
        # NOTE(review): missing lines -- presumably "uploadernames = {};"
        # and a "for i in uploaders:" loop header enclosing the next two lines.
        (rfc822, name, email) = utils.fix_maintainer (string.strip(i));
        uploadernames[name] = "";
        if uploadernames.has_key(lower(changes["changedbyname"])):
            # NOTE(review): missing body -- presumably "return 0;".
    # Some group maintained packages (e.g. Debian QA) are never NMU's
    if self.group_maint.has_key(lower(changes["maintaineremail"])):
        # NOTE(review): missing body -- presumably "return 0;", and a
        # final "return 1;" is missing from the end of this method.
91 ###############################################################################
def __init__(self, Cnf):
    """Set up an upload-handling object: NMU detector, per-upload
    package state, the mail-template substitution map and the projectb
    database connection."""
    # NOTE(review): line(s) missing at the start of this chunk --
    # other methods use self.Cnf, presumably bound here; verify.
    # Read in the group-maint override file
    self.nmu = nmu_p(Cnf);
    # Running totals reported after processing a batch of uploads.
    self.accept_count = 0;
    self.accept_bytes = 0L;
    # Per-upload state; reset via init_vars()/update_vars() between uploads.
    self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
                   legacy_source_untouchable = {});

    # Initialize the substitution template mapping global
    Subst = self.Subst = {};
    Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"];
    Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"];
    Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"];
    Subst["__KATIE_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"];

    # Open the projectb connection and initialise db_access's caches.
    self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]));
    db_access.init(Cnf, self.projectB);
115 ###########################################################################
def init_vars (self):
    """Reset all per-upload package state ready for the next upload.

    The dictionaries are cleared in place (rather than rebound) so any
    outside references to them stay valid."""
    for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
        # getattr() replaces the old string-building exec, which was
        # fragile and needlessly dynamic for a fixed attribute list.
        getattr(self.pkg, i).clear();
    self.pkg.orig_tar_id = None;
    self.pkg.orig_tar_location = "";
123 ###########################################################################
def update_vars (self):
    """Reload per-upload package state from the <changes>.katie dump
    file written earlier by dump_vars().

    The dump is a fixed sequence of pickled objects; they are read
    back in exactly the order dump_vars() wrote them."""
    dump_filename = self.pkg.changes_file[:-8]+".katie";
    dump_file = utils.open_file(dump_filename);
    p = cPickle.Unpickler(dump_file);
    # Dictionaries are update()d in place so outside references survive.
    for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
        # getattr() replaces the old string-building exec -- same
        # behaviour, no dynamic code generation.
        getattr(self.pkg, i).update(p.load());
    for i in [ "orig_tar_id", "orig_tar_location" ]:
        setattr(self.pkg, i, p.load());
    # Close the dump file explicitly rather than leaking the handle.
    dump_file.close();
135 ###########################################################################
# This could just dump the dictionaries as is, but I'd like to avoid
# this so there's some idea of what katie & lisa use from jennifer
def dump_vars(self, dest_dir):
    """Pickle a filtered copy of the per-upload state into
    <changes>.katie inside 'dest_dir'.

    NOTE(review): several lines are missing from this chunk; the
    indentation below is a best-effort reconstruction."""
    # Bind each self.pkg.<name> to a local of the same name.
    for i in [ "changes", "dsc", "files", "dsc_files",
               "legacy_source_untouchable", "orig_tar_id", "orig_tar_location" ]:
        exec "%s = self.pkg.%s;" % (i,i);
    dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".katie");
    dump_file = utils.open_file(dump_filename, 'w');
    # Protocol 1 (binary) pickler.
    p = cPickle.Pickler(dump_file, 1);
    for i in [ "d_changes", "d_dsc", "d_files", "d_dsc_files" ]:
        # NOTE(review): missing body -- presumably initialises each
        # d_* filtered dictionary to {}.
    for file in files.keys():
        # NOTE(review): missing line -- presumably "d_files[file] = {};".
        # Copy only the whitelisted per-file keys.
        for i in [ "package", "version", "architecture", "type", "size",
                   "md5sum", "component", "location id", "source package",
                   "source version", "maintainer", "dbtype", "files id",
                   "new", "section", "priority", "oldfiles", "othercomponents" ]:
            if files[file].has_key(i):
                d_files[file][i] = files[file][i];
    # Mandatory changes fields
    for i in [ "distribution", "source", "architecture", "version", "maintainer",
               "urgency", "fingerprint", "changedby822", "changedbyname",
               "maintainername", "maintaineremail", "closes" ]:
        d_changes[i] = changes[i];
    # Optional changes fields
    # FIXME: changes should be mandatory
    for i in [ "changed-by", "maintainer822", "filecontents", "format", "changes" ]:
        if changes.has_key(i):
            d_changes[i] = changes[i];
    for i in [ "source", "version", "maintainer", "fingerprint", "uploaders" ]:
        # NOTE(review): missing body -- presumably copies dsc[i] into d_dsc.
    for file in dsc_files.keys():
        d_dsc_files[file] = {};
        # Mandatory dsc_files fields
        for i in [ "size", "md5sum" ]:
            d_dsc_files[file][i] = dsc_files[file][i];
        # Optional dsc_files fields
        for i in [ "files id" ]:
            if dsc_files[file].has_key(i):
                d_dsc_files[file][i] = dsc_files[file][i];
    # The fixed order here must match update_vars()'s read order.
    for i in [ d_changes, d_dsc, d_files, d_dsc_files,
               legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
        # NOTE(review): missing body -- presumably "p.dump(i);", and
        # the dump file is presumably closed afterwards.
189 ###########################################################################
# Set up the per-package template substitution mappings
def update_subst (self, reject_message = ""):
    """Refresh the mail-template substitution map from the current
    upload's .changes data, guarding against partially-parsed uploads."""
    # NOTE(review): line(s) missing from this chunk here -- 'Subst' is
    # used below, presumably bound via "Subst = self.Subst".  DictType
    # presumably comes from a truncated "from types import *" import.
    changes = self.pkg.changes;
    # If jennifer crashed out in the right place, architecture may still be a string.
    if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
        changes["architecture"] = { "Unknown" : "" };
    # and maintainer822 may not exist.
    if not changes.has_key("maintainer822"):
        changes["maintainer822"] = self.Cnf["Dinstall::MyEmailAddress"];

    Subst["__ARCHITECTURE__"] = string.join(changes["architecture"].keys(), ' ' );
    Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file);
    Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "");

    # For source uploads the Changed-By field wins; otherwise Maintainer wins.
    if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
        Subst["__MAINTAINER_FROM__"] = changes["changedby822"];
        Subst["__MAINTAINER_TO__"] = changes["changedby822"] + ", " + changes["maintainer822"];
        Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown");
    # NOTE(review): an "else:" line appears to be missing here -- the
    # next three assignments are the maintainer (non-Changed-By) branch.
        Subst["__MAINTAINER_FROM__"] = changes["maintainer822"];
        Subst["__MAINTAINER_TO__"] = changes["maintainer822"];
        Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown");
    # Bcc the package tracking server on sourceful uploads, if configured.
    if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
        Subst["__MAINTAINER_TO__"] = Subst["__MAINTAINER_TO__"] + "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])

    Subst["__REJECT_MESSAGE__"] = reject_message;
    Subst["__SOURCE__"] = changes.get("source", "Unknown");
    Subst["__VERSION__"] = changes.get("version", "Unknown");
223 ###########################################################################
def build_summaries(self):
    """Build the long and short textual summaries of this upload used
    in the accepted/announce mails; returns (summary, short_summary).

    NOTE(review): several lines are missing from this chunk; the
    indentation below is a best-effort reconstruction."""
    changes = self.pkg.changes;
    files = self.pkg.files;
    # NOTE(review): line(s) missing from this chunk here.
    byhand = summary = new = "";

    # changes["distribution"] may not exist in corner cases
    # (e.g. unreadable changes files)
    if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
        changes["distribution"] = {};

    file_keys = files.keys();
    for file in file_keys:
        if files[file].has_key("byhand"):
            # NOTE(review): missing line -- presumably "byhand = 1;".
            summary = summary + file + " byhand\n"
        elif files[file].has_key("new"):
            # NOTE(review): missing line -- presumably "new = 1;".
            summary = summary + "(new) %s %s %s\n" % (file, files[file]["priority"], files[file]["section"])
            if files[file].has_key("othercomponents"):
                summary = summary + "WARNING: Already present in %s distribution.\n" % (files[file]["othercomponents"])
            if files[file]["type"] == "deb":
                summary = summary + apt_pkg.ParseSection(apt_inst.debExtractControl(utils.open_file(file)))["Description"] + '\n';
        # NOTE(review): a branch header ("else:") appears to be missing
        # here -- the pool-destination lines handle ordinary files.
            files[file]["pool name"] = utils.poolify (changes["source"], files[file]["component"])
            destination = self.Cnf["Dir::PoolRoot"] + files[file]["pool name"] + file
            summary = summary + file + "\n to " + destination + "\n"
    short_summary = summary;

    # This is for direport's benefit...
    f = re_fdnic.sub("\n .\n", changes.get("changes",""));
    # NOTE(review): line(s) missing from this chunk here -- presumably
    # the Changes section is only appended for non-NEW uploads; verify.
    summary = summary + "Changes: " + f;
    summary = summary + self.announce(short_summary, 0)
    return (summary, short_summary);
266 ###########################################################################
def announce (self, short_summary, action):
    """Announce a sourceful upload to the per-suite lists and send the
    bug-closing (or bug-fixed for NMUs) mails.  When 'action' is false
    this presumably only builds and returns the summary text.

    NOTE(review): many lines are missing from this chunk; the
    indentation below is a best-effort reconstruction."""
    # NOTE(review): missing lines -- 'Subst', 'Cnf', 'lists_done' and
    # 'summary' are used below but their bindings are not visible.
    changes = self.pkg.changes;

    # Only do announcements for source uploads with a recent dpkg-dev installed
    if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):
        # NOTE(review): missing body -- presumably returns "".

    Subst["__SHORT_SUMMARY__"] = short_summary;

    for dist in changes["distribution"].keys():
        list = Cnf.Find("Suite::%s::Announce" % (dist))
        # Skip suites with no list and lists already announced to.
        if list == "" or lists_done.has_key(list):
            # NOTE(review): missing body -- presumably "continue" plus
            # recording the list in lists_done.
        summary = summary + "Announcing to %s\n" % (list)
        # NOTE(review): missing line -- presumably "if action:" guards
        # the actual mail sending below.
        Subst["__ANNOUNCE_LIST_ADDRESS__"] = list;
        if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
            Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"])
        mail_message = utils.TemplateSubst(Subst,open(Cnf["Dir::TemplatesDir"]+"/jennifer.announce","r").read());
        utils.send_mail (mail_message, "")

    bugs = changes["closes"].keys()
    # NOTE(review): missing line -- presumably "bugs.sort();".
    # Maintainer uploads close bugs; NMUs only tag them fixed.
    if not self.nmu.is_an_nmu(self.pkg):
        summary = summary + "Closing bugs: "
        # NOTE(review): missing line -- presumably "for bug in bugs:"
        # encloses the lines below, plus an "if action:" guard.
        summary = summary + "%s " % (bug)
        Subst["__BUG_NUMBER__"] = bug;
        if changes["distribution"].has_key("stable"):
            # NOTE(review): the closing '"""' of this string and the
            # matching "else:" line are missing from this chunk.
            Subst["__STABLE_WARNING__"] = """
Note that this package is not part of the released stable Debian
distribution. It may have dependencies on other unreleased software,
or other instabilities. Please take care if you wish to install it.
The update will eventually make its way into the next released Debian
            Subst["__STABLE_WARNING__"] = "";
        mail_message = utils.TemplateSubst(Subst,open(Cnf["Dir::TemplatesDir"]+"/jennifer.bug-close","r").read());
        utils.send_mail (mail_message, "")
        # NOTE(review): missing guard line -- presumably "if action:".
        self.Logger.log(["closing bugs"]+bugs);
    # NOTE(review): missing "else:" (NMU branch) header here.
    summary = summary + "Setting bugs to severity fixed: "
    # NOTE(review): missing lines -- presumably control_message = ""
    # and a "for bug in bugs:" loop header.
    summary = summary + "%s " % (bug)
    control_message = control_message + "tag %s + fixed\n" % (bug)
    if action and control_message != "":
        Subst["__CONTROL_MESSAGE__"] = control_message;
        mail_message = utils.TemplateSubst(Subst,open(Cnf["Dir::TemplatesDir"]+"/jennifer.bug-nmu-fixed","r").read());
        utils.send_mail (mail_message, "")
        # NOTE(review): line(s) missing from this chunk here.
        self.Logger.log(["setting bugs to fixed"]+bugs);
    summary = summary + "\n"
    # NOTE(review): missing line -- presumably "return summary;".
333 ###########################################################################
def accept (self, summary, short_summary):
    """Accept the current upload: dump state, move files into the
    accepted queue, send the accepted mail, announce, and (optionally)
    populate the accepted-autobuild area.

    NOTE(review): several lines are missing from this chunk; the
    indentation below is a best-effort reconstruction."""
    # NOTE(review): missing lines -- 'Cnf' and 'Subst' are used below
    # but their bindings (presumably from self) are not visible.
    files = self.pkg.files;

    self.Logger.log(["Accepting changes",self.pkg.changes_file]);

    # Dump state so lisa/katie can pick the upload up later.
    self.dump_vars(Cnf["Dir::QueueAcceptedDir"]);

    # Move all the files into the accepted directory
    utils.move(self.pkg.changes_file, Cnf["Dir::QueueAcceptedDir"]);
    file_keys = files.keys();
    for file in file_keys:
        utils.move(file, Cnf["Dir::QueueAcceptedDir"]);
        self.accept_bytes = self.accept_bytes + float(files[file]["size"])
    self.accept_count = self.accept_count + 1;

    # Send accept mail, announce to lists, close bugs and check for
    # override disparities
    if not Cnf["Dinstall::Options::No-Mail"]:
        Subst["__SUITE__"] = "";
        Subst["__SUMMARY__"] = summary;
        mail_message = utils.TemplateSubst(Subst,open(Cnf["Dir::TemplatesDir"]+"/jennifer.accepted","r").read());
        utils.send_mail(mail_message, "")
        self.announce(short_summary, 1)

    # Special support to enable clean auto-building of accepted packages
    if Cnf.get("Dinstall::SpecialAcceptedAutoBuild") and \
       self.pkg.changes["distribution"].has_key("unstable"):
        self.projectB.query("BEGIN WORK");
        for file in file_keys:
            src = os.path.join(Cnf["Dir::QueueAcceptedDir"], file);
            dest = os.path.join(Cnf["Dir::AcceptedAutoBuild"], file);
            # Create a symlink to it
            os.symlink(src, dest);
            # Add it to the list of packages for later processing by apt-ftparchive
            self.projectB.query("INSERT INTO unstable_accepted (filename, in_accepted) VALUES ('%s', 't')" % (dest));
        # If the .orig.tar.gz is in the pool, create a symlink to
        # it (if one doesn't already exist)
        if self.pkg.orig_tar_id:
            # Determine the .orig.tar.gz file name
            for dsc_file in self.pkg.dsc_files.keys():
                if dsc_file[-12:] == ".orig.tar.gz":
                    # NOTE(review): missing body -- presumably
                    # "filename = dsc_file;" ('filename' is used below).
            dest = os.path.join(Cnf["Dir::AcceptedAutoBuild"],filename);
            # If it doesn't exist, create a symlink
            if not os.path.exists(dest):
                # Find the .orig.tar.gz in the pool
                q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id));
                # NOTE(review): missing lines -- presumably
                # "ql = q.getresult();" plus an emptiness check that
                # guards the fubar() call below.
                utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id));
                src = os.path.join(ql[0][0], ql[0][1]);
                os.symlink(src, dest);
                # Add it to the list of packages for later processing by apt-ftparchive
                self.projectB.query("INSERT INTO unstable_accepted (filename, in_accepted) VALUES ('%s', 't')" % (dest));
        self.projectB.query("COMMIT WORK");
395 ###########################################################################
def check_override (self):
    """Compare each binary's section/priority against the override
    database and mail the maintainer about any disparities."""
    # NOTE(review): line(s) missing from this chunk here.
    changes = self.pkg.changes;
    files = self.pkg.files;

    # Only check section & priority on sourceful uploads
    if not changes["architecture"].has_key("source"):
        # NOTE(review): missing body -- presumably "return;", followed
        # by initialising 'summary' to "" for the loop below.

    for file in files.keys():
        if not files[file].has_key("new") and files[file]["type"] == "deb":
            section = files[file]["section"];
            override_section = files[file]["override section"];
            if lower(section) != lower(override_section) and section != "-":
                # Ignore this; it's a common mistake and not worth whining about
                if lower(section) == "non-us/main" and lower(override_section) == "non-us":
                    # NOTE(review): missing body -- presumably "continue;".
                summary = summary + "%s: section is overridden from %s to %s.\n" % (file, section, override_section);
            priority = files[file]["priority"];
            override_priority = files[file]["override priority"];
            if priority != override_priority and priority != "-":
                summary = summary + "%s: priority is overridden from %s to %s.\n" % (file, priority, override_priority);

    # NOTE(review): missing lines -- presumably an early return when
    # 'summary' is empty and the Subst/mail setup.
    Subst["__SUMMARY__"] = summary;
    mail_message = utils.TemplateSubst(Subst,utils.open_file(self.Cnf["Dir::TemplatesDir"]+"/jennifer.override-disparity").read());
    utils.send_mail (mail_message, "");
428 ###########################################################################
def force_move (self, files):
    """Forcefully move files from the current directory to the reject
    directory. If any file already exists it will be moved to the
    morgue to make way for the new file.

    NOTE(review): the try/except scaffolding is missing from this
    chunk; indentation below is a best-effort reconstruction."""
    # NOTE(review): missing lines -- presumably "Cnf = self.Cnf" and a
    # "for file in files:" loop header enclosing the body below.
    # Skip any files which don't exist or which we don't have permission to copy.
    if os.access(file,os.R_OK) == 0:
        # NOTE(review): missing body -- presumably "continue;".
    dest_file = os.path.join(Cnf["Dir::QueueRejectDir"], file);
    # O_EXCL makes this open fail if the destination already exists --
    # that is how we detect (and then morgue) a pre-existing file.
    # NOTE(review): missing "try:" line before the open.
    os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);
    # NOTE(review): missing "except OSError, e:" line here.
    # File exists? Let's try and move it to the morgue
    if errno.errorcode[e.errno] == 'EEXIST':
        morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueRejectDir"],file);
        # NOTE(review): missing "try:" line here.
        morgue_file = utils.find_next_free(morgue_file);
        except utils.tried_too_hard_exc:
            # Something's either gone badly Pete Tong, or
            # someone is trying to exploit us.
            utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file));
            # NOTE(review): missing line -- presumably "return;".
        utils.move(dest_file, morgue_file, perms=0660);
        # NOTE(review): missing "try:" line (second O_EXCL attempt).
        os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);
        # NOTE(review): missing "except OSError, e:" line here.
        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file));
        # NOTE(review): missing lines -- presumably report e and return.

    # If we got here, we own the destination file, so we can
    # safely overwrite it.
    utils.move(file, dest_file, 1, perms=0660);
468 ###########################################################################
def do_reject (self, manual = 0, reject_message = ""):
    """Reject the current upload: optionally let the operator compose
    a rejection message in $EDITOR, move the files to the reject queue
    and write/send the rejection mail.

    NOTE(review): a large span is missing from the middle of this
    chunk; indentation below is a best-effort reconstruction."""
    # If we weren't given a manual rejection message, spawn an
    # editor so the user can add one in...
    if manual and not reject_message:
        # NOTE(review): tempfile.mktemp() is race-prone; the O_EXCL
        # open below is what actually claims the name.
        temp_filename = tempfile.mktemp();
        fd = os.open(temp_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0700);
        # NOTE(review): line(s) missing here (presumably closes fd and
        # begins the edit/confirm loop).
        editor = os.environ.get("EDITOR","vi")
        # NOTE(review): missing loop header line(s) here.
        os.system("%s %s" % (editor, temp_filename))
        file = utils.open_file(temp_filename);
        reject_message = string.join(file.readlines());
        # NOTE(review): missing line -- presumably "file.close();".
        print "Reject message:";
        print utils.prefix_multi_line_string(reject_message," ");
        prompt = "[R]eject, Edit, Abandon, Quit ?"
        # NOTE(review): missing line -- presumably initialises 'answer'.
        while string.find(prompt, answer) == -1:
            answer = utils.our_raw_input(prompt);
            m = re_default_answer.search(prompt);
            # NOTE(review): missing lines -- presumably defaults an
            # empty answer to m.group(1).
            answer = string.upper(answer[:1]);
        os.unlink(temp_filename);
        # NOTE(review): a larger span is missing here (handling of the
        # Edit/Abandon/Quit answers and the local Cnf/Subst/pkg
        # bindings used below).

    reason_filename = pkg.changes_file[:-8] + ".reason";
    reject_filename = Cnf["Dir::QueueRejectDir"] + '/' + reason_filename;

    # Move all the files into the reject directory
    reject_files = pkg.files.keys() + [pkg.changes_file];
    self.force_move(reject_files);

    # If we fail here someone is probably trying to exploit the race
    # so let's just raise an exception ...
    if os.path.exists(reject_filename):
        os.unlink(reject_filename);
    fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);

    # NOTE(review): missing guard line -- presumably "if not manual:"
    # selects this automatic-rejection branch.
    Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"];
    Subst["__MANUAL_REJECT_MESSAGE__"] = "";
    Subst["__CC__"] = "X-Katie-Rejection: automatic (moo)";
    os.write(fd, reject_message);
    reject_mail_message = utils.TemplateSubst(Subst,utils.open_file(Cnf["Dir::TemplatesDir"]+"/katie.rejected").read());
    # NOTE(review): missing "else:" (manual) branch header here.
    # Build up the rejection email
    user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"]);
    Subst["__REJECTOR_ADDRESS__"] = user_email_address;
    Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message;
    Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"];
    reject_mail_message = utils.TemplateSubst(Subst,utils.open_file(Cnf["Dir::TemplatesDir"]+"/katie.rejected").read());

    # Write the rejection email out as the <foo>.reason file
    os.write(fd, reject_mail_message);
    # NOTE(review): missing line -- presumably "os.close(fd);".

    # Send the rejection mail if appropriate
    if not Cnf["Dinstall::Options::No-Mail"]:
        utils.send_mail (reject_mail_message, "");

    self.Logger.log(["rejected", pkg.changes_file]);
    # NOTE(review): missing line -- presumably a return value.
546 ################################################################################
# Ensure that source exists somewhere in the archive for the binary
# upload being processed.
# (1) exact match => 1.0-3
# (2) Bin-only NMU of an MU => 1.0-3.0.1
# (3) Bin-only NMU of a sourceful-NMU => 1.0-3.1.1
def source_exists (self, package, source_version):
    """Return whether 'source_version' of 'package' (or the version a
    bin-only NMU would have been built from) exists in the source table."""
    q = self.projectB.query("SELECT s.version FROM source s WHERE s.source = '%s'" % (package));

    # Reduce the query results to a list of version numbers
    ql = map(lambda x: x[0], q.getresult());
    # (1) Exact match.
    if ql.count(source_version):
        # NOTE(review): missing body -- presumably "return 1;".
    # (2) Bin-only NMU of a maintainer upload: strip the ".X.Y" suffix.
    orig_source_version = re_bin_only_nmu_of_mu.sub('', source_version);
    if ql.count(orig_source_version):
        # NOTE(review): missing body -- presumably "return 1;".
    # (3) Bin-only NMU of a sourceful NMU: strip the ".X" suffix.
    orig_source_version = re_bin_only_nmu_of_nmu.sub('', source_version);
    if ql.count(orig_source_version):
        # NOTE(review): missing body -- presumably "return 1;", with a
        # final "return 0;" missing from the end of this method.
578 ################################################################################
def in_override_p (self, package, component, suite, binary_type, file):
    """Look up 'package' in the override table for 'suite'; on a hit,
    record the override section/priority on files[file].

    NOTE(review): lines are missing from this chunk (the 'type'
    binding and the return statements)."""
    files = self.pkg.files;

    if binary_type == "": # must be source
        # NOTE(review): missing lines -- presumably sets type = "dsc"
        # with an else branch setting type = binary_type.

    # Override suite name; used for example with proposed-updates
    if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
        suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)];

    # Avoid <undef> on unknown distributions
    suite_id = db_access.get_suite_id(suite);
    # NOTE(review): missing lines -- presumably an early return when
    # suite_id is unknown (-1).
    component_id = db_access.get_component_id(component);
    type_id = db_access.get_override_type_id(type);

    # FIXME: nasty non-US speficic hack
    # NOTE(review): 'component' is stripped *after* component_id was
    # already computed and is not used again in the visible code --
    # this looks like a no-op or an ordering bug; verify.
    if lower(component[:7]) == "non-us/":
        component = component[7:];

    q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
                            % (package, suite_id, component_id, type_id));
    result = q.getresult();
    # If checking for a source package fall back on the binary override type
    if type == "dsc" and not result:
        type_id = db_access.get_override_type_id("deb");
        q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
                                % (package, suite_id, component_id, type_id));
        result = q.getresult();

    # Remember the section and priority so we can check them later if appropriate
    # NOTE(review): missing guard line -- presumably "if result != []:".
    files[file]["override section"] = result[0][0];
    files[file]["override priority"] = result[0][1];
    # NOTE(review): missing return line(s).
620 ################################################################################
def reject (self, str, prefix="Rejected: "):
    """Append one rejection line to self.reject_message.

    A newline separator is inserted *before* the new line (only when
    the message already has content), so callers never receive a
    message with a trailing newline."""
    separator = "";
    if self.reject_message:
        separator = "\n";
    self.reject_message = self.reject_message + separator + prefix + str;
def check_binaries_against_db(self, file, suite):
    """Check the binary 'file' against existing archive contents in
    'suite': reject version regressions and exact duplicates.
    Returns the accumulated rejection message ("" when fine)."""
    self.reject_message = "";
    files = self.pkg.files;
    # NOTE(review): a line is missing from this chunk here -- possibly
    # initialisation of files[file]["oldfiles"], which is indexed below.

    # Find any old binary packages
    q = self.projectB.query("SELECT b.id, b.version, f.filename, l.path, c.name FROM binaries b, bin_associations ba, suite s, location l, component c, architecture a, files f WHERE b.package = '%s' AND s.suite_name = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all') AND ba.bin = b.id AND ba.suite = s.id AND b.architecture = a.id AND f.location = l.id AND l.component = c.id AND b.file = f.id"
                            % (files[file]["package"], suite, files[file]["architecture"]))
    for oldfile in q.dictresult():
        files[file]["oldfiles"][suite] = oldfile;
        # Check versions [NB: per-suite only; no cross-suite checking done (yet)]
        if apt_pkg.VersionCompare(files[file]["version"], oldfile["version"]) != 1:
            self.reject("%s: old version (%s) >= new version (%s)." % (file, oldfile["version"], files[file]["version"]));
    # Check for any existing copies of the file
    q = self.projectB.query("SELECT b.id FROM binaries b, architecture a WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s' AND a.id = b.architecture" % (files[file]["package"], files[file]["version"], files[file]["architecture"]))
    if q.getresult() != []:
        self.reject("can not overwrite existing copy of '%s' already in the archive." % (file));

    return self.reject_message;
649 ################################################################################
def check_source_against_db(self, file):
    """Ensure source is newer than existing source in target suites."""
    self.reject_message = "";
    changes = self.pkg.changes;
    # NOTE(review): missing line(s) -- presumably "dsc = self.pkg.dsc;"
    # ('dsc' is used below).
    package = dsc.get("source");
    new_version = dsc.get("version");
    for suite in changes["distribution"].keys():
        q = self.projectB.query("SELECT s.version FROM source s, src_associations sa, suite su WHERE s.source = '%s' AND su.suite_name = '%s' AND sa.source = s.id AND sa.suite = su.id"
                                # NOTE(review): the "%"-argument
                                # continuation line is missing here --
                                # presumably % (package, suite).
        ql = map(lambda x: x[0], q.getresult());
        for old_version in ql:
            # VersionCompare != 1 means new_version <= old_version.
            if apt_pkg.VersionCompare(new_version, old_version) != 1:
                self.reject("%s: Old version `%s' >= new version `%s'." % (file, old_version, new_version));
    return self.reject_message;
668 ################################################################################
def check_dsc_against_db(self, file):
    """Verify every file listed in the .dsc: locate it in incoming,
    the archive pool or the queue directories and check its
    md5sum/size.  Returns (reject_message, orig_tar_gz).

    NOTE(review): many lines are missing from this chunk; the
    indentation below is a best-effort reconstruction."""
    self.reject_message = "";
    files = self.pkg.files;
    dsc_files = self.pkg.dsc_files;
    legacy_source_untouchable = self.pkg.legacy_source_untouchable;
    # NOTE(review): missing line(s) -- presumably initialise
    # 'orig_tar_gz' (returned at the end).

    # Try and find all files mentioned in the .dsc. This has
    # to work harder to cope with the multiple possible
    # locations of an .orig.tar.gz.
    for dsc_file in dsc_files.keys():
        # NOTE(review): missing line -- presumably "found = None;".
        if files.has_key(dsc_file):
            actual_md5 = files[dsc_file]["md5sum"];
            actual_size = int(files[dsc_file]["size"]);
            found = "%s in incoming" % (dsc_file)
            # Check the file does not already exist in the archive
            q = self.projectB.query("SELECT f.id FROM files f, location l WHERE (f.filename ~ '/%s$' OR f.filename = '%s') AND l.id = f.location" % (utils.regex_safe(dsc_file), dsc_file));

            # "It has not broken them. It has fixed a
            # brokenness. Your crappy hack exploited a bug in
            # NOTE(review): comment line(s) missing here.
            # "(Come on! I thought it was always obvious that
            # one just doesn't release different files with
            # the same name and version.)"
            # -- ajk@ on d-devel@l.d.o

            if q.getresult() != []:
                self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file));
        elif dsc_file[-12:] == ".orig.tar.gz":
            # Check in the pool for an existing copy of the orig tarball.
            q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE (f.filename ~ '/%s$' OR f.filename = '%s') AND l.id = f.location" % (utils.regex_safe(dsc_file), dsc_file));
            # NOTE(review): missing lines -- presumably
            # "ql = q.getresult();" plus a non-empty check.
            # Unfortunately, we make get more than one
            # match here if, for example, the package was
            # in potato but had a -sa upload in woody. So
            # we need to choose the right one.
            x = ql[0]; # default to something sane in case we don't match any or have only one
            # NOTE(review): missing lines -- presumably
            # "if len(ql) > 1:" and "for i in ql:" enclose the
            # candidate-matching block below.
            old_file = i[0] + i[1];
            actual_md5 = apt_pkg.md5sum(utils.open_file(old_file));
            actual_size = os.stat(old_file)[stat.ST_SIZE];
            if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
                # NOTE(review): missing body -- presumably "x = i;",
                # with an else branch marking the candidate untouchable.
            legacy_source_untouchable[i[3]] = "";

            old_file = x[0] + x[1];
            actual_md5 = apt_pkg.md5sum(utils.open_file(old_file));
            actual_size = os.stat(old_file)[stat.ST_SIZE];
            # NOTE(review): missing lines -- presumably set 'found' and
            # 'suite_type' (used below) from x.
            dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
            # See install() in katie...
            self.pkg.orig_tar_id = x[3];
            if suite_type == "legacy" or suite_type == "legacy-mixed":
                self.pkg.orig_tar_location = "legacy";
            # NOTE(review): missing "else:" line here.
                self.pkg.orig_tar_location = x[4];

            # Not there? Check the queue directories...
            # NOTE(review): missing branch structure here (this span is
            # presumably the "not found in the pool" path).
            in_unchecked = os.path.join(self.Cnf["Dir::QueueUncheckedDir"],dsc_file);
            # See process_it() in jennifer for explanation of this
            if os.path.exists(in_unchecked):
                return (self.reject_message, in_unchecked);
            # NOTE(review): missing "else:" line here.
            for dir in [ "Accepted", "New", "Byhand" ]:
                in_otherdir = os.path.join(self.Cnf["Dir::Queue%sDir" % (dir)],dsc_file);
                if os.path.exists(in_otherdir):
                    actual_md5 = apt_pkg.md5sum(utils.open_file(in_otherdir));
                    actual_size = os.stat(in_otherdir)[stat.ST_SIZE];
                    # NOTE(review): missing lines -- presumably set
                    # 'found' and guard the rejects below.
            self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file));
            # NOTE(review): missing lines between the two rejects here.
            self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file));

        if actual_md5 != dsc_files[dsc_file]["md5sum"]:
            self.reject("md5sum for %s doesn't match %s." % (found, file));
        if actual_size != int(dsc_files[dsc_file]["size"]):
            self.reject("size for %s doesn't match %s." % (found, file));

    return (self.reject_message, orig_tar_gz);
def do_query(self, q):
    """Run query 'q' against projectb, logging the query text and its
    wall-clock duration to stderr (debugging helper)."""
    sys.stderr.write("query: \"%s\" ... " % (q));
    before = time.time();
    r = self.projectB.query(q);
    time_diff = time.time()-before;
    sys.stderr.write("took %.3f seconds.\n" % (time_diff));
    # NOTE(review): 'r' is computed but the visible chunk ends without
    # returning it -- the original presumably ends with "return r;".