3 # Utility functions for katie
4 # Copyright (C) 2001 James Troup <james@nocrew.org>
5 # $Id: katie.py,v 1.14 2002-04-20 14:24:48 troup Exp $
7 # This program is free software; you can redistribute it and/or modify
8 # it under the terms of the GNU General Public License as published by
9 # the Free Software Foundation; either version 2 of the License, or
10 # (at your option) any later version.
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU General Public License for more details.
17 # You should have received a copy of the GNU General Public License
18 # along with this program; if not, write to the Free Software
19 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 ###############################################################################
23 import cPickle, errno, os, pg, re, stat, string, sys, time;
24 import utils, db_access;
25 import apt_inst, apt_pkg;
28 from string import lower;
30 ###############################################################################
# Module-level compiled regexes.
# All patterns are raw strings: the last three previously used plain string
# literals containing "\." and "\d", which are invalid escape sequences in
# Python string literals (they only worked by accident) — now fixed.
re_isanum = re.compile(r"^\d+$");                      # a string that is entirely digits
re_default_answer = re.compile(r"\[(.*)\]");           # "[Y/n]"-style default answer in a prompt
re_fdnic = re.compile(r"\n\n");                        # blank-line separator ("form-feed"-ish split)
re_bin_only_nmu_of_mu = re.compile(r"\.\d+\.\d+$");    # ".X.Y" suffix of a binary-only NMU of an MU
re_bin_only_nmu_of_nmu = re.compile(r"\.\d+$");        # ".X" suffix of a binary-only NMU of a sourceful NMU
38 ###############################################################################
40 # Convenience wrapper to carry around all the package information in
43 def __init__(self, **kwds):
44 self.__dict__.update(kwds);
46 def update(self, **kwds):
47 self.__dict__.update(kwds);
49 ###############################################################################
52 # Read in the group maintainer override file
# Load the group-maintainer override file (if configured) into a lookup
# table: each non-comment line, stripped and lower-cased, becomes a key in
# self.group_maint (used as a set; see is_an_nmu below).
# NOTE(review): the embedded line numbers jump (54->56, 60->62), so this
# listing is missing lines here — verify against the full source.
53 def __init__ (self, Cnf):
54 self.group_maint = {};
56 if Cnf.get("Dinstall::GroupOverrideFilename"):
57 filename = Cnf["Dir::OverrideDir"] + Cnf["Dinstall::GroupOverrideFilename"];
58 file = utils.open_file(filename);
59 for line in file.readlines():
# Strip trailing comments and whitespace, then normalise case.
60 line = lower(string.strip(utils.re_comments.sub('', line)));
62 self.group_maint[line] = 1;
# Heuristically decide whether this upload is an NMU (non-maintainer
# upload).  Visible checks: compare the .dsc Maintainer against the
# .changes maintainer/changed-by names, accept anyone listed in the .dsc
# Uploaders field, and treat group-maintained packages (self.group_maint)
# as never being NMUs.
# NOTE(review): line numbers jump throughout (65->67, 73->76, 77->80,
# 82->85, ...); the return statements, the uploaders loop header, and the
# local bindings for `dsc` and `Cnf` referenced below are all missing from
# this listing — verify against the full source before editing.
65 def is_an_nmu (self, pkg):
67 changes = pkg.changes;
# Fall back to the archive's own address if the .dsc has no Maintainer.
70 (dsc_rfc822, dsc_name, dsc_email) = utils.fix_maintainer (lower(dsc.get("maintainer",Cnf["Dinstall::MyEmailAddress"])));
71 # changes["changedbyname"] == dsc_name is probably never true, but better safe than sorry
72 if dsc_name == lower(changes["maintainername"]) and \
73 (changes["changedby822"] == "" or lower(changes["changedbyname"]) == dsc_name):
# An upload signed off by someone in Uploaders is not an NMU.
76 if dsc.has_key("uploaders"):
77 uploaders = string.split(lower(dsc["uploaders"]), ",");
80 (rfc822, name, email) = utils.fix_maintainer (string.strip(i));
81 uploadernames[name] = "";
82 if uploadernames.has_key(lower(changes["changedbyname"])):
85 # Some group maintained packages (e.g. Debian QA) are never NMU's
86 if self.group_maint.has_key(lower(changes["maintaineremail"])):
91 ###############################################################################
# Set up per-run state: the NMU detector, accept counters, the empty Pkg
# holder, the global template-substitution map, and the projectB database
# connection.
# NOTE(review): line numbers jump (95->98, 103->105, 110->112); the
# missing lines presumably include storing Cnf on self (self.Cnf is read
# elsewhere) — verify against the full source.
95 def __init__(self, Cnf):
98 # Read in the group-maint override file
99 self.nmu = nmu_p(Cnf);
100 self.accept_count = 0;
# 0L is a Python 2 long literal: byte counts can exceed a plain int.
101 self.accept_bytes = 0L;
102 self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
103 legacy_source_untouchable = {});
105 # Initialize the substitution template mapping global
# Subst is simultaneously a local alias and the instance attribute.
106 Subst = self.Subst = {};
107 Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"];
108 Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"];
109 Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"];
110 Subst["__KATIE_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"];
112 self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]));
113 db_access.init(Cnf, self.projectB);
115 ###########################################################################
117 def init_vars (self):
118 for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
119 exec "self.pkg.%s.clear();" % (i);
120 self.pkg.orig_tar_id = None;
121 self.pkg.orig_tar_location = "";
123 ###########################################################################
# Restore self.pkg state from the ".katie" dump written by dump_vars():
# [:-8] strips the ".changes" suffix from the changes filename.  The dicts
# are updated in place; the two scalars are assigned.
# SECURITY NOTE(review): cPickle.Unpickler on a file is unsafe if the dump
# could come from an untrusted source.
# NOTE(review): the line numbering jumps after 132; the elided tail
# presumably closes dump_file — verify against the full source.
125 def update_vars (self):
126 dump_filename = self.pkg.changes_file[:-8]+".katie";
127 dump_file = utils.open_file(dump_filename);
128 p = cPickle.Unpickler(dump_file);
# Loads must occur in the same order dump_vars() pickled them.
129 for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
130 exec "self.pkg.%s.update(p.load());" % (i);
131 for i in [ "orig_tar_id", "orig_tar_location" ]:
132 exec "self.pkg.%s = p.load();" % (i);
135 ###########################################################################
137 # This could just dump the dictionaries as is, but I'd like to avoid
138 # this so there's some idea of what katie & lisa use from jennifer
# Serialise self.pkg state to "<changes-basename>.katie" in dest_dir,
# copying only a whitelist of fields into d_* dictionaries so katie & lisa
# see a stable, intentional subset of what jennifer produced.
# NOTE(review): many lines are elided here (gaps at 148-149, 151, 158,
# 169, 171-173, 183, and the trailing pickle/close calls after 185) —
# verify against the full source before editing.
140 def dump_vars(self, dest_dir):
# Bind changes/dsc/files/... as locals from self.pkg via exec.
141 for i in [ "changes", "dsc", "files", "dsc_files",
142 "legacy_source_untouchable", "orig_tar_id", "orig_tar_location" ]:
143 exec "%s = self.pkg.%s;" % (i,i);
144 dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".katie");
145 dump_file = utils.open_file(dump_filename, 'w');
# Protocol 1 (binary) pickler.
146 p = cPickle.Pickler(dump_file, 1);
147 for i in [ "d_changes", "d_dsc", "d_files", "d_dsc_files" ]:
# Per-file whitelist of keys worth persisting.
150 for file in files.keys():
152 for i in [ "package", "version", "architecture", "type", "size",
153 "md5sum", "component", "location id", "source package",
154 "source version", "maintainer", "dbtype", "files id",
155 "new", "section", "priority", "oldfiles", "othercomponents" ]:
156 if files[file].has_key(i):
157 d_files[file][i] = files[file][i];
159 # Mandatory changes fields
160 for i in [ "distribution", "source", "architecture", "version", "maintainer",
161 "urgency", "fingerprint", "changedby822", "changedbyname",
162 "maintainername", "maintaineremail", "closes" ]:
163 d_changes[i] = changes[i];
164 # Optional changes fields
165 # FIXME: changes should be mandatory
166 for i in [ "changed-by", "maintainer822", "filecontents", "format", "changes" ]:
167 if changes.has_key(i):
168 d_changes[i] = changes[i];
170 for i in [ "source", "version", "maintainer", "fingerprint", "uploaders" ]:
174 for file in dsc_files.keys():
175 d_dsc_files[file] = {};
176 # Mandatory dsc_files fields
177 for i in [ "size", "md5sum" ]:
178 d_dsc_files[file][i] = dsc_files[file][i];
179 # Optional dsc_files fields
180 for i in [ "files id" ]:
181 if dsc_files[file].has_key(i):
182 d_dsc_files[file][i] = dsc_files[file][i];
# Dump order here must match the load order in update_vars().
184 for i in [ d_changes, d_dsc, d_files, d_dsc_files,
185 legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
189 ###########################################################################
191 # Set up the per-package template substitution mappings
# Refresh the per-package entries of the template-substitution map from
# self.pkg.changes, repairing fields jennifer may have left malformed.
# NOTE(review): lines 194 (presumably the local `Subst = self.Subst`
# binding used below) and 212 (presumably the `else:` for the branch at
# 208) are missing from this listing — verify against the full source.
193 def update_subst (self, reject_message = ""):
195 changes = self.pkg.changes;
196 # If jennifer crashed out in the right place, architecture may still be a string.
197 if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
198 changes["architecture"] = { "Unknown" : "" };
199 # and maintainer822 may not exist.
200 if not changes.has_key("maintainer822"):
201 changes["maintainer822"] = self.Cnf["Dinstall::MyEmailAddress"];
203 Subst["__ARCHITECTURE__"] = string.join(changes["architecture"].keys(), ' ' );
204 Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file);
205 Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "");
207 # For source uploads the Changed-By field wins; otherwise Maintainer wins.
208 if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
209 Subst["__MAINTAINER_FROM__"] = changes["changedby822"];
# Mail both the uploader and the maintainer for sourceful NMU-ish uploads.
210 Subst["__MAINTAINER_TO__"] = changes["changedby822"] + ", " + changes["maintainer822"];
211 Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown");
213 Subst["__MAINTAINER_FROM__"] = changes["maintainer822"];
214 Subst["__MAINTAINER_TO__"] = changes["maintainer822"];
215 Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown");
# Optionally Bcc a per-source tracking address.
216 if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
217 Subst["__MAINTAINER_TO__"] = Subst["__MAINTAINER_TO__"] + "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])
219 Subst["__REJECT_MESSAGE__"] = reject_message;
220 Subst["__SOURCE__"] = changes.get("source", "Unknown");
221 Subst["__VERSION__"] = changes.get("version", "Unknown");
223 ###########################################################################
# Build the human-readable (summary, short_summary) pair describing what
# this upload will do: byhand files, NEW files (with their description),
# and pool destinations for ordinary files.
# NOTE(review): the line numbering has gaps throughout (e.g. 239->241,
# 242->244, 248->250, 257->260); the elided lines include the branches
# that set `byhand`/`new` and the `else:` for the pool-destination path —
# verify against the full source.
225 def build_summaries(self):
226 changes = self.pkg.changes;
227 files = self.pkg.files;
229 byhand = summary = new = "";
231 # changes["distribution"] may not exist in corner cases
232 # (e.g. unreadable changes files)
233 if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
234 changes["distribution"] = {};
236 file_keys = files.keys();
238 for file in file_keys:
239 if files[file].has_key("byhand"):
241 summary = summary + file + " byhand\n"
242 elif files[file].has_key("new"):
244 summary = summary + "(new) %s %s %s\n" % (file, files[file]["priority"], files[file]["section"])
245 if files[file].has_key("othercomponents"):
246 summary = summary + "WARNING: Already present in %s distribution.\n" % (files[file]["othercomponents"])
# For NEW debs, include the package Description in the summary.
247 if files[file]["type"] == "deb":
248 summary = summary + apt_pkg.ParseSection(apt_inst.debExtractControl(utils.open_file(file)))["Description"] + '\n';
250 files[file]["pool name"] = utils.poolify (changes["source"], files[file]["component"])
251 destination = self.Cnf["Dir::PoolRoot"] + files[file]["pool name"] + file
252 summary = summary + file + "\n to " + destination + "\n"
254 short_summary = summary;
256 # This is for direport's benefit...
257 f = re_fdnic.sub("\n .\n", changes.get("changes",""));
260 summary = summary + "Changes: " + f;
# announce() with action=0 only adds text; it does not send mail.
262 summary = summary + self.announce(short_summary, 0)
264 return (summary, short_summary);
266 ###########################################################################
# Announce a sourceful upload: mail the per-suite announce lists, then
# either close the bugs named in Closes: (maintainer upload) or tag them
# fixed (NMU).  With action false, only the summary text is built.
# NOTE(review): this listing is heavily elided (gaps at 269-270, 272-273,
# 276-279, 281, 285-286, 288-289, 293, 295, 298, 300, 308-309, 313, 315,
# 317-318, 325, 328-330): the `for bug in bugs:` loop headers, the
# NMU `else:` branch, the early return, and the closing quotes of the
# stable-warning string are all missing — verify against the full source.
268 def announce (self, short_summary, action):
271 changes = self.pkg.changes;
274 # Only do announcements for source uploads with a recent dpkg-dev installed
275 if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):
280 Subst["__SHORT_SUMMARY__"] = short_summary;
# One announcement per configured list, deduplicated via lists_done.
282 for dist in changes["distribution"].keys():
283 list = Cnf.Find("Suite::%s::Announce" % (dist))
284 if list == "" or lists_done.has_key(list):
287 summary = summary + "Announcing to %s\n" % (list)
290 Subst["__ANNOUNCE_LIST_ADDRESS__"] = list;
291 mail_message = utils.TemplateSubst(Subst,open(Cnf["Dir::TemplatesDir"]+"/jennifer.announce","r").read());
292 utils.send_mail (mail_message, "")
294 bugs = changes["closes"].keys()
296 if not self.nmu.is_an_nmu(self.pkg):
297 summary = summary + "Closing bugs: "
299 summary = summary + "%s " % (bug)
301 Subst["__BUG_NUMBER__"] = bug;
302 if changes["distribution"].has_key("stable"):
303 Subst["__STABLE_WARNING__"] = """
304 Note that this package is not part of the released stable Debian
305 distribution. It may have dependencies on other unreleased software,
306 or other instabilities. Please take care if you wish to install it.
307 The update will eventually make its way into the next released Debian
310 Subst["__STABLE_WARNING__"] = "";
311 mail_message = utils.TemplateSubst(Subst,open(Cnf["Dir::TemplatesDir"]+"/jennifer.bug-close","r").read());
312 utils.send_mail (mail_message, "")
314 self.Logger.log(["closing bugs"]+bugs);
316 summary = summary + "Setting bugs to severity fixed: "
319 summary = summary + "%s " % (bug)
320 control_message = control_message + "tag %s + fixed\n" % (bug)
321 if action and control_message != "":
322 Subst["__CONTROL_MESSAGE__"] = control_message;
323 mail_message = utils.TemplateSubst(Subst,open(Cnf["Dir::TemplatesDir"]+"/jennifer.bug-nmu-fixed","r").read());
324 utils.send_mail (mail_message, "")
326 self.Logger.log(["setting bugs to fixed"]+bugs);
327 summary = summary + "\n"
# Accept an upload: dump state, move the .changes and all files into the
# accepted queue, update counters, send the accepted mail / announcements,
# and (Debian-specific) make unstable uploads auto-buildable from accepted.
# NOTE(review): line numbers jump (334-335, 337-338, 340, 342, 350, 359,
# 376, 382-383 are elided); the missing lines include local bindings for
# Cnf/Subst used below and the `ql = q.getresult()` check before 384 —
# verify against the full source.
333 def accept (self, summary, short_summary):
336 files = self.pkg.files;
339 self.Logger.log(["Accepting changes",self.pkg.changes_file]);
# Persist state first so katie/lisa can pick the package up later.
341 self.dump_vars(Cnf["Dir::QueueAcceptedDir"]);
343 # Move all the files into the accepted directory
344 utils.move(self.pkg.changes_file, Cnf["Dir::QueueAcceptedDir"]);
345 file_keys = files.keys();
346 for file in file_keys:
347 utils.move(file, Cnf["Dir::QueueAcceptedDir"]);
348 self.accept_bytes = self.accept_bytes + float(files[file]["size"])
349 self.accept_count = self.accept_count + 1;
351 # Send accept mail, announce to lists, close bugs and check for
352 # override disparities
353 if not Cnf["Dinstall::Options::No-Mail"]:
354 Subst["__SUITE__"] = "";
355 Subst["__SUMMARY__"] = summary;
356 mail_message = utils.TemplateSubst(Subst,open(Cnf["Dir::TemplatesDir"]+"/jennifer.accepted","r").read());
357 utils.send_mail(mail_message, "")
358 self.announce(short_summary, 1)
360 ## FIXME: this should go away to some Debian specific file
361 # If we're accepting something for unstable do extra work to
362 # make it auto-buildable from accepted
363 if self.pkg.changes["distribution"].has_key("unstable"):
364 self.projectB.query("BEGIN WORK");
365 # Add it to the list of packages for later processing by apt-ftparchive
366 for file in file_keys:
367 if files[file]["type"] == "dsc" or files[file]["type"] == "deb":
368 filename = os.path.join(Cnf["Dir::QueueAcceptedDir"], file);
# SECURITY NOTE(review): filename is interpolated straight into SQL;
# filenames are archive-controlled here, but parameterisation would be safer.
369 self.projectB.query("INSERT INTO unstable_accepted (filename) VALUES ('%s')" % (filename));
370 # If the .orig.tar.gz is in the pool, create a symlink (if
371 # one doesn't already exist)
372 if self.pkg.orig_tar_id:
373 # Determine the .orig.tar.gz file name
374 for dsc_file in self.pkg.dsc_files.keys():
375 if dsc_file[-12:] == ".orig.tar.gz":
377 dest = os.path.join(Cnf["Dir::QueueAcceptedDir"],filename);
378 # If it doesn't exist, create a symlink
379 if not os.path.exists(dest):
380 # Find the .orig.tar.gz in the pool
381 q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id));
384 utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id));
385 src = os.path.join(ql[0][0], ql[0][1]);
386 os.symlink(src, dest);
387 self.projectB.query("COMMIT WORK");
389 ###########################################################################
# For sourceful uploads, compare each binary's section/priority against
# the override file values and mail the maintainer about any disparities.
# NOTE(review): lines 392, 395, 398-400 (the early return body), 408
# (presumably `continue` for the non-us special case), and 413-417 are
# elided from this listing — verify against the full source.
391 def check_override (self):
393 changes = self.pkg.changes;
394 files = self.pkg.files;
396 # Only check section & priority on sourceful uploads
397 if not changes["architecture"].has_key("source"):
# Skip NEW files: their overrides do not exist yet.
401 for file in files.keys():
402 if not files[file].has_key("new") and files[file]["type"] == "deb":
403 section = files[file]["section"];
404 override_section = files[file]["override section"];
# "-" means no section was declared, so nothing to compare.
405 if lower(section) != lower(override_section) and section != "-":
406 # Ignore this; it's a common mistake and not worth whining about
407 if lower(section) == "non-us/main" and lower(override_section) == "non-us":
409 summary = summary + "%s: section is overridden from %s to %s.\n" % (file, section, override_section);
410 priority = files[file]["priority"];
411 override_priority = files[file]["override priority"];
412 if priority != override_priority and priority != "-":
413 summary = summary + "%s: priority is overridden from %s to %s.\n" % (file, priority, override_priority);
418 Subst["__SUMMARY__"] = summary;
419 mail_message = utils.TemplateSubst(Subst,utils.open_file(self.Cnf["Dir::TemplatesDir"]+"/jennifer.override-disparity").read());
420 utils.send_mail (mail_message, "");
422 ###########################################################################
# NOTE(review): the try/except scaffolding of this function (lines
# 428-431, 434, 436, 438, 442, 448, 450, 452-457) is elided from this
# listing — the bare os.open/utils.move calls below sit inside missing
# exception handlers; verify against the full source before editing.
424 def force_move (self, files):
425 """Forcefully move files from the current directory to the reject
426 directory. If any file already exists it will be moved to the
427 morgue to make way for the new file."""
432 # Skip any files which don't exist or which we don't have permission to copy.
433 if os.access(file,os.R_OK) == 0:
435 dest_file = os.path.join(Cnf["Dir::QueueRejectDir"], file);
# O_EXCL create is the atomic "claim" of the destination name.
437 os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);
439 # File exists? Let's try and move it to the morgue
440 if errno.errorcode[e.errno] == 'EEXIST':
441 morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueRejectDir"],file);
# find_next_free appends a numeric suffix until the name is unused.
443 morgue_file = utils.find_next_free(morgue_file);
444 except utils.tried_too_hard_exc:
445 # Something's either gone badly Pete Tong, or
446 # someone is trying to exploit us.
447 utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file));
449 utils.move(dest_file, morgue_file, perms=0660);
# Retry the atomic claim after clearing the old file away.
451 os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);
454 utils.warn("**WARNING** failed to claim %s in the reject directory." % (file));
458 # If we got here, we own the destination file, so we can
459 # safely overwrite it.
460 utils.move(file, dest_file, 1, perms=0660);
463 ###########################################################################
# Reject an upload: move its files to the reject queue, write a
# "<name>.reason" file (claimed with O_EXCL to defeat races), build the
# rejection mail from a template (automatic vs. manual variants), let the
# operator edit it when no message was supplied, and send it.
# NOTE(review): this listing is heavily elided (466-471, 474, 478, 484-485,
# 490, 492, 495, 500, 503-504, 510, 513, 517): local bindings for
# Cnf/Subst/pkg, the manual/automatic `if`/`else:` structure around
# 486-498, and the result check before 511 are all missing — verify
# against the full source before editing.
465 def do_reject (self, manual = 0, reject_message = ""):
472 reason_filename = pkg.changes_file[:-8] + ".reason";
473 reject_filename = Cnf["Dir::QueueRejectDir"] + '/' + reason_filename;
475 # Move all the files into the reject directory
476 reject_files = pkg.files.keys() + [pkg.changes_file];
477 self.force_move(reject_files);
479 # If we fail here someone is probably trying to exploit the race
480 # so let's just raise an exception ...
481 if os.path.exists(reject_filename):
482 os.unlink(reject_filename);
483 fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);
# Automatic-rejection variant of the template substitutions.
486 Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"];
487 Subst["__MANUAL_REJECT_MESSAGE__"] = "";
488 Subst["__CC__"] = "X-Katie-Rejection: automatic (moo)";
489 os.write(fd, reject_message);
491 reject_mail_message = utils.TemplateSubst(Subst,utils.open_file(Cnf["Dir::TemplatesDir"]+"/katie.rejected").read());
493 # Build up the rejection email
494 user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"]);
# Manual-rejection variant: the operator is the rejector.
496 Subst["__REJECTOR_ADDRESS__"] = user_email_address;
497 Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message;
498 Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"];
499 reject_mail_message = utils.TemplateSubst(Subst,utils.open_file(Cnf["Dir::TemplatesDir"]+"/katie.rejected").read());
501 # Write the rejection email out as the <foo>.reason file
502 os.write(fd, reject_mail_message);
505 # If we weren't given a manual rejection message, spawn an
506 # editor so the user can add one in...
507 if reject_message == "":
508 editor = os.environ.get("EDITOR","vi")
# +6 positions the cursor past the template header.
509 result = os.system("%s +6 %s" % (editor, reject_filename))
511 utils.fubar("editor invocation failed for '%s'!" % (reject_filename), result);
512 reject_mail_message = utils.open_file(reject_filename).read();
514 # Send the rejection mail if appropriate
515 if not Cnf["Dinstall::Options::No-Mail"]:
516 utils.send_mail (reject_mail_message, "");
518 self.Logger.log(["rejected", pkg.changes_file]);
520 ################################################################################
522 # Ensure that source exists somewhere in the archive for the binary
523 # upload being processed.
525 # (1) exact match => 1.0-3
526 # (2) Bin-only NMU of an MU => 1.0-3.0.1
527 # (3) Bin-only NMU of a sourceful-NMU => 1.0-3.1.1
# Check that source for a binary upload exists in the archive, accepting
# (1) an exact version match, (2) a binary-only NMU of a maintainer
# upload (strip ".X.Y"), or (3) a binary-only NMU of a sourceful NMU
# (strip ".X") — see the comment block above this method.
# NOTE(review): every return statement (lines 531, 534-535, 537-539,
# 542-544, 547-551) is elided from this listing — verify against the
# full source.
529 def source_exists (self, package, source_version):
530 q = self.projectB.query("SELECT s.version FROM source s WHERE s.source = '%s'" % (package));
532 # Reduce the query results to a list of version numbers
533 ql = map(lambda x: x[0], q.getresult());
# Case (1): exact match.
536 if ql.count(source_version):
# Case (2): bin-only NMU of a maintainer upload.
540 orig_source_version = re_bin_only_nmu_of_mu.sub('', source_version);
541 if ql.count(orig_source_version):
# Case (3): bin-only NMU of a sourceful NMU.
545 orig_source_version = re_bin_only_nmu_of_nmu.sub('', source_version);
546 if ql.count(orig_source_version):
552 ################################################################################
# Look the package up in the override table for the given suite,
# component and type; on success, remember the override section/priority
# in files[file] for the later check_override() disparity report.
# NOTE(review): lines 556, 558-561 (the binding of the local `type` used
# below), 565, 568-569, 572, 576, 586, 588 and the returns at 591-593 are
# elided from this listing — verify against the full source.
554 def in_override_p (self, package, component, suite, binary_type, file):
555 files = self.pkg.files;
557 if binary_type == "": # must be source
562 # Override suite name; used for example with proposed-updates
563 if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
564 suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)];
566 # Avoid <undef> on unknown distributions
567 suite_id = db_access.get_suite_id(suite);
570 component_id = db_access.get_component_id(component);
571 type_id = db_access.get_override_type_id(type);
573 # FIXME: nasty non-US speficic hack
574 if lower(component[:7]) == "non-us/":
575 component = component[7:];
577 q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
578 % (package, suite_id, component_id, type_id));
579 result = q.getresult();
580 # If checking for a source package fall back on the binary override type
581 if type == "dsc" and not result:
582 type_id = db_access.get_override_type_id("deb");
583 q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
584 % (package, suite_id, component_id, type_id));
585 result = q.getresult();
587 # Remember the section and priority so we can check them later if appropriate
589 files[file]["override section"] = result[0][0];
590 files[file]["override priority"] = result[0][1];
594 ################################################################################
596 def reject (self, str, prefix="Rejected: "):
598 # Unlike other rejects we add new lines first to avoid trailing
599 # new lines when this message is passed back up to a caller.
600 if self.reject_message:
601 self.reject_message = self.reject_message + "\n";
602 self.reject_message = self.reject_message + prefix + str;
# Reject-check one binary file against the database: flag version
# regressions within each target suite and refuse to overwrite a file
# that is already in the archive.  Returns the accumulated reject text.
# NOTE(review): lines 607 and 620 are missing from this listing
# (presumably blank) — verify against the full source.
604 def check_binaries_against_db(self, file, suite):
605 self.reject_message = "";
606 files = self.pkg.files;
608 # Find any old binary packages
# "all" binaries are compared against every architecture's history.
609 q = self.projectB.query("SELECT b.id, b.version, f.filename, l.path, c.name FROM binaries b, bin_associations ba, suite s, location l, component c, architecture a, files f WHERE b.package = '%s' AND s.suite_name = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all') AND ba.bin = b.id AND ba.suite = s.id AND b.architecture = a.id AND f.location = l.id AND l.component = c.id AND b.file = f.id"
610 % (files[file]["package"], suite, files[file]["architecture"]))
611 for oldfile in q.dictresult():
612 files[file]["oldfiles"][suite] = oldfile;
613 # Check versions [NB: per-suite only; no cross-suite checking done (yet)]
# VersionCompare returns 1 only when the new version is strictly greater.
614 if apt_pkg.VersionCompare(files[file]["version"], oldfile["version"]) != 1:
615 self.reject("%s: old version (%s) >= new version (%s)." % (file, oldfile["version"], files[file]["version"]));
616 # Check for any existing copies of the file
617 q = self.projectB.query("SELECT b.id FROM binaries b, architecture a WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s' AND a.id = b.architecture" % (files[file]["package"], files[file]["version"], files[file]["architecture"]))
618 if q.getresult() != []:
619 self.reject("can not overwrite existing copy of '%s' already in the archive." % (file));
621 return self.reject_message;
623 ################################################################################
# NOTE(review): lines 629-630 (presumably the `dsc = self.pkg.dsc`
# binding read below) and 635 (the `%`-substitution continuation of the
# query on 634) are missing from this listing — verify against the full
# source before editing.
625 def check_source_against_db(self, file):
626 """Ensure source is newer than existing source in target suites."""
627 self.reject_message = "";
628 changes = self.pkg.changes;
631 package = dsc.get("source");
632 new_version = dsc.get("version");
# Per-suite check: the new version must beat every version already there.
633 for suite in changes["distribution"].keys():
634 q = self.projectB.query("SELECT s.version FROM source s, src_associations sa, suite su WHERE s.source = '%s' AND su.suite_name = '%s' AND sa.source = s.id AND sa.suite = su.id"
636 ql = map(lambda x: x[0], q.getresult());
637 for old_version in ql:
638 if apt_pkg.VersionCompare(new_version, old_version) != 1:
639 self.reject("%s: Old version `%s' >= new version `%s'." % (file, old_version, new_version));
640 return self.reject_message;
642 ################################################################################
# Verify every file listed in the .dsc: files also in this upload must
# not already exist in the archive; a referenced .orig.tar.gz may instead
# be found in the pool (recording orig_tar_id/orig_tar_location) or in
# one of the queue directories.  Finally the md5sum/size of whatever was
# found is checked against the .dsc.  Returns (reject_message, orig_tar_gz).
# NOTE(review): this listing is heavily elided (649-650, 655, 662, 665-666,
# 671, 675, 677-679, 684, 686-688, 693-694, 696, 700-701, 707, 709, 711,
# 716, 722-724, 726-727, 729, 734): the `else:` scaffolding, the loop
# header binding `i`, the suite_type binding, and the assignments to
# `found`/`orig_tar_gz` on the queue paths are all missing — verify
# against the full source before editing.
644 def check_dsc_against_db(self, file):
645 self.reject_message = "";
646 files = self.pkg.files;
647 dsc_files = self.pkg.dsc_files;
648 legacy_source_untouchable = self.pkg.legacy_source_untouchable;
651 # Try and find all files mentioned in the .dsc. This has
652 # to work harder to cope with the multiple possible
653 # locations of an .orig.tar.gz.
654 for dsc_file in dsc_files.keys():
# Easy case: the file is part of this upload.
656 if files.has_key(dsc_file):
657 actual_md5 = files[dsc_file]["md5sum"];
658 actual_size = int(files[dsc_file]["size"]);
659 found = "%s in incoming" % (dsc_file)
660 # Check the file does not already exist in the archive
661 q = self.projectB.query("SELECT f.id FROM files f, location l WHERE (f.filename ~ '/%s$' OR f.filename = '%s') AND l.id = f.location" % (utils.regex_safe(dsc_file), dsc_file));
663 # "It has not broken them. It has fixed a
664 # brokenness. Your crappy hack exploited a bug in
667 # "(Come on! I thought it was always obvious that
668 # one just doesn't release different files with
669 # the same name and version.)"
670 # -- ajk@ on d-devel@l.d.o
672 if q.getresult() != []:
673 self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file));
# Harder case: an .orig.tar.gz that may already live in the pool.
674 elif dsc_file[-12:] == ".orig.tar.gz":
676 q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE (f.filename ~ '/%s$' OR f.filename = '%s') AND l.id = f.location" % (utils.regex_safe(dsc_file), dsc_file));
680 # Unfortunately, we may get more than one
681 # match here if, for example, the package was
682 # in potato but had a -sa upload in woody. So
683 # we need to choose the right one.
685 x = ql[0]; # default to something sane in case we don't match any or have only one
# Pick the candidate whose md5sum and size match the .dsc, and
# mark the others untouchable for the legacy-source handling.
689 old_file = i[0] + i[1];
690 actual_md5 = apt_pkg.md5sum(utils.open_file(old_file));
691 actual_size = os.stat(old_file)[stat.ST_SIZE];
692 if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
695 legacy_source_untouchable[i[3]] = "";
697 old_file = x[0] + x[1];
698 actual_md5 = apt_pkg.md5sum(utils.open_file(old_file));
699 actual_size = os.stat(old_file)[stat.ST_SIZE];
702 dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
703 # See install() in katie...
704 self.pkg.orig_tar_id = x[3];
705 if suite_type == "legacy" or suite_type == "legacy-mixed":
706 self.pkg.orig_tar_location = "legacy";
708 self.pkg.orig_tar_location = x[4];
710 # Not there? Check the queue directories...
712 in_unchecked = os.path.join(self.Cnf["Dir::QueueUncheckedDir"],dsc_file);
713 # See process_it() in jennifer for explanation of this
714 if os.path.exists(in_unchecked):
715 return (self.reject_message, in_unchecked);
717 for dir in [ "Accepted", "New", "Byhand" ]:
718 in_otherdir = os.path.join(self.Cnf["Dir::Queue%sDir" % (dir)],dsc_file);
719 if os.path.exists(in_otherdir):
720 actual_md5 = apt_pkg.md5sum(utils.open_file(in_otherdir));
721 actual_size = os.stat(in_otherdir)[stat.ST_SIZE];
725 self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file));
728 self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file));
# Whatever we found, its checksum and size must match the .dsc.
730 if actual_md5 != dsc_files[dsc_file]["md5sum"]:
731 self.reject("md5sum for %s doesn't match %s." % (found, file));
732 if actual_size != int(dsc_files[dsc_file]["size"]):
733 self.reject("size for %s doesn't match %s." % (found, file));
735 return (self.reject_message, orig_tar_gz);
# Debug wrapper around projectB.query(): logs the SQL and its wall-clock
# duration to stderr.
# NOTE(review): the visible lines end at the timing write; the function
# presumably returns `r` on a following elided line — verify against the
# full source.
737 def do_query(self, q):
738 sys.stderr.write("query: \"%s\" ... " % (q));
739 before = time.time();
740 r = self.projectB.query(q);
741 time_diff = time.time()-before;
742 sys.stderr.write("took %.3f seconds.\n" % (time_diff));