#!/usr/bin/env python

# Utility functions for katie
# Copyright (C) 2001, 2002  James Troup <james@nocrew.org>
# $Id: katie.py,v 1.20 2002-05-14 15:35:22 troup Exp $

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

###############################################################################

import cPickle, errno, os, pg, re, stat, string, sys, tempfile, time;
import utils, db_access;
import apt_inst, apt_pkg;

from types import *;
from string import lower;

###############################################################################

re_isanum = re.compile (r"^\d+$");
re_default_answer = re.compile(r"\[(.*)\]");
re_fdnic = re.compile("\n\n");
re_bin_only_nmu_of_mu = re.compile("\.\d+\.\d+$");
re_bin_only_nmu_of_nmu = re.compile("\.\d+$");

###############################################################################

# Convenience wrapper to carry around all the package information in one place

class Pkg:
    def __init__(self, **kwds):
        self.__dict__.update(kwds);

    def update(self, **kwds):
        self.__dict__.update(kwds);

###############################################################################

class nmu_p:
    # Read in the group maintainer override file
    def __init__ (self, Cnf):
        self.group_maint = {};
        self.Cnf = Cnf;
        if Cnf.get("Dinstall::GroupOverrideFilename"):
            filename = Cnf["Dir::Override"] + Cnf["Dinstall::GroupOverrideFilename"];
            file = utils.open_file(filename);
            for line in file.readlines():
                line = lower(string.strip(utils.re_comments.sub('', line)));
                if line != "":
                    self.group_maint[line] = 1;
            file.close();

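    # Return 0 if the upload looks like a maintainer upload (the .dsc
    # Maintainer matches the .changes maintainer/Changed-By, or the
    # changer is listed in Uploaders, or the maintainer address is in
    # the group-maintained override list); otherwise treat it as an NMU
    # and return 1.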
    def is_an_nmu (self, pkg):
        Cnf = self.Cnf;
        changes = pkg.changes;
        dsc = pkg.dsc;

        (dsc_rfc822, dsc_name, dsc_email) = utils.fix_maintainer (lower(dsc.get("maintainer",Cnf["Dinstall::MyEmailAddress"])));
        # changes["changedbyname"] == dsc_name is probably never true, but better safe than sorry
        if dsc_name == lower(changes["maintainername"]) and \
           (changes["changedby822"] == "" or lower(changes["changedbyname"]) == dsc_name):
            return 0;

        if dsc.has_key("uploaders"):
            uploaders = string.split(lower(dsc["uploaders"]), ",");
            uploadernames = {};
            for i in uploaders:
                (rfc822, name, email) = utils.fix_maintainer (string.strip(i));
                uploadernames[name] = "";
            if uploadernames.has_key(lower(changes["changedbyname"])):
                return 0;

        # Some group-maintained packages (e.g. Debian QA) are never NMUs
        if self.group_maint.has_key(lower(changes["maintaineremail"])):
            return 0;

        return 1;

###############################################################################

class Katie:

    def __init__(self, Cnf):
        self.Cnf = Cnf;
        self.values = {};
        # Read in the group-maint override file
        self.nmu = nmu_p(Cnf);
        self.accept_count = 0;
        self.accept_bytes = 0L;
        self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
                       legacy_source_untouchable = {});

        # Initialize the substitution template mapping global
        Subst = self.Subst = {};
        Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"];
        Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"];
        Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"];
        Subst["__KATIE_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"];

        self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]));
        db_access.init(Cnf, self.projectB);

    ###########################################################################

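    # Reset the per-upload state (changes, dsc, files, etc.) so the same
    # Katie object can be reused for the next package.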
    def init_vars (self):
        for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
            exec "self.pkg.%s.clear();" % (i);
        self.pkg.orig_tar_id = None;
        self.pkg.orig_tar_location = "";

    ###########################################################################

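    # Reload the package state from the pickled .katie dump file written
    # by dump_vars().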
    def update_vars (self):
        dump_filename = self.pkg.changes_file[:-8]+".katie";
        dump_file = utils.open_file(dump_filename);
        p = cPickle.Unpickler(dump_file);
        for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
            exec "self.pkg.%s.update(p.load());" % (i);
        for i in [ "orig_tar_id", "orig_tar_location" ]:
            exec "self.pkg.%s = p.load();" % (i);
        dump_file.close();

    ###########################################################################

    # This could just dump the dictionaries as-is, but I'd like to avoid
    # that so there's some idea of what katie & lisa use from jennifer

    def dump_vars(self, dest_dir):
        for i in [ "changes", "dsc", "files", "dsc_files",
                   "legacy_source_untouchable", "orig_tar_id", "orig_tar_location" ]:
            exec "%s = self.pkg.%s;" % (i,i);
        dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".katie");
        dump_file = utils.open_file(dump_filename, 'w');
        p = cPickle.Pickler(dump_file, 1);
        for i in [ "d_changes", "d_dsc", "d_files", "d_dsc_files" ]:
            exec "%s = {}" % i;
        ## files
        for file in files.keys():
            d_files[file] = {};
            for i in [ "package", "version", "architecture", "type", "size",
                       "md5sum", "component", "location id", "source package",
                       "source version", "maintainer", "dbtype", "files id",
                       "new", "section", "priority", "othercomponents",
                       "pool name" ]:
                if files[file].has_key(i):
                    d_files[file][i] = files[file][i];
        ## changes
        # Mandatory changes fields
        for i in [ "distribution", "source", "architecture", "version", "maintainer",
                   "urgency", "fingerprint", "changedby822", "changedbyname",
                   "maintainername", "maintaineremail", "closes" ]:
            d_changes[i] = changes[i];
        # Optional changes fields
        # FIXME: changes should be mandatory
        for i in [ "changed-by", "maintainer822", "filecontents", "format", "changes" ]:
            if changes.has_key(i):
                d_changes[i] = changes[i];
        ## dsc
        for i in [ "source", "version", "maintainer", "fingerprint", "uploaders" ]:
            if dsc.has_key(i):
                d_dsc[i] = dsc[i];
        ## dsc_files
        for file in dsc_files.keys():
            d_dsc_files[file] = {};
            # Mandatory dsc_files fields
            for i in [ "size", "md5sum" ]:
                d_dsc_files[file][i] = dsc_files[file][i];
            # Optional dsc_files fields
            for i in [ "files id" ]:
                if dsc_files[file].has_key(i):
                    d_dsc_files[file][i] = dsc_files[file][i];

        for i in [ d_changes, d_dsc, d_files, d_dsc_files,
                   legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
            p.dump(i);
        dump_file.close();

    ###########################################################################

    # Set up the per-package template substitution mappings

    def update_subst (self, reject_message = ""):
        Subst = self.Subst;
        changes = self.pkg.changes;
        # If jennifer crashed out in the right place, architecture may still be a string.
        if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
            changes["architecture"] = { "Unknown" : "" };
        # and maintainer822 may not exist.
        if not changes.has_key("maintainer822"):
            changes["maintainer822"] = self.Cnf["Dinstall::MyEmailAddress"];

        Subst["__ARCHITECTURE__"] = string.join(changes["architecture"].keys(), ' ' );
        Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file);
        Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "");

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
            Subst["__MAINTAINER_FROM__"] = changes["changedby822"];
            Subst["__MAINTAINER_TO__"] = changes["changedby822"] + ", " + changes["maintainer822"];
            Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown");
        else:
            Subst["__MAINTAINER_FROM__"] = changes["maintainer822"];
            Subst["__MAINTAINER_TO__"] = changes["maintainer822"];
            Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown");
        if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
            Subst["__MAINTAINER_TO__"] = Subst["__MAINTAINER_TO__"] + "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if self.Cnf.get("Dinstall::OverrideMaintainer"):
            Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"];
            Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"];

        Subst["__REJECT_MESSAGE__"] = reject_message;
        Subst["__SOURCE__"] = changes.get("source", "Unknown");
        Subst["__VERSION__"] = changes.get("version", "Unknown");

    ###########################################################################

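    # Build the summary strings used in the accept/reject mails: a
    # per-file listing (flagging byhand/new files and pool destinations)
    # plus the announcement text.  Returns (summary, short_summary).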
    def build_summaries(self):
        changes = self.pkg.changes;
        files = self.pkg.files;

        byhand = summary = new = "";

        # changes["distribution"] may not exist in corner cases
        # (e.g. unreadable changes files)
        if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
            changes["distribution"] = {};

        file_keys = files.keys();
        file_keys.sort();
        for file in file_keys:
            if files[file].has_key("byhand"):
                byhand = 1
                summary = summary + file + " byhand\n"
            elif files[file].has_key("new"):
                new = 1
                summary = summary + "(new) %s %s %s\n" % (file, files[file]["priority"], files[file]["section"])
                if files[file].has_key("othercomponents"):
                    summary = summary + "WARNING: Already present in %s distribution.\n" % (files[file]["othercomponents"])
                if files[file]["type"] == "deb":
                    summary = summary + apt_pkg.ParseSection(apt_inst.debExtractControl(utils.open_file(file)))["Description"] + '\n';
            else:
                files[file]["pool name"] = utils.poolify (changes["source"], files[file]["component"])
                destination = self.Cnf["Dir::PoolRoot"] + files[file]["pool name"] + file
                summary = summary + file + "\n  to " + destination + "\n"

        short_summary = summary;

        # This is for direport's benefit...
        f = re_fdnic.sub("\n .\n", changes.get("changes",""));

        if byhand or new:
            summary = summary + "Changes: " + f;

        summary = summary + self.announce(short_summary, 0)

        return (summary, short_summary);

    ###########################################################################

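    # Append bug-closing information to the summary and, if action is
    # set, send the bug-close mails (or the tag-fixed mails for NMUs) for
    # the bugs listed in Closes:.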
    def close_bugs (self, summary, action):
        changes = self.pkg.changes;
        Subst = self.Subst;
        Cnf = self.Cnf;

        bugs = changes["closes"].keys();

        if not bugs:
            return summary;

        bugs.sort();
        if not self.nmu.is_an_nmu(self.pkg):
            summary = summary + "Closing bugs: ";
            for bug in bugs:
                summary = summary + "%s " % (bug);
                if action:
                    Subst["__BUG_NUMBER__"] = bug;
                    if changes["distribution"].has_key("stable"):
                        Subst["__STABLE_WARNING__"] = """
Note that this package is not part of the released stable Debian
distribution.  It may have dependencies on other unreleased software,
or other instabilities.  Please take care if you wish to install it.
The update will eventually make its way into the next released Debian
distribution.""";
                    else:
                        Subst["__STABLE_WARNING__"] = "";
                    mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.bug-close");
                    utils.send_mail (mail_message, "");
            if action:
                self.Logger.log(["closing bugs"]+bugs);
        else:                     # NMU
            summary = summary + "Setting bugs to severity fixed: ";
            control_message = "";
            for bug in bugs:
                summary = summary + "%s " % (bug);
                control_message = control_message + "tag %s + fixed\n" % (bug);
            if action and control_message != "":
                Subst["__CONTROL_MESSAGE__"] = control_message;
                mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.bug-nmu-fixed");
                utils.send_mail (mail_message, "");
            if action:
                self.Logger.log(["setting bugs to fixed"]+bugs);
        summary = summary + "\n";
        return summary;

    ###########################################################################

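    # Send the announcement mails for a source upload, as configured
    # per-suite via Suite::<suite>::Announce, and optionally close bugs.
    # Returns the summary text describing what was done.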
    def announce (self, short_summary, action):
        Subst = self.Subst;
        Cnf = self.Cnf;
        changes = self.pkg.changes;
        dsc = self.pkg.dsc;

        # Only do announcements for source uploads with a recent dpkg-dev installed
        if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):
            return "";

        lists_done = {};
        summary = "";
        Subst["__SHORT_SUMMARY__"] = short_summary;

        for dist in changes["distribution"].keys():
            list = Cnf.Find("Suite::%s::Announce" % (dist));
            if list == "" or lists_done.has_key(list):
                continue;
            lists_done[list] = 1;
            summary = summary + "Announcing to %s\n" % (list);

            if action:
                Subst["__ANNOUNCE_LIST_ADDRESS__"] = list;
                if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
                    Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"]);
                mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.announce");
                utils.send_mail (mail_message, "");

        if Cnf.get("Dinstall::CloseBugs"):
            summary = self.close_bugs(summary, action);

        return summary;

    ###########################################################################

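    # Accept an upload: move the .changes and its files into the accepted
    # queue, send the accepted mail, announce, and (if configured) create
    # the symlinks used for auto-building of accepted packages.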
    def accept (self, summary, short_summary):
        Cnf = self.Cnf;
        Subst = self.Subst;
        files = self.pkg.files;

        print "Accepting."
        self.Logger.log(["Accepting changes",self.pkg.changes_file]);

        self.dump_vars(Cnf["Dir::Queue::Accepted"]);

        # Move all the files into the accepted directory
        utils.move(self.pkg.changes_file, Cnf["Dir::Queue::Accepted"]);
        file_keys = files.keys();
        for file in file_keys:
            utils.move(file, Cnf["Dir::Queue::Accepted"]);
            self.accept_bytes = self.accept_bytes + float(files[file]["size"])
        self.accept_count = self.accept_count + 1;

        # Send accept mail, announce to lists, close bugs and check for
        # override disparities
        if not Cnf["Dinstall::Options::No-Mail"]:
            Subst["__SUITE__"] = "";
            Subst["__SUMMARY__"] = summary;
            mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.accepted");
            utils.send_mail(mail_message, "")
            self.announce(short_summary, 1)

        # Special support to enable clean auto-building of accepted packages
        if Cnf.FindB("Dinstall::SpecialAcceptedAutoBuild") and \
           self.pkg.changes["distribution"].has_key("unstable"):
            self.projectB.query("BEGIN WORK");
            for file in file_keys:
                src = os.path.join(Cnf["Dir::Queue::Accepted"], file);
                dest = os.path.join(Cnf["Dir::AcceptedAutoBuild"], file);
                # Create a symlink to it
                os.symlink(src, dest);
                # Add it to the list of packages for later processing by apt-ftparchive
                self.projectB.query("INSERT INTO unstable_accepted (filename, in_accepted) VALUES ('%s', 't')" % (dest));
            # If the .orig.tar.gz is in the pool, create a symlink to
            # it (if one doesn't already exist)
            if self.pkg.orig_tar_id:
                # Determine the .orig.tar.gz file name
                for dsc_file in self.pkg.dsc_files.keys():
                    if dsc_file[-12:] == ".orig.tar.gz":
                        filename = dsc_file;
                dest = os.path.join(Cnf["Dir::AcceptedAutoBuild"],filename);
                # If it doesn't exist, create a symlink
                if not os.path.exists(dest):
                    # Find the .orig.tar.gz in the pool
                    q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id));
                    ql = q.getresult();
                    if not ql:
                        utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id));
                    src = os.path.join(ql[0][0], ql[0][1]);
                    os.symlink(src, dest);
                    # Add it to the list of packages for later processing by apt-ftparchive
                    self.projectB.query("INSERT INTO unstable_accepted (filename, in_accepted) VALUES ('%s', 't')" % (dest));

            self.projectB.query("COMMIT WORK");

    ###########################################################################

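    # For sourceful uploads, mail the maintainer if the section/priority
    # in the .changes differs from the override database (unless the
    # check is disabled or mail is suppressed).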
    def check_override (self):
        Subst = self.Subst;
        changes = self.pkg.changes;
        files = self.pkg.files;
        Cnf = self.Cnf;

        # Abandon the check if:
        #  a) it's a non-sourceful upload
        #  b) override disparity checks have been disabled
        #  c) we're not sending mail
        if not changes["architecture"].has_key("source") or \
           not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
           Cnf["Dinstall::Options::No-Mail"]:
            return;

        summary = "";
        for file in files.keys():
            if not files[file].has_key("new") and files[file]["type"] == "deb":
                section = files[file]["section"];
                override_section = files[file]["override section"];
                if lower(section) != lower(override_section) and section != "-":
                    # Ignore this; it's a common mistake and not worth whining about
                    if lower(section) == "non-us/main" and lower(override_section) == "non-us":
                        continue;
                    summary = summary + "%s: section is overridden from %s to %s.\n" % (file, section, override_section);
                priority = files[file]["priority"];
                override_priority = files[file]["override priority"];
                if priority != override_priority and priority != "-":
                    summary = summary + "%s: priority is overridden from %s to %s.\n" % (file, priority, override_priority);

        if summary == "":
            return;

        Subst["__SUMMARY__"] = summary;
        mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/jennifer.override-disparity");
        utils.send_mail (mail_message, "");

    ###########################################################################

    def force_move (self, files):
        """Forcefully move files from the current directory to the reject
           directory.  If any file already exists it will be moved to the
           morgue to make way for the new file."""

        Cnf = self.Cnf

        for file in files:
            # Skip any files which don't exist or which we don't have permission to copy.
            if os.access(file,os.R_OK) == 0:
                continue;
            dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file);
            try:
                os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);
            except OSError, e:
                # File exists?  Let's try and move it to the morgue
                if errno.errorcode[e.errno] == 'EEXIST':
                    morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file);
                    try:
                        morgue_file = utils.find_next_free(morgue_file);
                    except utils.tried_too_hard_exc:
                        # Something's either gone badly Pete Tong, or
                        # someone is trying to exploit us.
                        utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file));
                        return;
                    utils.move(dest_file, morgue_file, perms=0660);
                    try:
                        os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);
                    except OSError, e:
                        # Likewise
                        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file));
                        return;
                else:
                    raise;
            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file, dest_file, 1, perms=0660);

    ###########################################################################

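    # Reject an upload: optionally prompt for a manual rejection message
    # via $EDITOR, move the files into the reject queue, write the
    # <foo>.reason file and send the rejection mail.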
    def do_reject (self, manual = 0, reject_message = ""):
        # If we weren't given a manual rejection message, spawn an
        # editor so the user can add one in...
        if manual and not reject_message:
            temp_filename = tempfile.mktemp();
            fd = os.open(temp_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0700);
            os.close(fd);
            editor = os.environ.get("EDITOR","vi")
            answer = 'E';
            while answer == 'E':
                os.system("%s %s" % (editor, temp_filename))
                file = utils.open_file(temp_filename);
                reject_message = string.join(file.readlines());
                file.close();
                print "Reject message:";
                print utils.prefix_multi_line_string(reject_message,"  ");
                prompt = "[R]eject, Edit, Abandon, Quit ?"
                answer = "XXX";
                while string.find(prompt, answer) == -1:
                    answer = utils.our_raw_input(prompt);
                    m = re_default_answer.search(prompt);
                    if answer == "":
                        answer = m.group(1);
                    answer = string.upper(answer[:1]);
            os.unlink(temp_filename);
            if answer == 'A':
                return 1;
            elif answer == 'Q':
                sys.exit(0);

        print "Rejecting.\n"

        Cnf = self.Cnf;
        Subst = self.Subst;
        pkg = self.pkg;

        reason_filename = pkg.changes_file[:-8] + ".reason";
        reject_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename;

        # Move all the files into the reject directory
        reject_files = pkg.files.keys() + [pkg.changes_file];
        self.force_move(reject_files);

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reject_filename):
            os.unlink(reject_filename);
        fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);

        if not manual:
            Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"];
            Subst["__MANUAL_REJECT_MESSAGE__"] = "";
            Subst["__CC__"] = "X-Katie-Rejection: automatic (moo)";
            os.write(fd, reject_message);
            os.close(fd);
            reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/katie.rejected");
        else:
            # Build up the rejection email
            user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"]);

            Subst["__REJECTOR_ADDRESS__"] = user_email_address;
            Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message;
            Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"];
            reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/katie.rejected");

            # Write the rejection email out as the <foo>.reason file
            os.write(fd, reject_mail_message);
            os.close(fd);

        # Send the rejection mail if appropriate
        if not Cnf["Dinstall::Options::No-Mail"]:
            utils.send_mail (reject_mail_message, "");

        self.Logger.log(["rejected", pkg.changes_file]);
        return 0;

    ################################################################################

    # Ensure that source exists somewhere in the archive for the binary
    # upload being processed.
    #
    # (1) exact match                      => 1.0-3
    # (2) Bin-only NMU of an MU            => 1.0-3.0.1
    # (3) Bin-only NMU of a sourceful-NMU  => 1.0-3.1.1

    def source_exists (self, package, source_version):
        q = self.projectB.query("SELECT s.version FROM source s WHERE s.source = '%s'" % (package));

        # Reduce the query results to a list of version numbers
        ql = map(lambda x: x[0], q.getresult());

        # Try (1)
        if ql.count(source_version):
            return 1;

        # Try (2)
        orig_source_version = re_bin_only_nmu_of_mu.sub('', source_version);
        if ql.count(orig_source_version):
            return 1;

        # Try (3)
        orig_source_version = re_bin_only_nmu_of_nmu.sub('', source_version);
        if ql.count(orig_source_version):
            return 1;

        # No source found...
        return 0;

    ################################################################################

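    # Look up the package in the override table for the given suite,
    # component and type; remember the override section/priority on the
    # file entry and return the query result (None for unknown suites).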
    def in_override_p (self, package, component, suite, binary_type, file):
        files = self.pkg.files;

        if binary_type == "": # must be source
            type = "dsc";
        else:
            type = binary_type;

        # Override suite name; used for example with proposed-updates
        if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
            suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)];

        # Avoid <undef> on unknown distributions
        suite_id = db_access.get_suite_id(suite);
        if suite_id == -1:
            return None;
        component_id = db_access.get_component_id(component);
        type_id = db_access.get_override_type_id(type);

        # FIXME: nasty non-US specific hack
        if lower(component[:7]) == "non-us/":
            component = component[7:];

        q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
                           % (package, suite_id, component_id, type_id));
        result = q.getresult();
        # If checking for a source package fall back on the binary override type
        if type == "dsc" and not result:
            type_id = db_access.get_override_type_id("deb");
            q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
                               % (package, suite_id, component_id, type_id));
            result = q.getresult();

        # Remember the section and priority so we can check them later if appropriate
        if result:
            files[file]["override section"] = result[0][0];
            files[file]["override priority"] = result[0][1];

        return result;

    ################################################################################

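    # Append a line to the running rejection message, prefixed with
    # "Rejected: " by default.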
    def reject (self, str, prefix="Rejected: "):
        if str:
            # Unlike other rejects we add new lines first to avoid trailing
            # new lines when this message is passed back up to a caller.
            if self.reject_message:
                self.reject_message = self.reject_message + "\n";
            self.reject_message = self.reject_message + prefix + str;

    ################################################################################

    def cross_suite_version_check(self, query_result, file, new_version):
        """Ensure versions are newer than existing packages in target
        suites and that cross-suite version checking rules as
        set out in the conf file are satisfied."""

        # Check versions for each target suite
        for target_suite in self.pkg.changes["distribution"].keys():
            must_be_newer_than = map(lower, self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)));
            must_be_older_than = map(lower, self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)));
            # Enforce "must be newer than target suite" even if conffile omits it
            if target_suite not in must_be_newer_than:
                must_be_newer_than.append(target_suite);
            for entry in query_result:
                existent_version = entry[0];
                suite = entry[1];
                if suite in must_be_newer_than and \
                   apt_pkg.VersionCompare(new_version, existent_version) != 1:
                    self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite));
                if suite in must_be_older_than and \
                   apt_pkg.VersionCompare(new_version, existent_version) != -1:
                    self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite));

    ################################################################################

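    # Check a binary upload against the database: apply the cross-suite
    # version rules and refuse to overwrite an existing copy of the same
    # package/version/architecture.  Returns the accumulated reject message.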
    def check_binary_against_db(self, file):
        self.reject_message = "";
        files = self.pkg.files;

        # Ensure version is sane
        q = self.projectB.query("""
SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
                                     architecture a
 WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
   AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
                                % (files[file]["package"],
                                   files[file]["architecture"]));
        self.cross_suite_version_check(q.getresult(), file, files[file]["version"]);

        # Check for any existing copies of the file
        q = self.projectB.query("""
SELECT b.id FROM binaries b, architecture a
 WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
   AND a.id = b.architecture"""
                                % (files[file]["package"],
                                   files[file]["version"],
                                   files[file]["architecture"]))
        if q.getresult():
            self.reject("can not overwrite existing copy of '%s' already in the archive." % (file));

        return self.reject_message;

    ################################################################################

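    # Check a source upload against the database: apply the cross-suite
    # version rules to the versions of this source already in each suite.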
    def check_source_against_db(self, file):
        self.reject_message = "";
        dsc = self.pkg.dsc;

        # Ensure version is sane
        q = self.projectB.query("""
SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
 WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")));
        self.cross_suite_version_check(q.getresult(), file, dsc.get("version"));

        return self.reject_message;

    ################################################################################

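    # Verify the files listed in the .dsc against the archive and the
    # queue directories, coping with the multiple possible locations of
    # an .orig.tar.gz; checks sizes and md5sums and records the
    # orig.tar.gz id/location for install().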
    def check_dsc_against_db(self, file):
        self.reject_message = "";
        files = self.pkg.files;
        dsc_files = self.pkg.dsc_files;
        legacy_source_untouchable = self.pkg.legacy_source_untouchable;
        orig_tar_gz = None;

        # Try and find all files mentioned in the .dsc.  This has
        # to work harder to cope with the multiple possible
        # locations of an .orig.tar.gz.
        for dsc_file in dsc_files.keys():
            found = None;
            if files.has_key(dsc_file):
                actual_md5 = files[dsc_file]["md5sum"];
                actual_size = int(files[dsc_file]["size"]);
                found = "%s in incoming" % (dsc_file)
                # Check the file does not already exist in the archive
                q = self.projectB.query("SELECT f.size, f.md5sum FROM files f, location l WHERE (f.filename ~ '/%s$' OR f.filename = '%s') AND l.id = f.location" % (utils.regex_safe(dsc_file), dsc_file));

                # "It has not broken them.  It has fixed a
                # brokenness.  Your crappy hack exploited a bug in
                # the old dinstall.
                #
                # "(Come on!  I thought it was always obvious that
                # one just doesn't release different files with
                # the same name and version.)"
                #                        -- ajk@ on d-devel@l.d.o

                ql = q.getresult();
                if ql:
                    # Ignore exact matches for .orig.tar.gz
                    match = 0;
                    if dsc_file[-12:] == ".orig.tar.gz":
                        for i in ql:
                            if files.has_key(dsc_file) and \
                               int(files[dsc_file]["size"]) == int(i[0]) and \
                               files[dsc_file]["md5sum"] == i[1]:
                                self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ");
                                del files[dsc_file];
                                match = 1;

                    if not match:
                        self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file));
            elif dsc_file[-12:] == ".orig.tar.gz":
                # Check in the pool
                q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE (f.filename ~ '/%s$' OR f.filename = '%s') AND l.id = f.location" % (utils.regex_safe(dsc_file), dsc_file));
                ql = q.getresult();

                if ql:
                    # Unfortunately, we may get more than one
                    # match here if, for example, the package was
                    # in potato but had a -sa upload in woody.  So
                    # we need to choose the right one.

                    x = ql[0]; # default to something sane in case we don't match any or have only one

                    if len(ql) > 1:
                        for i in ql:
                            old_file = i[0] + i[1];
                            actual_md5 = apt_pkg.md5sum(utils.open_file(old_file));
                            actual_size = os.stat(old_file)[stat.ST_SIZE];
                            if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
                                x = i;
                            else:
                                legacy_source_untouchable[i[3]] = "";

                    old_file = x[0] + x[1];
                    actual_md5 = apt_pkg.md5sum(utils.open_file(old_file));
                    actual_size = os.stat(old_file)[stat.ST_SIZE];
                    found = old_file;
                    suite_type = x[2];
                    dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
                    # See install() in katie...
                    self.pkg.orig_tar_id = x[3];
                    if suite_type == "legacy" or suite_type == "legacy-mixed":
                        self.pkg.orig_tar_location = "legacy";
                    else:
                        self.pkg.orig_tar_location = x[4];
                else:
                    # Not there? Check the queue directories...

                    in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file);
                    # See process_it() in jennifer for explanation of this
                    if os.path.exists(in_unchecked):
                        return (self.reject_message, in_unchecked);
                    else:
                        for dir in [ "Accepted", "New", "Byhand" ]:
                            in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (dir)],dsc_file);
                            if os.path.exists(in_otherdir):
                                actual_md5 = apt_pkg.md5sum(utils.open_file(in_otherdir));
                                actual_size = os.stat(in_otherdir)[stat.ST_SIZE];
                                found = in_otherdir;

                    if not found:
                        self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file));
                        continue;
            else:
                self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file));
                continue;
            if actual_md5 != dsc_files[dsc_file]["md5sum"]:
                self.reject("md5sum for %s doesn't match %s." % (found, file));
            if actual_size != int(dsc_files[dsc_file]["size"]):
                self.reject("size for %s doesn't match %s." % (found, file));

        return (self.reject_message, orig_tar_gz);

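    # Debugging wrapper around projectB.query() that logs the query and
    # how long it took to stderr.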
    def do_query(self, q):
        sys.stderr.write("query: \"%s\" ... " % (q));
        before = time.time();
        r = self.projectB.query(q);
        time_diff = time.time()-before;
        sys.stderr.write("took %.3f seconds.\n" % (time_diff));
        return r;