1 #!/usr/bin/env python
2
3 # Utility functions for katie
4 # Copyright (C) 2001, 2002  James Troup <james@nocrew.org>
5 # $Id: katie.py,v 1.21 2002-05-18 23:55:07 troup Exp $
6
7 # This program is free software; you can redistribute it and/or modify
8 # it under the terms of the GNU General Public License as published by
9 # the Free Software Foundation; either version 2 of the License, or
10 # (at your option) any later version.
11
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15 # GNU General Public License for more details.
16
17 # You should have received a copy of the GNU General Public License
18 # along with this program; if not, write to the Free Software
19 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
20
21 ###############################################################################
22
23 import cPickle, errno, os, pg, re, stat, string, sys, tempfile, time;
24 import utils, db_access;
25 import apt_inst, apt_pkg;
26
27 from types import *;
28 from string import lower;
29
30 ###############################################################################
31
32 re_isanum = re.compile (r"^\d+$");
33 re_default_answer = re.compile(r"\[(.*)\]");
34 re_fdnic = re.compile("\n\n");
35 re_bin_only_nmu_of_mu = re.compile(r"\.\d+\.\d+$");
36 re_bin_only_nmu_of_nmu = re.compile(r"\.\d+$");
37
38 ###############################################################################
39
40 # Convenience wrapper to carry around all the package information in one object
41
42 class Pkg:
43     def __init__(self, **kwds):
44         self.__dict__.update(kwds);
45
46     def update(self, **kwds):
47         self.__dict__.update(kwds);
48
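# A rough usage sketch (the values are hypothetical, for illustration only):
#
#   pkg = Pkg(changes = {}, dsc = {}, files = {}, dsc_files = {},
#             legacy_source_untouchable = {});
#   pkg.update(orig_tar_id = None, orig_tar_location = "");
#   pkg.changes["source"] = "foo";        # keywords become plain attributes
#
# Katie.__init__() below builds its self.pkg in exactly this way.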
49 ###############################################################################
50
51 class nmu_p:
52     # Read in the group maintainer override file
53     def __init__ (self, Cnf):
54         self.group_maint = {};
55         self.Cnf = Cnf;
56         if Cnf.get("Dinstall::GroupOverrideFilename"):
57             filename = Cnf["Dir::Override"] + Cnf["Dinstall::GroupOverrideFilename"];
58             file = utils.open_file(filename);
59             for line in file.readlines():
60                 line = lower(string.strip(utils.re_comments.sub('', line)));
61                 if line != "":
62                     self.group_maint[line] = 1;
63             file.close();
64
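    # The override file is expected to hold one maintainer e-mail address per
    # line; '#' comments are stripped, blank lines are skipped and everything
    # is lower-cased before being stored.  A hypothetical example file:
    #
    #   # group addresses whose uploads should never be treated as NMUs
    #   packages@qa.debian.org
    #   debian-boot@lists.debian.org
    #
    # (The addresses above are purely illustrative.)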
65     def is_an_nmu (self, pkg):
66         Cnf = self.Cnf;
67         changes = pkg.changes;
68         dsc = pkg.dsc;
69
70         (dsc_rfc822, dsc_name, dsc_email) = utils.fix_maintainer (lower(dsc.get("maintainer",Cnf["Dinstall::MyEmailAddress"])));
71         # changes["changedbyname"] == dsc_name is probably never true, but better safe than sorry
72         if dsc_name == lower(changes["maintainername"]) and \
73            (changes["changedby822"] == "" or lower(changes["changedbyname"]) == dsc_name):
74             return 0;
75
76         if dsc.has_key("uploaders"):
77             uploaders = string.split(lower(dsc["uploaders"]), ",");
78             uploadernames = {};
79             for i in uploaders:
80                 (rfc822, name, email) = utils.fix_maintainer (string.strip(i));
81                 uploadernames[name] = "";
82             if uploadernames.has_key(lower(changes["changedbyname"])):
83                 return 0;
84
85         # Some group-maintained packages (e.g. Debian QA) are never NMUs
86         if self.group_maint.has_key(lower(changes["maintaineremail"])):
87             return 0;
88
89         return 1;
90
91 ###############################################################################
92
93 class Katie:
94
95     def __init__(self, Cnf):
96         self.Cnf = Cnf;
97         self.values = {};
98         # Read in the group-maint override file
99         self.nmu = nmu_p(Cnf);
100         self.accept_count = 0;
101         self.accept_bytes = 0L;
102         self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
103                        legacy_source_untouchable = {});
104
105         # Initialize the global template substitution mapping
106         Subst = self.Subst = {};
107         Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"];
108         Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"];
109         Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"];
110         Subst["__KATIE_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"];
111
112         self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]));
113         db_access.init(Cnf, self.projectB);
114
115     ###########################################################################
116
117     def init_vars (self):
118         for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
119             exec "self.pkg.%s.clear();" % (i);
120         self.pkg.orig_tar_id = None;
121         self.pkg.orig_tar_location = "";
122
123     ###########################################################################
124
125     def update_vars (self):
126         dump_filename = self.pkg.changes_file[:-8]+".katie";
127         dump_file = utils.open_file(dump_filename);
128         p = cPickle.Unpickler(dump_file);
129         for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
130             exec "self.pkg.%s.update(p.load());" % (i);
131         for i in [ "orig_tar_id", "orig_tar_location" ]:
132             exec "self.pkg.%s = p.load();" % (i);
133         dump_file.close();
134
135     ###########################################################################
136
137     # This could just dump the dictionaries as-is, but I'd rather not, so
138     # that there's some idea of what katie & lisa actually use from jennifer.
139
140     def dump_vars(self, dest_dir):
141         for i in [ "changes", "dsc", "files", "dsc_files",
142                    "legacy_source_untouchable", "orig_tar_id", "orig_tar_location" ]:
143             exec "%s = self.pkg.%s;" % (i,i);
144         dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".katie");
145         dump_file = utils.open_file(dump_filename, 'w');
146         os.chmod(dump_filename, 0660);
147         p = cPickle.Pickler(dump_file, 1);
148         for i in [ "d_changes", "d_dsc", "d_files", "d_dsc_files" ]:
149             exec "%s = {}" % i;
150         ## files
151         for file in files.keys():
152             d_files[file] = {};
153             for i in [ "package", "version", "architecture", "type", "size",
154                        "md5sum", "component", "location id", "source package",
155                        "source version", "maintainer", "dbtype", "files id",
156                        "new", "section", "priority", "othercomponents",
157                        "pool name" ]:
158                 if files[file].has_key(i):
159                     d_files[file][i] = files[file][i];
160         ## changes
161         # Mandatory changes fields
162         for i in [ "distribution", "source", "architecture", "version", "maintainer",
163                    "urgency", "fingerprint", "changedby822", "changedbyname",
164                    "maintainername", "maintaineremail", "closes" ]:
165             d_changes[i] = changes[i];
166         # Optional changes fields
167         # FIXME: changes should be mandatory
168         for i in [ "changed-by", "maintainer822", "filecontents", "format",
169                    "changes", "lisa note" ]:
170             if changes.has_key(i):
171                 d_changes[i] = changes[i];
172         ## dsc
173         for i in [ "source", "version", "maintainer", "fingerprint", "uploaders" ]:
174             if dsc.has_key(i):
175                 d_dsc[i] = dsc[i];
176         ## dsc_files
177         for file in dsc_files.keys():
178             d_dsc_files[file] = {};
179             # Mandatory dsc_files fields
180             for i in [ "size", "md5sum" ]:
181                 d_dsc_files[file][i] = dsc_files[file][i];
182             # Optional dsc_files fields
183             for i in [ "files id" ]:
184                 if dsc_files[file].has_key(i):
185                     d_dsc_files[file][i] = dsc_files[file][i];
186
187         for i in [ d_changes, d_dsc, d_files, d_dsc_files,
188                    legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
189             p.dump(i);
190         dump_file.close();
191
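    # The .katie file written above is simply a sequence of cPickle'd objects
    # in the order they are dumped.  A minimal stand-alone reader, equivalent
    # to what update_vars() does (the filename is hypothetical):
    #
    #   dump_file = utils.open_file("foo_1.0-1_i386.katie");
    #   p = cPickle.Unpickler(dump_file);
    #   changes = p.load(); dsc = p.load(); files = p.load(); dsc_files = p.load();
    #   legacy_source_untouchable = p.load();
    #   orig_tar_id = p.load(); orig_tar_location = p.load();
    #   dump_file.close();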
192     ###########################################################################
193
194     # Set up the per-package template substitution mappings
195
196     def update_subst (self, reject_message = ""):
197         Subst = self.Subst;
198         changes = self.pkg.changes;
199         # If jennifer crashed out in the right place, architecture may still be a string.
200         if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
201             changes["architecture"] = { "Unknown" : "" };
202         # and maintainer822 may not exist.
203         if not changes.has_key("maintainer822"):
204             changes["maintainer822"] = self.Cnf["Dinstall::MyEmailAddress"];
205
206         Subst["__ARCHITECTURE__"] = string.join(changes["architecture"].keys(), ' ' );
207         Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file);
208         Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "");
209
210         # For source uploads the Changed-By field wins; otherwise Maintainer wins.
211         if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
212             Subst["__MAINTAINER_FROM__"] = changes["changedby822"];
213             Subst["__MAINTAINER_TO__"] = changes["changedby822"] + ", " + changes["maintainer822"];
214             Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown");
215         else:
216             Subst["__MAINTAINER_FROM__"] = changes["maintainer822"];
217             Subst["__MAINTAINER_TO__"] = changes["maintainer822"];
218             Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown");
219         if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
220             Subst["__MAINTAINER_TO__"] = Subst["__MAINTAINER_TO__"] + "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])
221
222         # Apply any global override of the Maintainer field
223         if self.Cnf.get("Dinstall::OverrideMaintainer"):
224             Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"];
225             Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"];
226
227         Subst["__REJECT_MESSAGE__"] = reject_message;
228         Subst["__SOURCE__"] = changes.get("source", "Unknown");
229         Subst["__VERSION__"] = changes.get("version", "Unknown");
230
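    # The Subst mapping built here is consumed by utils.TemplateSubst(), which
    # substitutes the __KEY__ tokens into the mail templates.  A rough sketch
    # of how the rest of this class uses it:
    #
    #   self.update_subst();
    #   mail_message = utils.TemplateSubst(self.Subst,
    #                                      self.Cnf["Dir::Templates"]+"/jennifer.accepted");
    #   utils.send_mail(mail_message, "");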
231     ###########################################################################
232
233     def build_summaries(self):
234         changes = self.pkg.changes;
235         files = self.pkg.files;
236
237         byhand = summary = new = "";
238
239         # changes["distribution"] may not exist in corner cases
240         # (e.g. unreadable changes files)
241         if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
242             changes["distribution"] = {};
243
244         file_keys = files.keys();
245         file_keys.sort();
246         for file in file_keys:
247             if files[file].has_key("byhand"):
248                 byhand = 1
249                 summary = summary + file + " byhand\n"
250             elif files[file].has_key("new"):
251                 new = 1
252                 summary = summary + "(new) %s %s %s\n" % (file, files[file]["priority"], files[file]["section"])
253                 if files[file].has_key("othercomponents"):
254                     summary = summary + "WARNING: Already present in %s distribution.\n" % (files[file]["othercomponents"])
255                 if files[file]["type"] == "deb":
256                     summary = summary + apt_pkg.ParseSection(apt_inst.debExtractControl(utils.open_file(file)))["Description"] + '\n';
257             else:
258                 files[file]["pool name"] = utils.poolify (changes["source"], files[file]["component"])
259                 destination = self.Cnf["Dir::PoolRoot"] + files[file]["pool name"] + file
260                 summary = summary + file + "\n  to " + destination + "\n"
261
262         short_summary = summary;
263
264         # This is for direport's benefit...
265         f = re_fdnic.sub("\n .\n", changes.get("changes",""));
266
267         if byhand or new:
268             summary = summary + "Changes: " + f;
269
270         summary = summary + self.announce(short_summary, 0)
271
272         return (summary, short_summary);
273
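    # A hypothetical short_summary for an upload with one NEW binary and one
    # file headed for the pool might look roughly like:
    #
    #   (new) foo_1.0-1_i386.deb optional utils
    #   foo_1.0-1.dsc
    #     to <Dir::PoolRoot><pool name>foo_1.0-1.dsc
    #
    # (names made up for illustration; the full summary additionally carries
    # the "Changes:" text and any "Announcing to ..." lines.)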
274     ###########################################################################
275
276     def close_bugs (self, summary, action):
277         changes = self.pkg.changes;
278         Subst = self.Subst;
279         Cnf = self.Cnf;
280
281         bugs = changes["closes"].keys();
282
283         if not bugs:
284             return summary;
285
286         bugs.sort();
287         if not self.nmu.is_an_nmu(self.pkg):
288             summary = summary + "Closing bugs: ";
289             for bug in bugs:
290                 summary = summary + "%s " % (bug);
291                 if action:
292                     Subst["__BUG_NUMBER__"] = bug;
293                     if changes["distribution"].has_key("stable"):
294                         Subst["__STABLE_WARNING__"] = """
295 Note that this package is not part of the released stable Debian
296 distribution.  It may have dependencies on other unreleased software,
297 or other instabilities.  Please take care if you wish to install it.
298 The update will eventually make its way into the next released Debian
299 distribution.""";
300                     else:
301                         Subst["__STABLE_WARNING__"] = "";
302                     mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.bug-close");
303                     utils.send_mail (mail_message, "");
304             if action:
305                 self.Logger.log(["closing bugs"]+bugs);
306         else:                     # NMU
307             summary = summary + "Setting bugs to severity fixed: ";
308             control_message = "";
309             for bug in bugs:
310                 summary = summary + "%s " % (bug);
311                 control_message = control_message + "tag %s + fixed\n" % (bug);
312             if action and control_message != "":
313                 Subst["__CONTROL_MESSAGE__"] = control_message;
314                 mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.bug-nmu-fixed");
315                 utils.send_mail (mail_message, "");
316             if action:
317                 self.Logger.log(["setting bugs to fixed"]+bugs);
318         summary = summary + "\n";
319         return summary;
320
321     ###########################################################################
322
323     def announce (self, short_summary, action):
324         Subst = self.Subst;
325         Cnf = self.Cnf;
326         changes = self.pkg.changes;
327
328         # Only do announcements for source uploads with a recent dpkg-dev installed
329         if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):
330             return "";
331
332         lists_done = {};
333         summary = "";
334         Subst["__SHORT_SUMMARY__"] = short_summary;
335
336         for dist in changes["distribution"].keys():
337             list = Cnf.Find("Suite::%s::Announce" % (dist));
338             if list == "" or lists_done.has_key(list):
339                 continue;
340             lists_done[list] = 1;
341             summary = summary + "Announcing to %s\n" % (list);
342
343             if action:
344                 Subst["__ANNOUNCE_LIST_ADDRESS__"] = list;
345                 if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
346                     Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"]);
347                 mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.announce");
348                 utils.send_mail (mail_message, "");
349
350         if Cnf.get("Dinstall::CloseBugs"):
351             summary = self.close_bugs(summary, action);
352
353         return summary;
354
355     ###########################################################################
356
357     def accept (self, summary, short_summary):
358         Cnf = self.Cnf;
359         Subst = self.Subst;
360         files = self.pkg.files;
361
362         print "Accepting."
363         self.Logger.log(["Accepting changes",self.pkg.changes_file]);
364
365         self.dump_vars(Cnf["Dir::Queue::Accepted"]);
366
367         # Move all the files into the accepted directory
368         utils.move(self.pkg.changes_file, Cnf["Dir::Queue::Accepted"]);
369         file_keys = files.keys();
370         for file in file_keys:
371             utils.move(file, Cnf["Dir::Queue::Accepted"]);
372             self.accept_bytes = self.accept_bytes + float(files[file]["size"])
373         self.accept_count = self.accept_count + 1;
374
375         # Send accept mail, announce to lists, close bugs and check for
376         # override disparities
377         if not Cnf["Dinstall::Options::No-Mail"]:
378             Subst["__SUITE__"] = "";
379             Subst["__SUMMARY__"] = summary;
380             mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.accepted");
381             utils.send_mail(mail_message, "")
382             self.announce(short_summary, 1)
383
384         # Special support to enable clean auto-building of accepted packages
385         if Cnf.FindB("Dinstall::SpecialAcceptedAutoBuild") and \
386            self.pkg.changes["distribution"].has_key("unstable"):
387             self.projectB.query("BEGIN WORK");
388             for file in file_keys:
389                 src = os.path.join(Cnf["Dir::Queue::Accepted"], file);
390                 dest = os.path.join(Cnf["Dir::AcceptedAutoBuild"], file);
391                 # Create a symlink to it
392                 os.symlink(src, dest);
393                 # Add it to the list of packages for later processing by apt-ftparchive
394                 self.projectB.query("INSERT INTO unstable_accepted (filename, in_accepted) VALUES ('%s', 't')" % (dest));
395             # If the .orig.tar.gz is in the pool, create a symlink to
396             # it (if one doesn't already exist)
397             if self.pkg.orig_tar_id:
398                 # Determine the .orig.tar.gz file name
399                 for dsc_file in self.pkg.dsc_files.keys():
400                     if dsc_file[-12:] == ".orig.tar.gz":
401                         filename = dsc_file;
402                 dest = os.path.join(Cnf["Dir::AcceptedAutoBuild"],filename);
403                 # If it doesn't exist, create a symlink
404                 if not os.path.exists(dest):
405                     # Find the .orig.tar.gz in the pool
406                     q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id));
407                     ql = q.getresult();
408                     if not ql:
409                         utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id));
410                     src = os.path.join(ql[0][0], ql[0][1]);
411                     os.symlink(src, dest);
412                     # Add it to the list of packages for later processing by apt-ftparchive
413                     self.projectB.query("INSERT INTO unstable_accepted (filename, in_accepted) VALUES ('%s', 't')" % (dest));
414
415             self.projectB.query("COMMIT WORK");
416
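    # When Dinstall::SpecialAcceptedAutoBuild is enabled, the net effect of
    # the block above is (paths hypothetical):
    #
    #   <Dir::AcceptedAutoBuild>/foo_1.0-1_i386.deb -> <Dir::Queue::Accepted>/foo_1.0-1_i386.deb
    #   <Dir::AcceptedAutoBuild>/foo_1.0.orig.tar.gz -> <pool copy of the .orig.tar.gz>
    #
    # plus one row per symlink in the unstable_accepted table, which is where
    # the later apt-ftparchive run picks the files up from.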
417     ###########################################################################
418
419     def check_override (self):
420         Subst = self.Subst;
421         changes = self.pkg.changes;
422         files = self.pkg.files;
423         Cnf = self.Cnf;
424
425         # Abandon the check if:
426         #  a) it's a non-sourceful upload
427         #  b) override disparity checks have been disabled
428         #  c) we're not sending mail
429         if not changes["architecture"].has_key("source") or \
430            not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
431            Cnf["Dinstall::Options::No-Mail"]:
432             return;
433
434         summary = "";
435         for file in files.keys():
436             if not files[file].has_key("new") and files[file]["type"] == "deb":
437                 section = files[file]["section"];
438                 override_section = files[file]["override section"];
439                 if lower(section) != lower(override_section) and section != "-":
440                     # Ignore this; it's a common mistake and not worth whining about
441                     if lower(section) == "non-us/main" and lower(override_section) == "non-us":
442                         continue;
443                     summary = summary + "%s: section is overridden from %s to %s.\n" % (file, section, override_section);
444                 priority = files[file]["priority"];
445                 override_priority = files[file]["override priority"];
446                 if priority != override_priority and priority != "-":
447                     summary = summary + "%s: priority is overridden from %s to %s.\n" % (file, priority, override_priority);
448
449         if summary == "":
450             return;
451
452         Subst["__SUMMARY__"] = summary;
453         mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/jennifer.override-disparity");
454         utils.send_mail (mail_message, "");
455
456     ###########################################################################
457
458     def force_move (self, files):
459         """Forcefully move files from the current directory to the reject
460            directory.  If any file already exists it will be moved to the
461            morgue to make way for the new file."""
462
463         Cnf = self.Cnf
464
465         for file in files:
466             # Skip any files which don't exist or which we don't have permission to copy.
467             if os.access(file,os.R_OK) == 0:
468                 continue;
469             dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file);
470             try:
471                 os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);
472             except OSError, e:
473                 # File exists?  Let's try and move it to the morgue
474                 if errno.errorcode[e.errno] == 'EEXIST':
475                     morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file);
476                     try:
477                         morgue_file = utils.find_next_free(morgue_file);
478                     except utils.tried_too_hard_exc:
479                         # Something's either gone badly Pete Tong, or
480                         # someone is trying to exploit us.
481                         utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file));
482                         return;
483                     utils.move(dest_file, morgue_file, perms=0660);
484                     try:
485                         os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);
486                     except OSError, e:
487                         # Likewise
488                         utils.warn("**WARNING** failed to claim %s in the reject directory." % (file));
489                         return;
490                 else:
491                     raise;
492             # If we got here, we own the destination file, so we can
493             # safely overwrite it.
494             utils.move(file, dest_file, 1, perms=0660);
495
496     ###########################################################################
497
498     def do_reject (self, manual = 0, reject_message = ""):
499         # If we weren't given a manual rejection message, spawn an
500         # editor so the user can add one in...
501         if manual and not reject_message:
502             temp_filename = tempfile.mktemp();
503             fd = os.open(temp_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0700);
504             os.close(fd);
505             editor = os.environ.get("EDITOR","vi")
506             answer = 'E';
507             while answer == 'E':
508                 os.system("%s %s" % (editor, temp_filename))
509                 file = utils.open_file(temp_filename);
510                 reject_message = string.join(file.readlines());
511                 file.close();
512                 print "Reject message:";
513                 print utils.prefix_multi_line_string(reject_message,"  ");
514                 prompt = "[R]eject, Edit, Abandon, Quit ?"
515                 answer = "XXX";
516                 while string.find(prompt, answer) == -1:
517                     answer = utils.our_raw_input(prompt);
518                     m = re_default_answer.search(prompt);
519                     if answer == "":
520                         answer = m.group(1);
521                     answer = string.upper(answer[:1]);
522             os.unlink(temp_filename);
523             if answer == 'A':
524                 return 1;
525             elif answer == 'Q':
526                 sys.exit(0);
527
528         print "Rejecting.\n"
529
530         Cnf = self.Cnf;
531         Subst = self.Subst;
532         pkg = self.pkg;
533
534         reason_filename = pkg.changes_file[:-8] + ".reason";
535         reject_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename;
536
537         # Move all the files into the reject directory
538         reject_files = pkg.files.keys() + [pkg.changes_file];
539         self.force_move(reject_files);
540
541         # If we fail here someone is probably trying to exploit the race
542         # so let's just raise an exception ...
543         if os.path.exists(reject_filename):
544             os.unlink(reject_filename);
545         fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);
546
547         if not manual:
548             Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"];
549             Subst["__MANUAL_REJECT_MESSAGE__"] = "";
550             Subst["__CC__"] = "X-Katie-Rejection: automatic (moo)";
551             os.write(fd, reject_message);
552             os.close(fd);
553             reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/katie.rejected");
554         else:
555             # Build up the rejection email
556             user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"]);
557
558             Subst["__REJECTOR_ADDRESS__"] = user_email_address;
559             Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message;
560             Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"];
561             reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/katie.rejected");
562
563             # Write the rejection email out as the <foo>.reason file
564             os.write(fd, reject_mail_message);
565             os.close(fd);
566
567         # Send the rejection mail if appropriate
568         if not Cnf["Dinstall::Options::No-Mail"]:
569             utils.send_mail (reject_mail_message, "");
570
571         self.Logger.log(["rejected", pkg.changes_file]);
572         return 0;
573
574     ################################################################################
575
576     # Ensure that source exists somewhere in the archive for the binary
577     # upload being processed.
578     #
579     # (1) exact match                      => 1.0-3
580     # (2) Bin-only NMU of an MU            => 1.0-3.0.1
581     # (3) Bin-only NMU of a sourceful-NMU  => 1.0-3.1.1
582
583     def source_exists (self, package, source_version):
584         q = self.projectB.query("SELECT s.version FROM source s WHERE s.source = '%s'" % (package));
585
586         # Reduce the query results to a list of version numbers
587         ql = map(lambda x: x[0], q.getresult());
588
589         # Try (1)
590         if ql.count(source_version):
591             return 1;
592
593         # Try (2)
594         orig_source_version = re_bin_only_nmu_of_mu.sub('', source_version);
595         if ql.count(orig_source_version):
596             return 1;
597
598         # Try (3)
599         orig_source_version = re_bin_only_nmu_of_nmu.sub('', source_version);
600         if ql.count(orig_source_version):
601             return 1;
602
603         # No source found...
604         return 0;
605
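    # Worked examples of the three cases above (versions are hypothetical):
    #   (1) source_version "1.0-3"     : found directly in ql.
    #   (2) source_version "1.0-3.0.1" : re_bin_only_nmu_of_mu strips ".0.1"
    #                                    and "1.0-3" is looked up instead.
    #   (3) source_version "1.0-3.1.1" : re_bin_only_nmu_of_nmu strips ".1"
    #                                    and "1.0-3.1" is looked up instead.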
606     ################################################################################
607
608     def in_override_p (self, package, component, suite, binary_type, file):
609         files = self.pkg.files;
610
611         if binary_type == "": # must be source
612             type = "dsc";
613         else:
614             type = binary_type;
615
616         # Override suite name; used for example with proposed-updates
617         if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
618             suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)];
619
620         # Avoid <undef> on unknown distributions
621         suite_id = db_access.get_suite_id(suite);
622         if suite_id == -1:
623             return None;
624         component_id = db_access.get_component_id(component);
625         type_id = db_access.get_override_type_id(type);
626
627         # FIXME: nasty non-US specific hack
628         if lower(component[:7]) == "non-us/":
629             component = component[7:];
630
631         q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
632                            % (package, suite_id, component_id, type_id));
633         result = q.getresult();
634         # If checking for a source package, fall back on the binary override type
635         if type == "dsc" and not result:
636             type_id = db_access.get_override_type_id("deb");
637             q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
638                                % (package, suite_id, component_id, type_id));
639             result = q.getresult();
640
641         # Remember the section and priority so we can check them later if appropriate
642         if result:
643             files[file]["override section"] = result[0][0];
644             files[file]["override priority"] = result[0][1];
645
646         return result;
647
648     ################################################################################
649
650     def reject (self, str, prefix="Rejected: "):
651         if str:
652             # Unlike other rejects we add new lines first to avoid trailing
653             # new lines when this message is passed back up to a caller.
654             if self.reject_message:
655                 self.reject_message = self.reject_message + "\n";
656             self.reject_message = self.reject_message + prefix + str;
657
658     ################################################################################
659
660     def cross_suite_version_check(self, query_result, file, new_version):
661         """Ensure versions are newer than existing packages in target
662         suites and that cross-suite version checking rules as
663         set out in the conf file are satisfied."""
664
665         # Check versions for each target suite
666         for target_suite in self.pkg.changes["distribution"].keys():
667             must_be_newer_than = map(lower, self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)));
668             must_be_older_than = map(lower, self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)));
669             # Enforce "must be newer than target suite" even if conffile omits it
670             if target_suite not in must_be_newer_than:
671                 must_be_newer_than.append(target_suite);
672             for entry in query_result:
673                 existent_version = entry[0];
674                 suite = entry[1];
675                 if suite in must_be_newer_than and \
676                    apt_pkg.VersionCompare(new_version, existent_version) != 1:
677                     self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite));
678                 if suite in must_be_older_than and \
679                    apt_pkg.VersionCompare(new_version, existent_version) != -1:
680                     self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite));
681
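    # The corresponding configuration stanza is apt.conf-style; a hypothetical
    # example (suite names and values are illustrative only):
    #
    #   Suite
    #   {
    #     unstable
    #     {
    #       VersionChecks
    #       {
    #         MustBeNewerThan { stable; testing; };
    #         MustBeOlderThan { experimental; };
    #       };
    #     };
    #   };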
682     ################################################################################
683
684     def check_binary_against_db(self, file):
685         self.reject_message = "";
686         files = self.pkg.files;
687
688         # Ensure version is sane
689         q = self.projectB.query("""
690 SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
691                                      architecture a
692  WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
693    AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
694                                 % (files[file]["package"],
695                                    files[file]["architecture"]));
696         self.cross_suite_version_check(q.getresult(), file, files[file]["version"]);
697
698         # Check for any existing copies of the file
699         q = self.projectB.query("""
700 SELECT b.id FROM binaries b, architecture a
701  WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
702    AND a.id = b.architecture"""
703                                 % (files[file]["package"],
704                                    files[file]["version"],
705                                    files[file]["architecture"]))
706         if q.getresult():
707             self.reject("can not overwrite existing copy of '%s' already in the archive." % (file));
708
709         return self.reject_message;
710
711     ################################################################################
712
713     def check_source_against_db(self, file):
714         self.reject_message = "";
715         dsc = self.pkg.dsc;
716
717         # Ensure version is sane
718         q = self.projectB.query("""
719 SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
720  WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")));
721         self.cross_suite_version_check(q.getresult(), file, dsc.get("version"));
722
723         return self.reject_message;
724
725     ################################################################################
726
727     def check_dsc_against_db(self, file):
728         self.reject_message = "";
729         files = self.pkg.files;
730         dsc_files = self.pkg.dsc_files;
731         legacy_source_untouchable = self.pkg.legacy_source_untouchable;
732         orig_tar_gz = None;
733
734         # Try and find all files mentioned in the .dsc.  This has
735         # to work harder to cope with the multiple possible
736         # locations of an .orig.tar.gz.
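        # The lookup order used below is: (a) the file is part of this upload
        # (self.pkg.files); (b) for an .orig.tar.gz only, the pool via the
        # files/location tables; (c) for an .orig.tar.gz only, the Unchecked,
        # Accepted, New and Byhand queue directories.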
737         for dsc_file in dsc_files.keys():
738             found = None;
739             if files.has_key(dsc_file):
740                 actual_md5 = files[dsc_file]["md5sum"];
741                 actual_size = int(files[dsc_file]["size"]);
742                 found = "%s in incoming" % (dsc_file)
743                 # Check the file does not already exist in the archive
744                 q = self.projectB.query("SELECT f.size, f.md5sum FROM files f, location l WHERE (f.filename ~ '/%s$' OR f.filename = '%s') AND l.id = f.location" % (utils.regex_safe(dsc_file), dsc_file));
745
746                 # "It has not broken them.  It has fixed a
747                 # brokenness.  Your crappy hack exploited a bug in
748                 # the old dinstall.
749                 #
750                 # "(Come on!  I thought it was always obvious that
751                 # one just doesn't release different files with
752                 # the same name and version.)"
753                 #                        -- ajk@ on d-devel@l.d.o
754
755                 ql = q.getresult();
756                 if ql:
757                     # Ignore exact matches for .orig.tar.gz
758                     match = 0;
759                     if dsc_file[-12:] == ".orig.tar.gz":
760                         for i in ql:
761                             if files.has_key(dsc_file) and \
762                                int(files[dsc_file]["size"]) == int(i[0]) and \
763                                files[dsc_file]["md5sum"] == i[1]:
764                                 self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ");
765                                 del files[dsc_file];
766                                 match = 1;
767
768                     if not match:
769                         self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file));
770             elif dsc_file[-12:] == ".orig.tar.gz":
771                 # Check in the pool
772                 q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE (f.filename ~ '/%s$' OR f.filename = '%s') AND l.id = f.location" % (utils.regex_safe(dsc_file), dsc_file));
773                 ql = q.getresult();
774
775                 if ql:
776                     # Unfortunately, we may get more than one
777                     # match here if, for example, the package was
778                     # in potato but had a -sa upload in woody.  So
779                     # we need to choose the right one.
780
781                     x = ql[0]; # default to something sane in case we don't match any or have only one
782
783                     if len(ql) > 1:
784                         for i in ql:
785                             old_file = i[0] + i[1];
786                             actual_md5 = apt_pkg.md5sum(utils.open_file(old_file));
787                             actual_size = os.stat(old_file)[stat.ST_SIZE];
788                             if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
789                                 x = i;
790                             else:
791                                 legacy_source_untouchable[i[3]] = "";
792
793                     old_file = x[0] + x[1];
794                     actual_md5 = apt_pkg.md5sum(utils.open_file(old_file));
795                     actual_size = os.stat(old_file)[stat.ST_SIZE];
796                     found = old_file;
797                     suite_type = x[2];
798                     dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
799                     # See install() in katie...
800                     self.pkg.orig_tar_id = x[3];
801                     if suite_type == "legacy" or suite_type == "legacy-mixed":
802                         self.pkg.orig_tar_location = "legacy";
803                     else:
804                         self.pkg.orig_tar_location = x[4];
805                 else:
806                     # Not there? Check the queue directories...
807
808                     in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file);
809                     # See process_it() in jennifer for explanation of this
810                     if os.path.exists(in_unchecked):
811                         return (self.reject_message, in_unchecked);
812                     else:
813                         for dir in [ "Accepted", "New", "Byhand" ]:
814                             in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (dir)],dsc_file);
815                             if os.path.exists(in_otherdir):
816                                 actual_md5 = apt_pkg.md5sum(utils.open_file(in_otherdir));
817                                 actual_size = os.stat(in_otherdir)[stat.ST_SIZE];
818                                 found = in_otherdir;
819
820                     if not found:
821                         self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file));
822                         continue;
823             else:
824                 self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file));
825                 continue;
826             if actual_md5 != dsc_files[dsc_file]["md5sum"]:
827                 self.reject("md5sum for %s doesn't match %s." % (found, file));
828             if actual_size != int(dsc_files[dsc_file]["size"]):
829                 self.reject("size for %s doesn't match %s." % (found, file));
830
831         return (self.reject_message, orig_tar_gz);
832
833     def do_query(self, q):
834         sys.stderr.write("query: \"%s\" ... " % (q));
835         before = time.time();
836         r = self.projectB.query(q);
837         time_diff = time.time()-before;
838         sys.stderr.write("took %.3f seconds.\n" % (time_diff));
839         return r;