1 #!/usr/bin/env python
2
3 # Utility functions for katie
4 # Copyright (C) 2001, 2002, 2003, 2004, 2005  James Troup <james@nocrew.org>
5 # $Id: katie.py,v 1.57 2005-12-05 03:45:12 ajt Exp $
6
7 # This program is free software; you can redistribute it and/or modify
8 # it under the terms of the GNU General Public License as published by
9 # the Free Software Foundation; either version 2 of the License, or
10 # (at your option) any later version.
11
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15 # GNU General Public License for more details.
16
17 # You should have received a copy of the GNU General Public License
18 # along with this program; if not, write to the Free Software
19 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
20
21 ###############################################################################
22
23 import cPickle, errno, os, pg, re, stat, string, sys, time;
24 import utils, db_access;
25 import apt_inst, apt_pkg;
26
27 from types import *;
28
29 ###############################################################################
30
31 re_isanum = re.compile (r"^\d+$");
32 re_default_answer = re.compile(r"\[(.*)\]");
33 re_fdnic = re.compile(r"\n\n");
34 re_bin_only_nmu = re.compile(r"\+b\d+$");
35
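# Illustrative sketch (not part of katie itself): what two of these regexps
# are used for later in this file.  re_bin_only_nmu strips the "+bN" suffix
# of a binary-only NMU version, and re_default_answer pulls the default
# answer out of a prompt string, e.g.
#
#   >>> re_bin_only_nmu.sub('', "1.0-3+b2")
#   '1.0-3'
#   >>> re_default_answer.search("[R]eject, Edit, Abandon, Quit ?").group(1)
#   'R'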
36 ###############################################################################
37
38 # Convenience wrapper to carry around all the package information in one object
39
40 class Pkg:
41     def __init__(self, **kwds):
42         self.__dict__.update(kwds);
43
44     def update(self, **kwds):
45         self.__dict__.update(kwds);
46
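# Illustrative sketch only (katie builds its Pkg in Katie.__init__ below):
# Pkg is a plain attribute bag, so keyword arguments become attributes that
# can be read back and replaced later, e.g.
#
#   >>> pkg = Pkg(changes={}, dsc={}, dsc_files={}, files={})
#   >>> pkg.update(orig_tar_id=None, orig_tar_location="")
#   >>> pkg.changes["source"] = "hello"          # hypothetical value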
47 ###############################################################################
48
49 class nmu_p:
50     # Read in the group maintainer override file
51     def __init__ (self, Cnf):
52         self.group_maint = {};
53         self.Cnf = Cnf;
54         if Cnf.get("Dinstall::GroupOverrideFilename"):
55             filename = Cnf["Dir::Override"] + Cnf["Dinstall::GroupOverrideFilename"];
56             file = utils.open_file(filename);
57             for line in file.readlines():
58                 line = utils.re_comments.sub('', line).lower().strip();
59                 if line != "":
60                     self.group_maint[line] = 1;
61             file.close();
62
63     def is_an_nmu (self, pkg):
64         Cnf = self.Cnf;
65         changes = pkg.changes;
66         dsc = pkg.dsc;
67
68         i = utils.fix_maintainer (dsc.get("maintainer",
69                                           Cnf["Dinstall::MyEmailAddress"]).lower());
70         (dsc_rfc822, dsc_rfc2047, dsc_name, dsc_email) = i;
71         # changes["changedbyname"] == dsc_name is probably never true, but better safe than sorry
72         if dsc_name == changes["maintainername"].lower() and \
73            (changes["changedby822"] == "" or changes["changedbyname"].lower() == dsc_name):
74             return 0;
75
76         if dsc.has_key("uploaders"):
77             uploaders = dsc["uploaders"].lower().split(",");
78             uploadernames = {};
79             for i in uploaders:
80                 (rfc822, rfc2047, name, email) = utils.fix_maintainer (i.strip());
81                 uploadernames[name] = "";
82             if uploadernames.has_key(changes["changedbyname"].lower()):
83                 return 0;
84
85         # Some group maintained packages (e.g. Debian QA) are never NMUs
86         if self.group_maint.has_key(changes["maintaineremail"].lower()):
87             return 0;
88
89         return 1;
90
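# Illustrative sketch (hypothetical values, not taken from a real upload):
# an upload is treated as an NMU unless the changer matches the Maintainer:
# or Uploaders: fields, or the maintainer address is group-maintained, e.g.
#
#   changes = { "maintainername": "jane example",
#               "maintaineremail": "jane@example.org",
#               "changedby822": "John Doe <john@example.org>",
#               "changedbyname": "john doe" }
#   dsc = { "maintainer": "Jane Example <jane@example.org>" }
#   => is_an_nmu() returns 1 (the changer is neither maintainer nor uploader)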
91 ###############################################################################
92
93 class Katie:
94
95     def __init__(self, Cnf):
96         self.Cnf = Cnf;
97         # Read in the group-maint override file
98         self.nmu = nmu_p(Cnf);
99         self.accept_count = 0;
100         self.accept_bytes = 0L;
101         self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
102                        legacy_source_untouchable = {});
103
104         # Initialize the substitution template mapping global
105         Subst = self.Subst = {};
106         Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"];
107         Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"];
108         Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"];
109         Subst["__KATIE_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"];
110
111         self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]));
112         db_access.init(Cnf, self.projectB);
113
114     ###########################################################################
115
116     def init_vars (self):
117         for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
118             exec "self.pkg.%s.clear();" % (i);
119         self.pkg.orig_tar_id = None;
120         self.pkg.orig_tar_location = "";
121         self.pkg.orig_tar_gz = None;
122
123     ###########################################################################
124
125     def update_vars (self):
126         dump_filename = self.pkg.changes_file[:-8]+".katie";
127         dump_file = utils.open_file(dump_filename);
128         p = cPickle.Unpickler(dump_file);
129         for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
130             exec "self.pkg.%s.update(p.load());" % (i);
131         for i in [ "orig_tar_id", "orig_tar_location" ]:
132             exec "self.pkg.%s = p.load();" % (i);
133         dump_file.close();
134
135     ###########################################################################
136
137     # This could just dump the dictionaries as is, but I'd like to avoid
138     # this so there's some idea of what katie & lisa use from jennifer
139
140     def dump_vars(self, dest_dir):
141         for i in [ "changes", "dsc", "files", "dsc_files",
142                    "legacy_source_untouchable", "orig_tar_id", "orig_tar_location" ]:
143             exec "%s = self.pkg.%s;" % (i,i);
144         dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".katie");
145         dump_file = utils.open_file(dump_filename, 'w');
146         try:
147             os.chmod(dump_filename, 0660);
148         except OSError, e:
149             if errno.errorcode[e.errno] == 'EPERM':
150                 perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE]);
151                 if perms & stat.S_IROTH:
152                     utils.fubar("%s is world readable and chmod failed." % (dump_filename));
153             else:
154                 raise;
155
156         p = cPickle.Pickler(dump_file, 1);
157         for i in [ "d_changes", "d_dsc", "d_files", "d_dsc_files" ]:
158             exec "%s = {}" % i;
159         ## files
160         for file in files.keys():
161             d_files[file] = {};
162             for i in [ "package", "version", "architecture", "type", "size",
163                        "md5sum", "component", "location id", "source package",
164                        "source version", "maintainer", "dbtype", "files id",
165                        "new", "section", "priority", "othercomponents",
166                        "pool name", "original component" ]:
167                 if files[file].has_key(i):
168                     d_files[file][i] = files[file][i];
169         ## changes
170         # Mandatory changes fields
171         for i in [ "distribution", "source", "architecture", "version",
172                    "maintainer", "urgency", "fingerprint", "changedby822",
173                    "changedby2047", "changedbyname", "maintainer822",
174                    "maintainer2047", "maintainername", "maintaineremail",
175                    "closes", "changes" ]:
176             d_changes[i] = changes[i];
177         # Optional changes fields
178         for i in [ "changed-by", "filecontents", "format", "lisa note", "distribution-version" ]:
179             if changes.has_key(i):
180                 d_changes[i] = changes[i];
181         ## dsc
182         for i in [ "source", "version", "maintainer", "fingerprint",
183                    "uploaders", "bts changelog" ]:
184             if dsc.has_key(i):
185                 d_dsc[i] = dsc[i];
186         ## dsc_files
187         for file in dsc_files.keys():
188             d_dsc_files[file] = {};
189             # Mandatory dsc_files fields
190             for i in [ "size", "md5sum" ]:
191                 d_dsc_files[file][i] = dsc_files[file][i];
192             # Optional dsc_files fields
193             for i in [ "files id" ]:
194                 if dsc_files[file].has_key(i):
195                     d_dsc_files[file][i] = dsc_files[file][i];
196
197         for i in [ d_changes, d_dsc, d_files, d_dsc_files,
198                    legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
199             p.dump(i);
200         dump_file.close();
201
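# Note on the .katie dump format (sketch, derived from the code above):
# dump_vars() pickles exactly seven objects in a fixed order and
# update_vars() load()s them back in the same order, so the two lists must
# stay in sync:
#
#   changes, dsc, files, dsc_files   (filtered dictionaries)
#   legacy_source_untouchable        (dictionary, dumped as-is)
#   orig_tar_id, orig_tar_location   (scalars)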
202     ###########################################################################
203
204     # Set up the per-package template substitution mappings
205
206     def update_subst (self, reject_message = ""):
207         Subst = self.Subst;
208         changes = self.pkg.changes;
209         # If jennifer crashed out in the right place, architecture may still be a string.
210         if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
211             changes["architecture"] = { "Unknown" : "" };
212         # and maintainer2047 may not exist.
213         if not changes.has_key("maintainer2047"):
214             changes["maintainer2047"] = self.Cnf["Dinstall::MyEmailAddress"];
215
216         Subst["__ARCHITECTURE__"] = " ".join(changes["architecture"].keys());
217         Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file);
218         Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "");
219
220         # For source uploads the Changed-By field wins; otherwise Maintainer wins.
221         if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
222             Subst["__MAINTAINER_FROM__"] = changes["changedby2047"];
223             Subst["__MAINTAINER_TO__"] = "%s, %s" % (changes["changedby2047"],
224                                                      changes["maintainer2047"]);
225             Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown");
226         else:
227             Subst["__MAINTAINER_FROM__"] = changes["maintainer2047"];
228             Subst["__MAINTAINER_TO__"] = changes["maintainer2047"];
229             Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown");
230         if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
231             Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])
232
233         # Apply any global override of the Maintainer field
234         if self.Cnf.get("Dinstall::OverrideMaintainer"):
235             Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"];
236             Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"];
237
238         Subst["__REJECT_MESSAGE__"] = reject_message;
239         Subst["__SOURCE__"] = changes.get("source", "Unknown");
240         Subst["__VERSION__"] = changes.get("version", "Unknown");
241
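# Illustrative sketch of the resulting mapping (hypothetical values): the
# mail templates under Dir::Templates refer to these __KEYS__, which
# utils.TemplateSubst() substitutes into the template text, e.g.
#
#   Subst = { "__ARCHITECTURE__": "source i386",
#             "__CHANGES_FILENAME__": "hello_1.0-1_i386.changes",
#             "__MAINTAINER_TO__": "Jane Example <jane@example.org>",
#             "__SOURCE__": "hello", "__VERSION__": "1.0-1", ... }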
242     ###########################################################################
243
244     def build_summaries(self):
245         changes = self.pkg.changes;
246         files = self.pkg.files;
247
248         byhand = summary = new = "";
249
250         # changes["distribution"] may not exist in corner cases
251         # (e.g. unreadable changes files)
252         if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
253             changes["distribution"] = {};
254
255         file_keys = files.keys();
256         file_keys.sort();
257         for file in file_keys:
258             if files[file].has_key("byhand"):
259                 byhand = 1
260                 summary += file + " byhand\n"
261             elif files[file].has_key("new"):
262                 new = 1
263                 summary += "(new) %s %s %s\n" % (file, files[file]["priority"], files[file]["section"])
264                 if files[file].has_key("othercomponents"):
265                     summary += "WARNING: Already present in %s distribution.\n" % (files[file]["othercomponents"])
266                 if files[file]["type"] == "deb":
267                     deb_fh = utils.open_file(file)
268                     summary += apt_pkg.ParseSection(apt_inst.debExtractControl(deb_fh))["Description"] + '\n';
269                     deb_fh.close()
270             else:
271                 files[file]["pool name"] = utils.poolify (changes.get("source",""), files[file]["component"])
272                 destination = self.Cnf["Dir::PoolRoot"] + files[file]["pool name"] + file
273                 summary += file + "\n  to " + destination + "\n"
274
275         short_summary = summary;
276
277         # This is for direport's benefit...
278         f = re_fdnic.sub("\n .\n", changes.get("changes",""));
279
280         if byhand or new:
281             summary += "Changes: " + f;
282
283         summary += self.announce(short_summary, 0)
284
285         return (summary, short_summary);
286
287     ###########################################################################
288
289     def close_bugs (self, summary, action):
290         changes = self.pkg.changes;
291         Subst = self.Subst;
292         Cnf = self.Cnf;
293
294         bugs = changes["closes"].keys();
295
296         if not bugs:
297             return summary;
298
299         bugs.sort();
300         if not self.nmu.is_an_nmu(self.pkg):
301             if changes["distribution"].has_key("experimental"):
302                 # tag bugs as fixed-in-experimental for uploads to experimental
303                 summary += "Setting bugs to severity fixed: ";
304                 control_message = "";
305                 for bug in bugs:
306                     summary += "%s " % (bug);
307                     control_message += "tag %s + fixed-in-experimental\n" % (bug);
308                 if action and control_message != "":
309                     Subst["__CONTROL_MESSAGE__"] = control_message;
310                     mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.bug-experimental-fixed");
311                     utils.send_mail (mail_message);
312                 if action:
313                     self.Logger.log(["setting bugs to fixed"]+bugs);
314
315
316             else:
317                 summary += "Closing bugs: ";
318                 for bug in bugs:
319                     summary += "%s " % (bug);
320                     if action:
321                         Subst["__BUG_NUMBER__"] = bug;
322                         if changes["distribution"].has_key("stable"):
323                             Subst["__STABLE_WARNING__"] = """
324 Note that this package is not part of the released stable Debian
325 distribution.  It may have dependencies on other unreleased software,
326 or other instabilities.  Please take care if you wish to install it.
327 The update will eventually make its way into the next released Debian
328 distribution.""";
329                         else:
330                             Subst["__STABLE_WARNING__"] = "";
331                         mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.bug-close");
332                         utils.send_mail (mail_message);
333                 if action:
334                     self.Logger.log(["closing bugs"]+bugs);
335
336         else:                     # NMU
337             summary += "Setting bugs to severity fixed: ";
338             control_message = "";
339             for bug in bugs:
340                 summary += "%s " % (bug);
341                 control_message += "tag %s + fixed\n" % (bug);
342             if action and control_message != "":
343                 Subst["__CONTROL_MESSAGE__"] = control_message;
344                 mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.bug-nmu-fixed");
345                 utils.send_mail (mail_message);
346             if action:
347                 self.Logger.log(["setting bugs to fixed"]+bugs);
348         summary += "\n";
349         return summary;
350
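# Illustrative sketch (hypothetical bug numbers): the __CONTROL_MESSAGE__
# built above is a series of BTS control commands, one per bug, e.g.
#
#   tag 123456 + fixed-in-experimental
#   tag 123457 + fixed-in-experimental
#
# (or "tag NNNNNN + fixed" for NMUs), whereas non-NMU uploads to other
# suites send one jennifer.bug-close mail per bug with __BUG_NUMBER__ set.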
351     ###########################################################################
352
353     def announce (self, short_summary, action):
354         Subst = self.Subst;
355         Cnf = self.Cnf;
356         changes = self.pkg.changes;
357
358         # Only do announcements for source uploads with a recent dpkg-dev installed
359         if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):
360             return "";
361
362         lists_done = {};
363         summary = "";
364         Subst["__SHORT_SUMMARY__"] = short_summary;
365
366         for dist in changes["distribution"].keys():
367             list = Cnf.Find("Suite::%s::Announce" % (dist));
368             if list == "" or lists_done.has_key(list):
369                 continue;
370             lists_done[list] = 1;
371             summary += "Announcing to %s\n" % (list);
372
373             if action:
374                 Subst["__ANNOUNCE_LIST_ADDRESS__"] = list;
375                 if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
376                     Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"]);
377                 mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.announce");
378                 utils.send_mail (mail_message);
379
380         if Cnf.FindB("Dinstall::CloseBugs"):
381             summary = self.close_bugs(summary, action);
382
383         return summary;
384
385     ###########################################################################
386
387     def accept (self, summary, short_summary):
388         Cnf = self.Cnf;
389         Subst = self.Subst;
390         files = self.pkg.files;
391         changes = self.pkg.changes;
392         changes_file = self.pkg.changes_file;
393         dsc = self.pkg.dsc;
394
395         print "Accepting."
396         self.Logger.log(["Accepting changes",changes_file]);
397
398         self.dump_vars(Cnf["Dir::Queue::Accepted"]);
399
400         # Move all the files into the accepted directory
401         utils.move(changes_file, Cnf["Dir::Queue::Accepted"]);
402         file_keys = files.keys();
403         for file in file_keys:
404             utils.move(file, Cnf["Dir::Queue::Accepted"]);
405             self.accept_bytes += float(files[file]["size"])
406         self.accept_count += 1;
407
408         # Send accept mail, announce to lists, close bugs and check for
409         # override disparities
410         if not Cnf["Dinstall::Options::No-Mail"]:
411             Subst["__SUITE__"] = "";
412             Subst["__SUMMARY__"] = summary;
413             mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.accepted");
414             utils.send_mail(mail_message)
415             self.announce(short_summary, 1)
416
417
418         ## Helper stuff for DebBugs Version Tracking
419         if Cnf.Find("Dir::Queue::BTSVersionTrack"):
420             # ??? once queue/* is cleared on *.d.o and/or reprocessed
421             # the conditionalization on dsc["bts changelog"] should be
422             # dropped.
423
424             # Write out the version history from the changelog
425             if changes["architecture"].has_key("source") and \
426                dsc.has_key("bts changelog"):
427
428                 temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
429                                                     dotprefix=1, perms=0644);
430                 version_history = utils.open_file(temp_filename, 'w');
431                 version_history.write(dsc["bts changelog"]);
432                 version_history.close();
433                 filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
434                                       changes_file[:-8]+".versions");
435                 os.rename(temp_filename, filename);
436
437             # Write out the binary -> source mapping.
438             temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
439                                                 dotprefix=1, perms=0644);
440             debinfo = utils.open_file(temp_filename, 'w');
441             for file in file_keys:
442                 f = files[file];
443                 if f["type"] == "deb":
444                     line = " ".join([f["package"], f["version"],
445                                      f["architecture"], f["source package"],
446                                      f["source version"]]);
447                     debinfo.write(line+"\n");
448             debinfo.close();
449             filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
450                                   changes_file[:-8]+".debinfo");
451             os.rename(temp_filename, filename);
452
453         self.queue_build("accepted", Cnf["Dir::Queue::Accepted"])
454
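# Illustrative sketch (hypothetical package name/version): the BTS
# version-tracking files written above look like
#
#   hello_1.0-1_i386.versions  - the "bts changelog" excerpt from the .dsc
#   hello_1.0-1_i386.debinfo   - one line per .deb, in the order
#                                "package version architecture
#                                 source-package source-version", e.g.
#                                "hello 1.0-1 i386 hello 1.0-1"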
455     ###########################################################################
456
457     def queue_build (self, queue, path):
458         ## Special support to enable clean auto-building of queued packages
459         Cnf = self.Cnf; changes = self.pkg.changes; file_keys = self.pkg.files.keys();
460         queue_id = db_access.get_or_set_queue_id(queue);
461         self.projectB.query("BEGIN WORK");
462         for suite in changes["distribution"].keys():
463             if suite not in Cnf.ValueList("Dinstall::QueueBuildSuites"):
464                 continue;
465             suite_id = db_access.get_suite_id(suite);
466             dest_dir = Cnf["Dir::QueueBuild"];
467             if Cnf.FindB("Dinstall::SecurityQueueBuild"):
468                 dest_dir = os.path.join(dest_dir, suite);
469             for file in file_keys:
470                 src = os.path.join(path, file);
471                 dest = os.path.join(dest_dir, file);
472                 if Cnf.FindB("Dinstall::SecurityQueueBuild"):
473                     # Copy it since the original won't be readable by www-data
474                     utils.copy(src, dest);
475                 else:
476                     # Create a symlink to it
477                     os.symlink(src, dest);
478                 # Add it to the list of packages for later processing by apt-ftparchive
479                 self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest));
480             # If the .orig.tar.gz is in the pool, create a symlink to
481             # it (if one doesn't already exist)
482             if self.pkg.orig_tar_id:
483                 # Determine the .orig.tar.gz file name
484                 for dsc_file in self.pkg.dsc_files.keys():
485                     if dsc_file.endswith(".orig.tar.gz"):
486                         filename = dsc_file;
487                 dest = os.path.join(dest_dir, filename);
488                 # If it doesn't exist, create a symlink
489                 if not os.path.exists(dest):
490                     # Find the .orig.tar.gz in the pool
491                     q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id));
492                     ql = q.getresult();
493                     if not ql:
494                         utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id));
495                     src = os.path.join(ql[0][0], ql[0][1]);
496                     os.symlink(src, dest);
497                     # Add it to the list of packages for later processing by apt-ftparchive
498                     self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest));
499                 # if it does, update things to ensure it's not removed prematurely
500                 else:
501                     self.projectB.query("UPDATE queue_build SET in_queue = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id));
502
503         self.projectB.query("COMMIT WORK");
504
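# Design note (sketch): for security queue builds the files are copied
# because the originals would not be readable by www-data; everywhere else a
# symlink into the queue directory suffices.  Either way each file becomes a
# row in queue_build so apt-ftparchive can index the auto-build archive, and
# the in_queue/last_used columns presumably govern when the entry may later
# be cleaned up.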
505     ###########################################################################
506
507     def check_override (self):
508         Subst = self.Subst;
509         changes = self.pkg.changes;
510         files = self.pkg.files;
511         Cnf = self.Cnf;
512
513         # Abandon the check if:
514         #  a) it's a non-sourceful upload
515         #  b) override disparity checks have been disabled
516         #  c) we're not sending mail
517         if not changes["architecture"].has_key("source") or \
518            not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
519            Cnf["Dinstall::Options::No-Mail"]:
520             return;
521
522         summary = "";
523         file_keys = files.keys();
524         file_keys.sort();
525         for file in file_keys:
526             if not files[file].has_key("new") and files[file]["type"] == "deb":
527                 section = files[file]["section"];
528                 override_section = files[file]["override section"];
529                 if section.lower() != override_section.lower() and section != "-":
530                     # Ignore this; it's a common mistake and not worth whining about
531                     if section.lower() == "non-us/main" and override_section.lower() == "non-us":
532                         continue;
533                     summary += "%s: package says section is %s, override says %s.\n" % (file, section, override_section);
534                 priority = files[file]["priority"];
535                 override_priority = files[file]["override priority"];
536                 if priority != override_priority and priority != "-":
537                     summary += "%s: package says priority is %s, override says %s.\n" % (file, priority, override_priority);
538
539         if summary == "":
540             return;
541
542         Subst["__SUMMARY__"] = summary;
543         mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/jennifer.override-disparity");
544         utils.send_mail(mail_message);
545
546     ###########################################################################
547
548     def force_reject (self, files):
549         """Forcefully move files from the current directory to the
550            reject directory.  If any file already exists in the reject
551            directory it will be moved to the morgue to make way for
552            the new file."""
553
554         Cnf = self.Cnf
555
556         for file in files:
557             # Skip any files which don't exist or which we don't have permission to copy.
558             if os.access(file,os.R_OK) == 0:
559                 continue;
560             dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file);
561             try:
562                 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);
563             except OSError, e:
564                 # File exists?  Let's try and move it to the morgue
565                 if errno.errorcode[e.errno] == 'EEXIST':
566                     morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file);
567                     try:
568                         morgue_file = utils.find_next_free(morgue_file);
569                     except utils.tried_too_hard_exc:
570                         # Something's either gone badly Pete Tong, or
571                         # someone is trying to exploit us.
572                         utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file));
573                         return;
574                     utils.move(dest_file, morgue_file, perms=0660);
575                     try:
576                         dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);
577                     except OSError, e:
578                         # Likewise
579                         utils.warn("**WARNING** failed to claim %s in the reject directory." % (file));
580                         return;
581                 else:
582                     raise;
583             # If we got here, we own the destination file, so we can
584             # safely overwrite it.
585             utils.move(file, dest_file, 1, perms=0660);
586             os.close(dest_fd)
587
588     ###########################################################################
589
590     def do_reject (self, manual = 0, reject_message = ""):
591         # If we weren't given a manual rejection message, spawn an
592         # editor so the user can add one in...
593         if manual and not reject_message:
594             temp_filename = utils.temp_filename();
595             editor = os.environ.get("EDITOR","vi")
596             answer = 'E';
597             while answer == 'E':
598                 os.system("%s %s" % (editor, temp_filename))
599                 temp_fh = utils.open_file(temp_filename);
600                 reject_message = "".join(temp_fh.readlines());
601                 temp_fh.close();
602                 print "Reject message:";
603                 print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1);
604                 prompt = "[R]eject, Edit, Abandon, Quit ?"
605                 answer = "XXX";
606                 while prompt.find(answer) == -1:
607                     answer = utils.our_raw_input(prompt);
608                     m = re_default_answer.search(prompt);
609                     if answer == "":
610                         answer = m.group(1);
611                     answer = answer[:1].upper();
612             os.unlink(temp_filename);
613             if answer == 'A':
614                 return 1;
615             elif answer == 'Q':
616                 sys.exit(0);
617
618         print "Rejecting.\n"
619
620         Cnf = self.Cnf;
621         Subst = self.Subst;
622         pkg = self.pkg;
623
624         reason_filename = pkg.changes_file[:-8] + ".reason";
625         reason_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename;
626
627         # Move all the files into the reject directory
628         reject_files = pkg.files.keys() + [pkg.changes_file];
629         self.force_reject(reject_files);
630
631         # If we fail here someone is probably trying to exploit the race
632         # so let's just raise an exception ...
633         if os.path.exists(reason_filename):
634             os.unlink(reason_filename);
635         reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);
636
637         if not manual:
638             Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"];
639             Subst["__MANUAL_REJECT_MESSAGE__"] = "";
640             Subst["__CC__"] = "X-Katie-Rejection: automatic (moo)";
641             os.write(reason_fd, reject_message);
642             reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/katie.rejected");
643         else:
644             # Build up the rejection email
645             user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"]);
646
647             Subst["__REJECTOR_ADDRESS__"] = user_email_address;
648             Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message;
649             Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"];
650             reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/katie.rejected");
651             # Write the rejection email out as the <foo>.reason file
652             os.write(reason_fd, reject_mail_message);
653
654         os.close(reason_fd)
655
656         # Send the rejection mail if appropriate
657         if not Cnf["Dinstall::Options::No-Mail"]:
658             utils.send_mail(reject_mail_message);
659
660         self.Logger.log(["rejected", pkg.changes_file]);
661         return 0;
662
663     ################################################################################
664
665     # Ensure that source exists somewhere in the archive for the binary
666     # upload being processed.
667     #
668     # (1) exact match                      => 1.0-3
669     # (2) Bin-only NMU of an MU            => 1.0-3.0.1
670     # (3) Bin-only NMU of a sourceful-NMU  => 1.0-3.1.1
671
672     def source_exists (self, package, source_version, suites = ["any"]):
673         okay = 1
674         for suite in suites:
675             if suite == "any":
676                 que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
677                     (package)
678             else:
679                 # source must exist in suite X, or in some other suite that's
680                 # mapped to X, recursively... silent-maps are counted too,
681                 # unreleased-maps aren't.
682                 maps = self.Cnf.ValueList("SuiteMappings")[:]
683                 maps.reverse()
684                 maps = [ m.split() for m in maps ]
685                 maps = [ (x[1], x[2]) for x in maps
686                                 if x[0] == "map" or x[0] == "silent-map" ]
687                 s = [suite]
688                 for x in maps:
689                     if x[1] in s and x[0] not in s:
690                         s.append(x[0])
691
692                 que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, string.join(["su.suite_name = '%s'" % a for a in s], " OR "));
693             q = self.projectB.query(que)
694
695             # Reduce the query results to a list of version numbers
696             ql = map(lambda x: x[0], q.getresult());
697
698             # Try (1)
699             if source_version in ql:
700                 continue
701
702             # Try (2)
703             orig_source_version = re_bin_only_nmu.sub('', source_version)
704             if orig_source_version in ql:
705                 continue
706
707             # No source found...
708             okay = 0
709         return okay
710
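# Illustrative sketch (hypothetical versions): for a binary upload claiming
# source version "1.0-3+b1", the loop above first looks for an exact
# "1.0-3+b1" source match and then retries with "1.0-3", i.e. with the
# "+b1" binary-only-NMU suffix stripped by re_bin_only_nmu; only if neither
# exists in any of the (possibly suite-mapped) suites is okay set to 0.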
711     ################################################################################
712     
713     def in_override_p (self, package, component, suite, binary_type, file):
714         files = self.pkg.files;
715
716         if binary_type == "": # must be source
717             type = "dsc";
718         else:
719             type = binary_type;
720
721         # Override suite name; used for example with proposed-updates
722         if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
723             suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)];
724
725         # Avoid <undef> on unknown distributions
726         suite_id = db_access.get_suite_id(suite);
727         if suite_id == -1:
728             return None;
729         component_id = db_access.get_component_id(component);
730         type_id = db_access.get_override_type_id(type);
731
732         # FIXME: nasty non-US specific hack
733         if component.lower().startswith("non-us/"):
734             component = component[7:];
735
736         q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
737                            % (package, suite_id, component_id, type_id));
738         result = q.getresult();
739         # If checking for a source package fall back on the binary override type
740         if type == "dsc" and not result:
741             deb_type_id = db_access.get_override_type_id("deb");
742             udeb_type_id = db_access.get_override_type_id("udeb");
743             q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
744                                % (package, suite_id, component_id, deb_type_id, udeb_type_id));
745             result = q.getresult();
746
747         # Remember the section and priority so we can check them later if appropriate
748         if result:
749             files[file]["override section"] = result[0][0];
750             files[file]["override priority"] = result[0][1];
751
752         return result;
753
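# Illustrative sketch (hypothetical result): a successful lookup returns the
# raw query result, e.g. [("utils", "optional")], and the section/priority
# are stashed in files[file]["override section"/"override priority"] so that
# check_override() above can compare them against what the package declares;
# an empty result means the package has no override entry.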
754     ################################################################################
755
756     def reject (self, str, prefix="Rejected: "):
757         if str:
758             # Unlike other rejects we add new lines first to avoid trailing
759             # new lines when this message is passed back up to a caller.
760             if self.reject_message:
761                 self.reject_message += "\n";
762             self.reject_message += prefix + str;
763
764     ################################################################################
765
766     def get_anyversion(self, query_result, suite):
767         anyversion=None
768         anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
769         for (v, s) in query_result:
770             if s in [ string.lower(x) for x in anysuite ]:
771                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
772                     anyversion=v
773         return anyversion
774
775     ################################################################################
776
777     def cross_suite_version_check(self, query_result, file, new_version):
778         """Ensure versions are newer than existing packages in target
779         suites and that cross-suite version checking rules as
780         set out in the conf file are satisfied."""
781
782         # Check versions for each target suite
783         for target_suite in self.pkg.changes["distribution"].keys():
784             must_be_newer_than = map(string.lower, self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)));
785             must_be_older_than = map(string.lower, self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)));
786             # Enforce "must be newer than target suite" even if conffile omits it
787             if target_suite not in must_be_newer_than:
788                 must_be_newer_than.append(target_suite);
789             for entry in query_result:
790                 existent_version = entry[0];
791                 suite = entry[1];
792                 if suite in must_be_newer_than and \
793                    apt_pkg.VersionCompare(new_version, existent_version) < 1:
794                     self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite));
795                 if suite in must_be_older_than and \
796                    apt_pkg.VersionCompare(new_version, existent_version) > -1:
797                     ch = self.pkg.changes
798                     cansave = 0
799                     if ch.get('distribution-version', {}).has_key(suite):
800                         # we really use the other suite, ignoring the conflicting one ...
801                         addsuite = ch["distribution-version"][suite]
802                     
803                         add_version = self.get_anyversion(query_result, addsuite)
804                         target_version = self.get_anyversion(query_result, target_suite)
805                     
806                         if not add_version:
807                             # not add_version can only happen if we map to a suite
808                             # that doesn't enhance the suite we're propup'ing from.
809                             # so "propup-ver x a b c; map a d" is a problem only if
810                             # d doesn't enhance a.
811                             #
812                             # i think we could always propagate in this case, rather
813                             # than complaining. either way, this isn't a REJECT issue
814                             #
815                             # And - we really should complain to the dorks who configured dak
816                             self.reject("%s is mapped to, but not enhanced by %s - adding anyway" % (suite, addsuite), "Warning: ")
817                             self.pkg.changes.setdefault("propdistribution", {})
818                             self.pkg.changes["propdistribution"][addsuite] = 1
819                             cansave = 1
820                         elif not target_version:
821                             # not target_version is true when the package is NEW
822                             # we could just stick with the "...old version..." REJECT
823                             # for this, I think.
824                             self.reject("Won't propagate NEW packages.")
825                         elif apt_pkg.VersionCompare(new_version, add_version) < 0:
826                             # propagation would be redundant. no need to reject though.
827                             self.reject("ignoring version conflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite), "Warning: ")
828                             cansave = 1
829                         elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
830                              apt_pkg.VersionCompare(add_version, target_version) >= 0:
831                             # propagate!!
832                             self.reject("Propagating upload to %s" % (addsuite), "Warning: ")
833                             self.pkg.changes.setdefault("propdistribution", {})
834                             self.pkg.changes["propdistribution"][addsuite] = 1
835                             cansave = 1
836                 
837                     if not cansave:
838                         self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
839
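# Illustrative walk-through (hypothetical suites and versions): if target
# suite X lists suite Y under Suite::X::VersionChecks::MustBeOlderThan and Y
# already carries a version <= the new one, the upload normally gets the
# "old version ... <= new version" rejection; the distribution-version
# escape hatch above can instead mark the upload for propagation
# (changes["propdistribution"]) when the suite Y maps to would still end up
# with a consistent version.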
840     ################################################################################
841
842     def check_binary_against_db(self, file):
843         self.reject_message = "";
844         files = self.pkg.files;
845
846         # Ensure version is sane
847         q = self.projectB.query("""
848 SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
849                                      architecture a
850  WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
851    AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
852                                 % (files[file]["package"],
853                                    files[file]["architecture"]));
854         self.cross_suite_version_check(q.getresult(), file, files[file]["version"]);
855
856         # Check for any existing copies of the file
857         q = self.projectB.query("""
858 SELECT b.id FROM binaries b, architecture a
859  WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
860    AND a.id = b.architecture"""
861                                 % (files[file]["package"],
862                                    files[file]["version"],
863                                    files[file]["architecture"]))
864         if q.getresult():
865             self.reject("%s: can not overwrite existing copy already in the archive." % (file));
866
867         return self.reject_message;
868
869     ################################################################################
870
871     def check_source_against_db(self, file):
872         self.reject_message = "";
873         dsc = self.pkg.dsc;
874
875         # Ensure version is sane
876         q = self.projectB.query("""
877 SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
878  WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")));
879         self.cross_suite_version_check(q.getresult(), file, dsc.get("version"));
880
881         return self.reject_message;
882
883     ################################################################################
884
885     # **WARNING**
886     # NB: this function can remove entries from the 'files' index [if
887     # the .orig.tar.gz is a duplicate of the one in the archive]; if
888     # you're iterating over 'files' and call this function as part of
889     # the loop, be sure to add a check to the top of the loop to
890 # ensure you haven't just tried to dereference the deleted entry.
891     # **WARNING**
892
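# A minimal sketch of the safe calling pattern the warning describes
# (hypothetical caller, not code from jennifer):
#
#   for file in files.keys():
#       if not files.has_key(file):      # entry may have been deleted by an
#           continue;                    # earlier check_dsc_against_db() call
#       if files[file]["type"] == "dsc":
#           (reject_msg, orig) = self.check_dsc_against_db(file);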
893     def check_dsc_against_db(self, file):
894         self.reject_message = "";
895         files = self.pkg.files;
896         dsc_files = self.pkg.dsc_files;
897         legacy_source_untouchable = self.pkg.legacy_source_untouchable;
898         self.pkg.orig_tar_gz = None;
899
900         # Try and find all files mentioned in the .dsc.  This has
901         # to work harder to cope with the multiple possible
902         # locations of an .orig.tar.gz.
903         for dsc_file in dsc_files.keys():
904             found = None;
905             if files.has_key(dsc_file):
906                 actual_md5 = files[dsc_file]["md5sum"];
907                 actual_size = int(files[dsc_file]["size"]);
908                 found = "%s in incoming" % (dsc_file)
909                 # Check the file does not already exist in the archive
910                 q = self.projectB.query("SELECT f.size, f.md5sum, l.path, f.filename FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file));
911                 ql = q.getresult();
912                 # Strip out anything that isn't '%s' or '/%s$'
913                 for i in ql:
914                     if i[3] != dsc_file and i[3][-(len(dsc_file)+1):] != '/'+dsc_file:
915                         ql.remove(i);
916
917                 # "[katie] has not broken them.  [katie] has fixed a
918                 # brokenness.  Your crappy hack exploited a bug in
919                 # the old dinstall.
920                 #
921                 # "(Come on!  I thought it was always obvious that
922                 # one just doesn't release different files with
923                 # the same name and version.)"
924                 #                        -- ajk@ on d-devel@l.d.o
925
926                 if ql:
927                     # Ignore exact matches for .orig.tar.gz
928                     match = 0;
929                     if dsc_file.endswith(".orig.tar.gz"):
930                         for i in ql:
931                             if files.has_key(dsc_file) and \
932                                int(files[dsc_file]["size"]) == int(i[0]) and \
933                                files[dsc_file]["md5sum"] == i[1]:
934                                 self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ");
935                                 del files[dsc_file];
936                                 self.pkg.orig_tar_gz = i[2] + i[3];
937                                 match = 1;
938
939                     if not match:
940                         self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file));
941             elif dsc_file.endswith(".orig.tar.gz"):
942                 # Check in the pool
943                 q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file));
944                 ql = q.getresult();
945                 # Strip out anything that isn't '%s' or '/%s$'
946                 for i in ql:
947                     if i[1] != dsc_file and i[1][-(len(dsc_file)+1):] != '/'+dsc_file:
948                         ql.remove(i);
949
950                 if ql:
951                     # Unfortunately, we may get more than one match here if,
952                     # for example, the package was in potato but had an -sa
953                     # upload in woody.  So we need to choose the right one.
954
955                     x = ql[0]; # default to something sane in case we don't match any or have only one
956
957                     if len(ql) > 1:
958                         for i in ql:
959                             old_file = i[0] + i[1];
960                             old_file_fh = utils.open_file(old_file)
961                             actual_md5 = apt_pkg.md5sum(old_file_fh);
962                             old_file_fh.close()
963                             actual_size = os.stat(old_file)[stat.ST_SIZE];
964                             if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
965                                 x = i;
966                             else:
967                                 legacy_source_untouchable[i[3]] = "";
968
969                     old_file = x[0] + x[1];
970                     old_file_fh = utils.open_file(old_file)
971                     actual_md5 = apt_pkg.md5sum(old_file_fh);
972                     old_file_fh.close()
973                     actual_size = os.stat(old_file)[stat.ST_SIZE];
974                     found = old_file;
975                     suite_type = x[2];
976                     dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
977                     # See install() in katie...
978                     self.pkg.orig_tar_id = x[3];
979                     self.pkg.orig_tar_gz = old_file;
980                     if suite_type == "legacy" or suite_type == "legacy-mixed":
981                         self.pkg.orig_tar_location = "legacy";
982                     else:
983                         self.pkg.orig_tar_location = x[4];
984                 else:
985                     # Not there? Check the queue directories...
986
987                     in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file);
988                     # See process_it() in jennifer for explanation of this
989                     if os.path.exists(in_unchecked):
990                         return (self.reject_message, in_unchecked);
991                     else:
992                         for dir in [ "Accepted", "New", "Byhand" ]:
993                             in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (dir)],dsc_file);
994                             if os.path.exists(in_otherdir):
995                                 in_otherdir_fh = utils.open_file(in_otherdir)
996                                 actual_md5 = apt_pkg.md5sum(in_otherdir_fh);
997                                 in_otherdir_fh.close()
998                                 actual_size = os.stat(in_otherdir)[stat.ST_SIZE];
999                                 found = in_otherdir;
1000                                 self.pkg.orig_tar_gz = in_otherdir;
1001
1002                     if not found:
1003                         self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file));
1004                         self.pkg.orig_tar_gz = -1;
1005                         continue;
1006             else:
1007                 self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file));
1008                 continue;
1009             if actual_md5 != dsc_files[dsc_file]["md5sum"]:
1010                 self.reject("md5sum for %s doesn't match %s." % (found, file));
1011             if actual_size != int(dsc_files[dsc_file]["size"]):
1012                 self.reject("size for %s doesn't match %s." % (found, file));
1013
1014         return (self.reject_message, None);
1015
1016     def do_query(self, q):
1017         sys.stderr.write("query: \"%s\" ... " % (q));
1018         before = time.time();
1019         r = self.projectB.query(q);
1020         time_diff = time.time()-before;
1021         sys.stderr.write("took %.3f seconds.\n" % (time_diff));
1022         return r;