dak.git — daklib/queue.py (gitweb blob view; commit: "include override information in accepted mails")
1 #!/usr/bin/env python
2
3 # Queue utility functions for dak
4 # Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006  James Troup <james@nocrew.org>
5
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 2 of the License, or
9 # (at your option) any later version.
10
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14 # GNU General Public License for more details.
15
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software
18 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
19
20 ###############################################################################
21
22 import cPickle, errno, os, pg, re, stat, sys, time
23 import apt_inst, apt_pkg
24 import utils, database
25
26 from types import *
27
28 ###############################################################################
29
# Precompiled regexps used throughout this module.
re_isanum = re.compile (r"^\d+$")            # a string consisting only of digits (e.g. a bug number)
re_default_answer = re.compile(r"\[(.*)\]")  # the [bracketed] default answer in an interactive prompt
re_fdnic = re.compile(r"\n\n")               # blank-line separator; build_summaries() rewrites it as "\n .\n" for direport
re_bin_only_nmu = re.compile(r"\+b\d+$")     # binary-only NMU version suffix, e.g. "1.0-3+b1"
34
35 ###############################################################################
36
37 # Convenience wrapper to carry around all the package information in
38
class Pkg:
    """Simple attribute bag used to carry all the per-upload package
    information (changes, dsc, files, ...) around as one object."""

    def __init__(self, **kwds):
        # Every keyword argument becomes an instance attribute.
        for attr, value in kwds.items():
            setattr(self, attr, value)

    def update(self, **kwds):
        # Add or overwrite attributes from the keyword arguments.
        for attr, value in kwds.items():
            setattr(self, attr, value)
45
46 ###############################################################################
47
class nmu_p:
    """Decide whether an upload is a Non-Maintainer Upload (NMU).

    On construction the group maintainer override file (if configured)
    is read, so that group-maintained packages can be exempted."""

    def __init__ (self, Cnf):
        self.group_maint = {}
        self.Cnf = Cnf
        if Cnf.get("Dinstall::GroupOverrideFilename"):
            override_filename = Cnf["Dir::Override"] + Cnf["Dinstall::GroupOverrideFilename"]
            override_file = utils.open_file(override_filename)
            for raw_line in override_file.readlines():
                entry = utils.re_comments.sub('', raw_line).lower().strip()
                if entry:
                    self.group_maint[entry] = 1
            override_file.close()

    def is_an_nmu (self, pkg):
        """Return 1 if the upload in 'pkg' looks like an NMU, 0 otherwise."""
        Cnf = self.Cnf
        changes = pkg.changes
        dsc = pkg.dsc

        (dsc_rfc822, dsc_rfc2047, dsc_name, dsc_email) = utils.fix_maintainer (
            dsc.get("maintainer", Cnf["Dinstall::MyEmailAddress"]).lower())
        # changes["changedbyname"] == dsc_name is probably never true, but better safe than sorry
        if dsc_name == changes["maintainername"].lower() and \
           (changes["changedby822"] == "" or changes["changedbyname"].lower() == dsc_name):
            return 0

        # An upload by any of the listed Uploaders is not an NMU either.
        if dsc.has_key("uploaders"):
            known_uploaders = {}
            for uploader in dsc["uploaders"].lower().split(","):
                (rfc822, rfc2047, name, email) = utils.fix_maintainer (uploader.strip())
                known_uploaders[name] = ""
            if known_uploaders.has_key(changes["changedbyname"].lower()):
                return 0

        # Some group maintained packages (e.g. Debian QA) are never NMU's
        if self.group_maint.has_key(changes["maintaineremail"].lower()):
            return 0

        return 1
89
90 ###############################################################################
91
92 class Upload:
93
94     def __init__(self, Cnf):
95         self.Cnf = Cnf
96         # Read in the group-maint override file
97         self.nmu = nmu_p(Cnf)
98         self.accept_count = 0
99         self.accept_bytes = 0L
100         self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
101                        legacy_source_untouchable = {})
102
103         # Initialize the substitution template mapping global
104         Subst = self.Subst = {}
105         Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"]
106         Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"]
107         Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"]
108         Subst["__DAK_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
109
110         self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
111         database.init(Cnf, self.projectB)
112
113     ###########################################################################
114
115     def init_vars (self):
116         for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
117             exec "self.pkg.%s.clear();" % (i)
118         self.pkg.orig_tar_id = None
119         self.pkg.orig_tar_location = ""
120         self.pkg.orig_tar_gz = None
121
122     ###########################################################################
123
124     def update_vars (self):
125         dump_filename = self.pkg.changes_file[:-8]+".dak"
126         dump_file = utils.open_file(dump_filename)
127         p = cPickle.Unpickler(dump_file)
128         for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
129             exec "self.pkg.%s.update(p.load());" % (i)
130         for i in [ "orig_tar_id", "orig_tar_location" ]:
131             exec "self.pkg.%s = p.load();" % (i)
132         dump_file.close()
133
134     ###########################################################################
135
136     # This could just dump the dictionaries as is, but I'd like to
137     # avoid this so there's some idea of what process-accepted &
138     # process-new use from process-unchecked
139
140     def dump_vars(self, dest_dir):
141         for i in [ "changes", "dsc", "files", "dsc_files",
142                    "legacy_source_untouchable", "orig_tar_id", "orig_tar_location" ]:
143             exec "%s = self.pkg.%s;" % (i,i)
144         dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".dak")
145         dump_file = utils.open_file(dump_filename, 'w')
146         try:
147             os.chmod(dump_filename, 0660)
148         except OSError, e:
149             if errno.errorcode[e.errno] == 'EPERM':
150                 perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE])
151                 if perms & stat.S_IROTH:
152                     utils.fubar("%s is world readable and chmod failed." % (dump_filename))
153             else:
154                 raise
155
156         p = cPickle.Pickler(dump_file, 1)
157         for i in [ "d_changes", "d_dsc", "d_files", "d_dsc_files" ]:
158             exec "%s = {}" % i
159         ## files
160         for file in files.keys():
161             d_files[file] = {}
162             for i in [ "package", "version", "architecture", "type", "size",
163                        "md5sum", "component", "location id", "source package",
164                        "source version", "maintainer", "dbtype", "files id",
165                        "new", "section", "priority", "othercomponents",
166                        "pool name", "original component" ]:
167                 if files[file].has_key(i):
168                     d_files[file][i] = files[file][i]
169         ## changes
170         # Mandatory changes fields
171         for i in [ "distribution", "source", "architecture", "version",
172                    "maintainer", "urgency", "fingerprint", "changedby822",
173                    "changedby2047", "changedbyname", "maintainer822",
174                    "maintainer2047", "maintainername", "maintaineremail",
175                    "closes", "changes" ]:
176             d_changes[i] = changes[i]
177         # Optional changes fields
178         for i in [ "changed-by", "filecontents", "format", "process-new note", "adv id", "distribution-version" ]:
179             if changes.has_key(i):
180                 d_changes[i] = changes[i]
181         ## dsc
182         for i in [ "source", "version", "maintainer", "fingerprint",
183                    "uploaders", "bts changelog" ]:
184             if dsc.has_key(i):
185                 d_dsc[i] = dsc[i]
186         ## dsc_files
187         for file in dsc_files.keys():
188             d_dsc_files[file] = {}
189             # Mandatory dsc_files fields
190             for i in [ "size", "md5sum" ]:
191                 d_dsc_files[file][i] = dsc_files[file][i]
192             # Optional dsc_files fields
193             for i in [ "files id" ]:
194                 if dsc_files[file].has_key(i):
195                     d_dsc_files[file][i] = dsc_files[file][i]
196
197         for i in [ d_changes, d_dsc, d_files, d_dsc_files,
198                    legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
199             p.dump(i)
200         dump_file.close()
201
202     ###########################################################################
203
204     # Set up the per-package template substitution mappings
205
206     def update_subst (self, reject_message = ""):
207         Subst = self.Subst
208         changes = self.pkg.changes
209         # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
210         if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
211             changes["architecture"] = { "Unknown" : "" }
212         # and maintainer2047 may not exist.
213         if not changes.has_key("maintainer2047"):
214             changes["maintainer2047"] = self.Cnf["Dinstall::MyEmailAddress"]
215
216         Subst["__ARCHITECTURE__"] = " ".join(changes["architecture"].keys())
217         Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
218         Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "")
219
220         # For source uploads the Changed-By field wins; otherwise Maintainer wins.
221         if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
222             Subst["__MAINTAINER_FROM__"] = changes["changedby2047"]
223             Subst["__MAINTAINER_TO__"] = "%s, %s" % (changes["changedby2047"],
224                                                      changes["maintainer2047"])
225             Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown")
226         else:
227             Subst["__MAINTAINER_FROM__"] = changes["maintainer2047"]
228             Subst["__MAINTAINER_TO__"] = changes["maintainer2047"]
229             Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown")
230         if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
231             Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])
232
233         # Apply any global override of the Maintainer field
234         if self.Cnf.get("Dinstall::OverrideMaintainer"):
235             Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"]
236             Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"]
237
238         Subst["__REJECT_MESSAGE__"] = reject_message
239         Subst["__SOURCE__"] = changes.get("source", "Unknown")
240         Subst["__VERSION__"] = changes.get("version", "Unknown")
241
242     ###########################################################################
243
    def build_summaries(self):
        """Build the textual summaries of the upload used in the
        accepted/announce mails.

        Returns a (summary, short_summary) tuple; short_summary is the
        per-file listing only, summary additionally carries the
        Changes: text (for byhand/new uploads), the override entries
        and the announce output."""
        changes = self.pkg.changes
        files = self.pkg.files

        byhand = summary = new = ""

        # changes["distribution"] may not exist in corner cases
        # (e.g. unreadable changes files)
        if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
            changes["distribution"] = {}

        override_summary ="";
        file_keys = files.keys()
        file_keys.sort()
        for file in file_keys:
            if files[file].has_key("byhand"):
                byhand = 1
                summary += file + " byhand\n"
            elif files[file].has_key("new"):
                new = 1
                summary += "(new) %s %s %s\n" % (file, files[file]["priority"], files[file]["section"])
                if files[file].has_key("othercomponents"):
                    summary += "WARNING: Already present in %s distribution.\n" % (files[file]["othercomponents"])
                if files[file]["type"] == "deb":
                    # Show the package Description from the .deb's control file.
                    deb_fh = utils.open_file(file)
                    summary += apt_pkg.ParseSection(apt_inst.debExtractControl(deb_fh))["Description"] + '\n'
                    deb_fh.close()
            else:
                # Neither byhand nor new: list the pool destination, and
                # collect override info for deb/udeb/dsc files so it can be
                # included in the accepted mail.
                files[file]["pool name"] = utils.poolify (changes.get("source",""), files[file]["component"])
                destination = self.Cnf["Dir::PoolRoot"] + files[file]["pool name"] + file
                summary += file + "\n  to " + destination + "\n"
                if files[file]["type"] in ["deb", "udeb", "dsc"]:
                    override_summary += "%s - %s %s\n" % (file, files[file]["priority"], files[file]["section"])

        short_summary = summary

        # This is for direport's benefit...
        f = re_fdnic.sub("\n .\n", changes.get("changes",""))

        if byhand or new:
            summary += "Changes: " + f

        summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"

        summary += self.announce(short_summary, 0)

        return (summary, short_summary)
291
292     ###########################################################################
293
294     def close_bugs (self, summary, action):
295         changes = self.pkg.changes
296         Subst = self.Subst
297         Cnf = self.Cnf
298
299         bugs = changes["closes"].keys()
300
301         if not bugs:
302             return summary
303
304         bugs.sort()
305         if not self.nmu.is_an_nmu(self.pkg):
306             if changes["distribution"].has_key("experimental"):
307                 # tag bugs as fixed-in-experimental for uploads to experimental
308                 summary += "Setting bugs to severity fixed: "
309                 control_message = ""
310                 for bug in bugs:
311                     summary += "%s " % (bug)
312                     control_message += "tag %s + fixed-in-experimental\n" % (bug)
313                 if action and control_message != "":
314                     Subst["__CONTROL_MESSAGE__"] = control_message
315                     mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-experimental-fixed")
316                     utils.send_mail (mail_message)
317                 if action:
318                     self.Logger.log(["setting bugs to fixed"]+bugs)
319
320
321             else:
322                 summary += "Closing bugs: "
323                 for bug in bugs:
324                     summary += "%s " % (bug)
325                     if action:
326                         Subst["__BUG_NUMBER__"] = bug
327                         if changes["distribution"].has_key("stable"):
328                             Subst["__STABLE_WARNING__"] = """
329 Note that this package is not part of the released stable Debian
330 distribution.  It may have dependencies on other unreleased software,
331 or other instabilities.  Please take care if you wish to install it.
332 The update will eventually make its way into the next released Debian
333 distribution."""
334                         else:
335                             Subst["__STABLE_WARNING__"] = ""
336                             mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-close")
337                             utils.send_mail (mail_message)
338                 if action:
339                     self.Logger.log(["closing bugs"]+bugs)
340
341         else:                     # NMU
342             summary += "Setting bugs to severity fixed: "
343             control_message = ""
344             for bug in bugs:
345                 summary += "%s " % (bug)
346                 control_message += "tag %s + fixed\n" % (bug)
347             if action and control_message != "":
348                 Subst["__CONTROL_MESSAGE__"] = control_message
349                 mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-nmu-fixed")
350                 utils.send_mail (mail_message)
351             if action:
352                 self.Logger.log(["setting bugs to fixed"]+bugs)
353         summary += "\n"
354         return summary
355
356     ###########################################################################
357
358     def announce (self, short_summary, action):
359         Subst = self.Subst
360         Cnf = self.Cnf
361         changes = self.pkg.changes
362
363         # Only do announcements for source uploads with a recent dpkg-dev installed
364         if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):
365             return ""
366
367         lists_done = {}
368         summary = ""
369         Subst["__SHORT_SUMMARY__"] = short_summary
370
371         for dist in changes["distribution"].keys():
372             list = Cnf.Find("Suite::%s::Announce" % (dist))
373             if list == "" or lists_done.has_key(list):
374                 continue
375             lists_done[list] = 1
376             summary += "Announcing to %s\n" % (list)
377
378             if action:
379                 Subst["__ANNOUNCE_LIST_ADDRESS__"] = list
380                 if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
381                     Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"])
382                 mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.announce")
383                 utils.send_mail (mail_message)
384
385         if Cnf.FindB("Dinstall::CloseBugs"):
386             summary = self.close_bugs(summary, action)
387
388         return summary
389
390     ###########################################################################
391
    def accept (self, summary, short_summary):
        """Accept an upload: dump its state, move the files into the
        accepted queue, send the accepted mail and announcements, write
        the DebBugs version-tracking helper files and feed the
        auto-build queue."""
        Cnf = self.Cnf
        Subst = self.Subst
        files = self.pkg.files
        changes = self.pkg.changes
        changes_file = self.pkg.changes_file
        dsc = self.pkg.dsc

        print "Accepting."
        self.Logger.log(["Accepting changes",changes_file])

        # Preserve the parsed state so process-accepted/process-new can
        # reload it via update_vars().
        self.dump_vars(Cnf["Dir::Queue::Accepted"])

        # Move all the files into the accepted directory
        utils.move(changes_file, Cnf["Dir::Queue::Accepted"])
        file_keys = files.keys()
        for file in file_keys:
            utils.move(file, Cnf["Dir::Queue::Accepted"])
            self.accept_bytes += float(files[file]["size"])
        self.accept_count += 1

        # Send accept mail, announce to lists, close bugs and check for
        # override disparities
        if not Cnf["Dinstall::Options::No-Mail"]:
            Subst["__SUITE__"] = ""
            Subst["__SUMMARY__"] = summary
            mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
            utils.send_mail(mail_message)
            self.announce(short_summary, 1)


        ## Helper stuff for DebBugs Version Tracking
        if Cnf.Find("Dir::Queue::BTSVersionTrack"):
            # ??? once queue/* is cleared on *.d.o and/or reprocessed
            # the conditionalization on dsc["bts changelog"] should be
            # dropped.

            # Write out the version history from the changelog
            if changes["architecture"].has_key("source") and \
               dsc.has_key("bts changelog"):

                # Write to a dot-prefixed temp file first, then rename into
                # place, so consumers never see a partially-written file.
                temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                                    dotprefix=1, perms=0644)
                version_history = utils.open_file(temp_filename, 'w')
                version_history.write(dsc["bts changelog"])
                version_history.close()
                filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                      changes_file[:-8]+".versions")
                os.rename(temp_filename, filename)

            # Write out the binary -> source mapping.
            temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                                dotprefix=1, perms=0644)
            debinfo = utils.open_file(temp_filename, 'w')
            for file in file_keys:
                f = files[file]
                if f["type"] == "deb":
                    line = " ".join([f["package"], f["version"],
                                     f["architecture"], f["source package"],
                                     f["source version"]])
                    debinfo.write(line+"\n")
            debinfo.close()
            filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                  changes_file[:-8]+".debinfo")
            os.rename(temp_filename, filename)

        self.queue_build("accepted", Cnf["Dir::Queue::Accepted"])
459
460     ###########################################################################
461
    def queue_build (self, queue, path):
        """Special support to enable clean auto-building of queued packages.

        Registers the upload's files (found under 'path') with the named
        build 'queue' (e.g. "accepted"): copies or symlinks them into
        Dir::QueueBuild and records them in the queue_build table for
        later processing by apt-ftparchive."""
        Cnf = self.Cnf
        Subst = self.Subst
        files = self.pkg.files
        changes = self.pkg.changes
        changes_file = self.pkg.changes_file
        dsc = self.pkg.dsc
        file_keys = files.keys()

        ## Special support to enable clean auto-building of queued packages
        queue_id = database.get_or_set_queue_id(queue)

        self.projectB.query("BEGIN WORK")
        for suite in changes["distribution"].keys():
            # Only suites listed in Dinstall::QueueBuildSuites take part.
            if suite not in Cnf.ValueList("Dinstall::QueueBuildSuites"):
                continue
            suite_id = database.get_suite_id(suite)
            dest_dir = Cnf["Dir::QueueBuild"]
            if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                dest_dir = os.path.join(dest_dir, suite)
            for file in file_keys:
                src = os.path.join(path, file)
                dest = os.path.join(dest_dir, file)
                if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                    # Copy it since the original won't be readable by www-data
                    utils.copy(src, dest)
                else:
                    # Create a symlink to it
                    os.symlink(src, dest)
                # Add it to the list of packages for later processing by apt-ftparchive
                # NOTE(review): SQL built by %-interpolation; 'dest' derives
                # from upload filenames -- presumably validated earlier in the
                # pipeline, but worth confirming/escaping.
                self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
            # If the .orig.tar.gz is in the pool, create a symlink to
            # it (if one doesn't already exist)
            if self.pkg.orig_tar_id:
                # Determine the .orig.tar.gz file name
                # NOTE(review): if no dsc_files entry ends in ".orig.tar.gz",
                # 'filename' is left unbound and the next line raises
                # NameError -- confirm orig_tar_id implies such an entry.
                for dsc_file in self.pkg.dsc_files.keys():
                    if dsc_file.endswith(".orig.tar.gz"):
                        filename = dsc_file
                dest = os.path.join(dest_dir, filename)
                # If it doesn't exist, create a symlink
                if not os.path.exists(dest):
                    # Find the .orig.tar.gz in the pool
                    q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id))
                    ql = q.getresult()
                    if not ql:
                        utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id))
                    src = os.path.join(ql[0][0], ql[0][1])
                    os.symlink(src, dest)
                    # Add it to the list of packages for later processing by apt-ftparchive
                    self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
                # if it does, update things to ensure it's not removed prematurely
                else:
                    self.projectB.query("UPDATE queue_build SET in_queue = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id))

        self.projectB.query("COMMIT WORK")
517
518     ###########################################################################
519
520     def check_override (self):
521         Subst = self.Subst
522         changes = self.pkg.changes
523         files = self.pkg.files
524         Cnf = self.Cnf
525
526         # Abandon the check if:
527         #  a) it's a non-sourceful upload
528         #  b) override disparity checks have been disabled
529         #  c) we're not sending mail
530         if not changes["architecture"].has_key("source") or \
531            not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
532            Cnf["Dinstall::Options::No-Mail"]:
533             return
534
535         summary = ""
536         file_keys = files.keys()
537         file_keys.sort()
538         for file in file_keys:
539             if not files[file].has_key("new") and files[file]["type"] == "deb":
540                 section = files[file]["section"]
541                 override_section = files[file]["override section"]
542                 if section.lower() != override_section.lower() and section != "-":
543                     # Ignore this; it's a common mistake and not worth whining about
544                     if section.lower() == "non-us/main" and override_section.lower() == "non-us":
545                         continue
546                     summary += "%s: package says section is %s, override says %s.\n" % (file, section, override_section)
547                 priority = files[file]["priority"]
548                 override_priority = files[file]["override priority"]
549                 if priority != override_priority and priority != "-":
550                     summary += "%s: package says priority is %s, override says %s.\n" % (file, priority, override_priority)
551
552         if summary == "":
553             return
554
555         Subst["__SUMMARY__"] = summary
556         mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/process-unchecked.override-disparity")
557         utils.send_mail(mail_message)
558
559     ###########################################################################
560
561     def force_reject (self, files):
562         """Forcefully move files from the current directory to the
563            reject directory.  If any file already exists in the reject
564            directory it will be moved to the morgue to make way for
565            the new file."""
566
567         Cnf = self.Cnf
568
569         for file in files:
570             # Skip any files which don't exist or which we don't have permission to copy.
571             if os.access(file,os.R_OK) == 0:
572                 continue
573             dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file)
574             try:
575                 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
576             except OSError, e:
577                 # File exists?  Let's try and move it to the morgue
578                 if errno.errorcode[e.errno] == 'EEXIST':
579                     morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file)
580                     try:
581                         morgue_file = utils.find_next_free(morgue_file)
582                     except utils.tried_too_hard_exc:
583                         # Something's either gone badly Pete Tong, or
584                         # someone is trying to exploit us.
585                         utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file))
586                         return
587                     utils.move(dest_file, morgue_file, perms=0660)
588                     try:
589                         dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
590                     except OSError, e:
591                         # Likewise
592                         utils.warn("**WARNING** failed to claim %s in the reject directory." % (file))
593                         return
594                 else:
595                     raise
596             # If we got here, we own the destination file, so we can
597             # safely overwrite it.
598             utils.move(file, dest_file, 1, perms=0660)
599             os.close(dest_fd)
600
601     ###########################################################################
602
    def do_reject (self, manual = 0, reject_message = ""):
        """Reject the current upload: move its files to the reject
        directory, write a <changes-base>.reason file and (unless
        No-Mail is set) mail the rejection.

        manual         -- if true and reject_message is empty, spawn
                          $EDITOR so the operator can write one
        reject_message -- the rejection text

        Returns 1 if the operator abandoned the rejection, 0 otherwise
        (a 'Quit' answer exits the whole program)."""
        # If we weren't given a manual rejection message, spawn an
        # editor so the user can add one in...
        if manual and not reject_message:
            temp_filename = utils.temp_filename()
            editor = os.environ.get("EDITOR","vi")
            answer = 'E'
            while answer == 'E':
                os.system("%s %s" % (editor, temp_filename))
                temp_fh = utils.open_file(temp_filename)
                reject_message = "".join(temp_fh.readlines())
                temp_fh.close()
                print "Reject message:"
                print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
                prompt = "[R]eject, Edit, Abandon, Quit ?"
                answer = "XXX"
                # Keep prompting until one of the offered letters is given;
                # an empty answer takes the [bracketed] default.
                while prompt.find(answer) == -1:
                    answer = utils.our_raw_input(prompt)
                    m = re_default_answer.search(prompt)
                    if answer == "":
                        answer = m.group(1)
                    answer = answer[:1].upper()
            os.unlink(temp_filename)
            if answer == 'A':
                return 1
            elif answer == 'Q':
                sys.exit(0)

        print "Rejecting.\n"

        Cnf = self.Cnf
        Subst = self.Subst
        pkg = self.pkg

        reason_filename = pkg.changes_file[:-8] + ".reason"
        reason_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename

        # Move all the files into the reject directory
        reject_files = pkg.files.keys() + [pkg.changes_file]
        self.force_reject(reject_files)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reason_filename):
            os.unlink(reason_filename)
        reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)

        if not manual:
            # Automatic rejection: canned headers, raw reject_message as reason.
            Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
            Subst["__MANUAL_REJECT_MESSAGE__"] = ""
            Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
            os.write(reason_fd, reject_message)
            reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
        else:
            # Build up the rejection email
            user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"])

            Subst["__REJECTOR_ADDRESS__"] = user_email_address
            Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
            Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
            reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
            # Write the rejection email out as the <foo>.reason file
            os.write(reason_fd, reject_mail_message)

        os.close(reason_fd)

        # Send the rejection mail if appropriate
        if not Cnf["Dinstall::Options::No-Mail"]:
            utils.send_mail(reject_mail_message)

        self.Logger.log(["rejected", pkg.changes_file])
        return 0
675
676     ################################################################################
677
678     # Ensure that source exists somewhere in the archive for the binary
679     # upload being processed.
680     #
681     # (1) exact match                      => 1.0-3
682     # (2) Bin-only NMU                     => 1.0-3+b1 , 1.0-3.1+b1
683
684     def source_exists (self, package, source_version, suites = ["any"]):
685         okay = 1
686         for suite in suites:
687             if suite == "any":
688                 que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
689                     (package)
690             else:
691                 # source must exist in suite X, or in some other suite that's
692                 # mapped to X, recursively... silent-maps are counted too,
693                 # unreleased-maps aren't.
694                 maps = self.Cnf.ValueList("SuiteMappings")[:]
695                 maps.reverse()
696                 maps = [ m.split() for m in maps ]
697                 maps = [ (x[1], x[2]) for x in maps
698                                 if x[0] == "map" or x[0] == "silent-map" ]
699                 s = [suite]
700                 for x in maps:
701                         if x[1] in s and x[0] not in s:
702                                 s.append(x[0])
703
704                 que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, " OR ".join(["su.suite_name = '%s'" % a for a in s]))
705             q = self.projectB.query(que)
706
707             # Reduce the query results to a list of version numbers
708             ql = [ i[0] for i in q.getresult() ]
709
710             # Try (1)
711             if source_version in ql:
712                 continue
713
714             # Try (2)
715             orig_source_version = re_bin_only_nmu.sub('', source_version)
716             if orig_source_version in ql:
717                 continue
718
719             # No source found...
720             okay = 0
721             break
722         return okay
723
724     ################################################################################
725     
    def in_override_p (self, package, component, suite, binary_type, file):
        """Look up the override entry (section, priority) for 'package'.

        An empty 'binary_type' means the package is source and uses the
        "dsc" override type; if no "dsc" override exists, the lookup
        falls back to the binary ("deb"/"udeb") override types.

        Returns None when the (possibly remapped) suite is unknown,
        otherwise the raw query result: a list of (section, priority)
        rows, empty if no override exists.  Side effect: on a hit, the
        section and priority are stored in self.pkg.files[file] under
        "override section" / "override priority" for later checks.
        """
        files = self.pkg.files

        if binary_type == "": # must be source
            type = "dsc"
        else:
            type = binary_type

        # Override suite name; used for example with proposed-updates
        if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
            suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)]

        # Avoid <undef> on unknown distributions
        suite_id = database.get_suite_id(suite)
        if suite_id == -1:
            return None
        component_id = database.get_component_id(component)
        type_id = database.get_override_type_id(type)

        # FIXME: nasty non-US specific hack
        # NOTE(review): this strip runs *after* get_component_id() has
        # already been called, and 'component' is not used again below,
        # so it currently has no effect on the query -- confirm whether
        # it was meant to run before the component_id lookup.
        if component.lower().startswith("non-us/"):
            component = component[7:]

        q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
                           % (package, suite_id, component_id, type_id))
        result = q.getresult()
        # If checking for a source package fall back on the binary override type
        if type == "dsc" and not result:
            deb_type_id = database.get_override_type_id("deb")
            udeb_type_id = database.get_override_type_id("udeb")
            q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
                               % (package, suite_id, component_id, deb_type_id, udeb_type_id))
            result = q.getresult()

        # Remember the section and priority so we can check them later if appropriate
        if result:
            files[file]["override section"] = result[0][0]
            files[file]["override priority"] = result[0][1]

        return result
766
767     ################################################################################
768
769     def reject (self, str, prefix="Rejected: "):
770         if str:
771             # Unlike other rejects we add new lines first to avoid trailing
772             # new lines when this message is passed back up to a caller.
773             if self.reject_message:
774                 self.reject_message += "\n"
775             self.reject_message += prefix + str
776
777     ################################################################################
778
779     def get_anyversion(self, query_result, suite):
780         anyversion=None
781         anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
782         for (v, s) in query_result:
783             if s in [ x.lower() for x in anysuite ]:
784                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
785                     anyversion=v
786         return anyversion
787
788     ################################################################################
789
790     def cross_suite_version_check(self, query_result, file, new_version):
791         """Ensure versions are newer than existing packages in target
792         suites and that cross-suite version checking rules as
793         set out in the conf file are satisfied."""
794
795         # Check versions for each target suite
796         for target_suite in self.pkg.changes["distribution"].keys():
797             must_be_newer_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
798             must_be_older_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
799             # Enforce "must be newer than target suite" even if conffile omits it
800             if target_suite not in must_be_newer_than:
801                 must_be_newer_than.append(target_suite)
802             for entry in query_result:
803                 existent_version = entry[0]
804                 suite = entry[1]
805                 if suite in must_be_newer_than and \
806                    apt_pkg.VersionCompare(new_version, existent_version) < 1:
807                     self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
808                 if suite in must_be_older_than and \
809                    apt_pkg.VersionCompare(new_version, existent_version) > -1:
810                     ch = self.pkg.changes
811                     cansave = 0
812                     if ch.get('distribution-version', {}).has_key(suite):
813                         # we really use the other suite, ignoring the conflicting one ...
814                         addsuite = ch["distribution-version"][suite]
815                     
816                         add_version = self.get_anyversion(query_result, addsuite)
817                         target_version = self.get_anyversion(query_result, target_suite)
818                     
819                         if not add_version:
820                             # not add_version can only happen if we map to a suite
821                             # that doesn't enhance the suite we're propup'ing from.
822                             # so "propup-ver x a b c; map a d" is a problem only if
823                             # d doesn't enhance a.
824                             #
825                             # i think we could always propagate in this case, rather
826                             # than complaining. either way, this isn't a REJECT issue
827                             #
828                             # And - we really should complain to the dorks who configured dak
829                             self.reject("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite), "Warning: ")
830                             self.pkg.changes.setdefault("propdistribution", {})
831                             self.pkg.changes["propdistribution"][addsuite] = 1
832                             cansave = 1
833                         elif not target_version:
834                             # not targets_version is true when the package is NEW
835                             # we could just stick with the "...old version..." REJECT
836                             # for this, I think.
837                             self.reject("Won't propogate NEW packages.")
838                         elif apt_pkg.VersionCompare(new_version, add_version) < 0:
839                             # propogation would be redundant. no need to reject though.
840                             self.reject("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite), "Warning: ")
841                             cansave = 1
842                         elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
843                              apt_pkg.VersionCompare(add_version, target_version) >= 0:
844                             # propogate!!
845                             self.reject("Propogating upload to %s" % (addsuite), "Warning: ")
846                             self.pkg.changes.setdefault("propdistribution", {})
847                             self.pkg.changes["propdistribution"][addsuite] = 1
848                             cansave = 1
849                 
850                     if not cansave:
851                         self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
852
853     ################################################################################
854
855     def check_binary_against_db(self, file):
856         self.reject_message = ""
857         files = self.pkg.files
858
859         # Ensure version is sane
860         q = self.projectB.query("""
861 SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
862                                      architecture a
863  WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
864    AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
865                                 % (files[file]["package"],
866                                    files[file]["architecture"]))
867         self.cross_suite_version_check(q.getresult(), file, files[file]["version"])
868
869         # Check for any existing copies of the file
870         q = self.projectB.query("""
871 SELECT b.id FROM binaries b, architecture a
872  WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
873    AND a.id = b.architecture"""
874                                 % (files[file]["package"],
875                                    files[file]["version"],
876                                    files[file]["architecture"]))
877         if q.getresult():
878             self.reject("%s: can not overwrite existing copy already in the archive." % (file))
879
880         return self.reject_message
881
882     ################################################################################
883
884     def check_source_against_db(self, file):
885         self.reject_message = ""
886         dsc = self.pkg.dsc
887
888         # Ensure version is sane
889         q = self.projectB.query("""
890 SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
891  WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")))
892         self.cross_suite_version_check(q.getresult(), file, dsc.get("version"))
893
894         return self.reject_message
895
896     ################################################################################
897
898     # **WARNING**
899     # NB: this function can remove entries from the 'files' index [if
900     # the .orig.tar.gz is a duplicate of the one in the archive]; if
901     # you're iterating over 'files' and call this function as part of
902     # the loop, be sure to add a check to the top of the loop to
# ensure you haven't just tried to dereference the deleted entry.
904     # **WARNING**
905
906     def check_dsc_against_db(self, file):
907         self.reject_message = ""
908         files = self.pkg.files
909         dsc_files = self.pkg.dsc_files
910         legacy_source_untouchable = self.pkg.legacy_source_untouchable
911         self.pkg.orig_tar_gz = None
912
913         # Try and find all files mentioned in the .dsc.  This has
914         # to work harder to cope with the multiple possible
915         # locations of an .orig.tar.gz.
916         for dsc_file in dsc_files.keys():
917             found = None
918             if files.has_key(dsc_file):
919                 actual_md5 = files[dsc_file]["md5sum"]
920                 actual_size = int(files[dsc_file]["size"])
921                 found = "%s in incoming" % (dsc_file)
922                 # Check the file does not already exist in the archive
923                 q = self.projectB.query("SELECT f.size, f.md5sum, l.path, f.filename FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
924                 ql = q.getresult()
925                 # Strip out anything that isn't '%s' or '/%s$'
926                 for i in ql:
927                     if i[3] != dsc_file and i[3][-(len(dsc_file)+1):] != '/'+dsc_file:
928                         ql.remove(i)
929
930                 # "[dak] has not broken them.  [dak] has fixed a
931                 # brokenness.  Your crappy hack exploited a bug in
932                 # the old dinstall.
933                 #
934                 # "(Come on!  I thought it was always obvious that
935                 # one just doesn't release different files with
936                 # the same name and version.)"
937                 #                        -- ajk@ on d-devel@l.d.o
938
939                 if ql:
940                     # Ignore exact matches for .orig.tar.gz
941                     match = 0
942                     if dsc_file.endswith(".orig.tar.gz"):
943                         for i in ql:
944                             if files.has_key(dsc_file) and \
945                                int(files[dsc_file]["size"]) == int(i[0]) and \
946                                files[dsc_file]["md5sum"] == i[1]:
947                                 self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ")
948                                 del files[dsc_file]
949                                 self.pkg.orig_tar_gz = i[2] + i[3]
950                                 match = 1
951
952                     if not match:
953                         self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file))
954             elif dsc_file.endswith(".orig.tar.gz"):
955                 # Check in the pool
956                 q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
957                 ql = q.getresult()
958                 # Strip out anything that isn't '%s' or '/%s$'
959                 for i in ql:
960                     if i[1] != dsc_file and i[1][-(len(dsc_file)+1):] != '/'+dsc_file:
961                         ql.remove(i)
962
963                 if ql:
964                     # Unfortunately, we may get more than one match here if,
965                     # for example, the package was in potato but had an -sa
966                     # upload in woody.  So we need to choose the right one.
967
968                     x = ql[0]; # default to something sane in case we don't match any or have only one
969
970                     if len(ql) > 1:
971                         for i in ql:
972                             old_file = i[0] + i[1]
973                             old_file_fh = utils.open_file(old_file)
974                             actual_md5 = apt_pkg.md5sum(old_file_fh)
975                             old_file_fh.close()
976                             actual_size = os.stat(old_file)[stat.ST_SIZE]
977                             if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
978                                 x = i
979                             else:
980                                 legacy_source_untouchable[i[3]] = ""
981
982                     old_file = x[0] + x[1]
983                     old_file_fh = utils.open_file(old_file)
984                     actual_md5 = apt_pkg.md5sum(old_file_fh)
985                     old_file_fh.close()
986                     actual_size = os.stat(old_file)[stat.ST_SIZE]
987                     found = old_file
988                     suite_type = x[2]
989                     dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
990                     # See install() in process-accepted...
991                     self.pkg.orig_tar_id = x[3]
992                     self.pkg.orig_tar_gz = old_file
993                     if suite_type == "legacy" or suite_type == "legacy-mixed":
994                         self.pkg.orig_tar_location = "legacy"
995                     else:
996                         self.pkg.orig_tar_location = x[4]
997                 else:
998                     # Not there? Check the queue directories...
999
1000                     in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file)
1001                     # See process_it() in 'dak process-unchecked' for explanation of this
1002                     if os.path.exists(in_unchecked):
1003                         return (self.reject_message, in_unchecked)
1004                     else:
1005                         for dir in [ "Accepted", "New", "Byhand" ]:
1006                             in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (dir)],dsc_file)
1007                             if os.path.exists(in_otherdir):
1008                                 in_otherdir_fh = utils.open_file(in_otherdir)
1009                                 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
1010                                 in_otherdir_fh.close()
1011                                 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
1012                                 found = in_otherdir
1013                                 self.pkg.orig_tar_gz = in_otherdir
1014
1015                     if not found:
1016                         self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file))
1017                         self.pkg.orig_tar_gz = -1
1018                         continue
1019             else:
1020                 self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file))
1021                 continue
1022             if actual_md5 != dsc_files[dsc_file]["md5sum"]:
1023                 self.reject("md5sum for %s doesn't match %s." % (found, file))
1024             if actual_size != int(dsc_files[dsc_file]["size"]):
1025                 self.reject("size for %s doesn't match %s." % (found, file))
1026
1027         return (self.reject_message, None)
1028
1029     def do_query(self, q):
1030         sys.stderr.write("query: \"%s\" ... " % (q))
1031         before = time.time()
1032         r = self.projectB.query(q)
1033         time_diff = time.time()-before
1034         sys.stderr.write("took %.3f seconds.\n" % (time_diff))
1035         return r