]> git.decadent.org.uk Git - dak.git/blob - daklib/queue.py
use correct override information for pre-existing packages
[dak.git] / daklib / queue.py
1 #!/usr/bin/env python
2
3 # Queue utility functions for dak
4 # Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006  James Troup <james@nocrew.org>
5
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 2 of the License, or
9 # (at your option) any later version.
10
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14 # GNU General Public License for more details.
15
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software
18 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
19
20 ###############################################################################
21
22 import cPickle, errno, os, pg, re, stat, sys, time
23 import apt_inst, apt_pkg
24 import utils, database
25
26 from types import *
27
28 ###############################################################################
29
# A string consisting solely of digits
re_isanum = re.compile (r"^\d+$")
# The "[default]" marker in an interactive prompt string
re_default_answer = re.compile(r"\[(.*)\]")
# Blank line (paragraph separator) in a .changes "Changes" field
re_fdnic = re.compile(r"\n\n")
# "+bN" version suffix denoting a binary-only NMU
re_bin_only_nmu = re.compile(r"\+b\d+$")
34
35 ###############################################################################
36
37 # Convenience wrapper to carry around all the package information in
38
class Pkg:
    """Convenience attribute bag carrying all per-upload package information."""

    def __init__(self, **kwds):
        """Create the bag, storing every keyword argument as an attribute."""
        for key, value in kwds.items():
            setattr(self, key, value)

    def update(self, **kwds):
        """Store (or overwrite) every keyword argument as an attribute."""
        for key, value in kwds.items():
            setattr(self, key, value)
45
46 ###############################################################################
47
class nmu_p:
    """Decide whether an upload is an NMU (non-maintainer upload)."""
    # Read in the group maintainer override file
    def __init__ (self, Cnf):
        # Maps lowercased maintainer addresses of group-maintained
        # packages (never treated as NMUs) to 1.
        self.group_maint = {}
        self.Cnf = Cnf
        if Cnf.get("Dinstall::GroupOverrideFilename"):
            filename = Cnf["Dir::Override"] + Cnf["Dinstall::GroupOverrideFilename"]
            file = utils.open_file(filename)
            for line in file.readlines():
                # Strip comments and whitespace; skip blank lines
                line = utils.re_comments.sub('', line).lower().strip()
                if line != "":
                    self.group_maint[line] = 1
            file.close()

    def is_an_nmu (self, pkg):
        """Return 1 if 'pkg' looks like an NMU, 0 otherwise.

        Not an NMU when Changed-By matches the .dsc Maintainer, matches
        one of the Uploaders, or the package is group-maintained (per the
        override file read in __init__)."""
        Cnf = self.Cnf
        changes = pkg.changes
        dsc = pkg.dsc

        # Fall back to our own address if the .dsc lacks a Maintainer field
        i = utils.fix_maintainer (dsc.get("maintainer",
                                          Cnf["Dinstall::MyEmailAddress"]).lower())
        (dsc_rfc822, dsc_rfc2047, dsc_name, dsc_email) = i
        # changes["changedbyname"] == dsc_name is probably never true, but better safe than sorry
        if dsc_name == changes["maintainername"].lower() and \
           (changes["changedby822"] == "" or changes["changedbyname"].lower() == dsc_name):
            return 0

        if dsc.has_key("uploaders"):
            uploaders = dsc["uploaders"].lower().split(",")
            uploadernames = {}
            for i in uploaders:
                (rfc822, rfc2047, name, email) = utils.fix_maintainer (i.strip())
                uploadernames[name] = ""
            if uploadernames.has_key(changes["changedbyname"].lower()):
                return 0

        # Some group maintained packages (e.g. Debian QA) are never NMU's
        if self.group_maint.has_key(changes["maintaineremail"].lower()):
            return 0

        return 1
89
90 ###############################################################################
91
92 class Upload:
93
    def __init__(self, Cnf):
        """Set up an Upload: configuration, NMU detection, substitution
        template defaults, empty per-upload state and the projectb DB
        connection."""
        self.Cnf = Cnf
        # Read in the group-maint override file
        self.nmu = nmu_p(Cnf)
        # Running totals for this session's accepted uploads
        self.accept_count = 0
        self.accept_bytes = 0L
        # Per-upload state; (re)populated by init_vars()/update_vars()
        self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
                       legacy_source_untouchable = {})

        # Initialize the substitution template mapping global
        Subst = self.Subst = {}
        Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"]
        Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"]
        Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"]
        Subst["__DAK_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]

        # Open the projectb database connection and prime database.py's caches
        self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
        database.init(Cnf, self.projectB)
112
113     ###########################################################################
114
115     def init_vars (self):
116         for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
117             exec "self.pkg.%s.clear();" % (i)
118         self.pkg.orig_tar_id = None
119         self.pkg.orig_tar_location = ""
120         self.pkg.orig_tar_gz = None
121
122     ###########################################################################
123
124     def update_vars (self):
125         dump_filename = self.pkg.changes_file[:-8]+".dak"
126         dump_file = utils.open_file(dump_filename)
127         p = cPickle.Unpickler(dump_file)
128         for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
129             exec "self.pkg.%s.update(p.load());" % (i)
130         for i in [ "orig_tar_id", "orig_tar_location" ]:
131             exec "self.pkg.%s = p.load();" % (i)
132         dump_file.close()
133
134     ###########################################################################
135
136     # This could just dump the dictionaries as is, but I'd like to
137     # avoid this so there's some idea of what process-accepted &
138     # process-new use from process-unchecked
139
140     def dump_vars(self, dest_dir):
141         for i in [ "changes", "dsc", "files", "dsc_files",
142                    "legacy_source_untouchable", "orig_tar_id", "orig_tar_location" ]:
143             exec "%s = self.pkg.%s;" % (i,i)
144         dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".dak")
145         dump_file = utils.open_file(dump_filename, 'w')
146         try:
147             os.chmod(dump_filename, 0660)
148         except OSError, e:
149             if errno.errorcode[e.errno] == 'EPERM':
150                 perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE])
151                 if perms & stat.S_IROTH:
152                     utils.fubar("%s is world readable and chmod failed." % (dump_filename))
153             else:
154                 raise
155
156         p = cPickle.Pickler(dump_file, 1)
157         for i in [ "d_changes", "d_dsc", "d_files", "d_dsc_files" ]:
158             exec "%s = {}" % i
159         ## files
160         for file in files.keys():
161             d_files[file] = {}
162             for i in [ "package", "version", "architecture", "type", "size",
163                        "md5sum", "component", "location id", "source package",
164                        "source version", "maintainer", "dbtype", "files id",
165                        "new", "section", "priority", "othercomponents",
166                        "pool name", "original component" ]:
167                 if files[file].has_key(i):
168                     d_files[file][i] = files[file][i]
169         ## changes
170         # Mandatory changes fields
171         for i in [ "distribution", "source", "architecture", "version",
172                    "maintainer", "urgency", "fingerprint", "changedby822",
173                    "changedby2047", "changedbyname", "maintainer822",
174                    "maintainer2047", "maintainername", "maintaineremail",
175                    "closes", "changes" ]:
176             d_changes[i] = changes[i]
177         # Optional changes fields
178         for i in [ "changed-by", "filecontents", "format", "process-new note", "adv id", "distribution-version" ]:
179             if changes.has_key(i):
180                 d_changes[i] = changes[i]
181         ## dsc
182         for i in [ "source", "version", "maintainer", "fingerprint",
183                    "uploaders", "bts changelog" ]:
184             if dsc.has_key(i):
185                 d_dsc[i] = dsc[i]
186         ## dsc_files
187         for file in dsc_files.keys():
188             d_dsc_files[file] = {}
189             # Mandatory dsc_files fields
190             for i in [ "size", "md5sum" ]:
191                 d_dsc_files[file][i] = dsc_files[file][i]
192             # Optional dsc_files fields
193             for i in [ "files id" ]:
194                 if dsc_files[file].has_key(i):
195                     d_dsc_files[file][i] = dsc_files[file][i]
196
197         for i in [ d_changes, d_dsc, d_files, d_dsc_files,
198                    legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
199             p.dump(i)
200         dump_file.close()
201
202     ###########################################################################
203
204     # Set up the per-package template substitution mappings
205
    def update_subst (self, reject_message = ""):
        """Refresh self.Subst with the current upload's values:
        architecture list, maintainer From/To addresses, source, version
        and the given reject message."""
        Subst = self.Subst
        changes = self.pkg.changes
        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
            changes["architecture"] = { "Unknown" : "" }
        # and maintainer2047 may not exist.
        if not changes.has_key("maintainer2047"):
            changes["maintainer2047"] = self.Cnf["Dinstall::MyEmailAddress"]

        Subst["__ARCHITECTURE__"] = " ".join(changes["architecture"].keys())
        Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
            Subst["__MAINTAINER_FROM__"] = changes["changedby2047"]
            Subst["__MAINTAINER_TO__"] = "%s, %s" % (changes["changedby2047"],
                                                     changes["maintainer2047"])
            Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown")
        else:
            Subst["__MAINTAINER_FROM__"] = changes["maintainer2047"]
            Subst["__MAINTAINER_TO__"] = changes["maintainer2047"]
            Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown")
        # Bcc the package tracking system, if one is configured
        if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
            Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if self.Cnf.get("Dinstall::OverrideMaintainer"):
            Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"]
            Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"]

        Subst["__REJECT_MESSAGE__"] = reject_message
        Subst["__SOURCE__"] = changes.get("source", "Unknown")
        Subst["__VERSION__"] = changes.get("version", "Unknown")
241
242     ###########################################################################
243
    def build_summaries(self):
        """Build and return (summary, short_summary): human-readable
        texts describing where each of the upload's files will go
        (byhand / NEW / pool destination) plus the override entries, for
        use in mails and operator prompts."""
        changes = self.pkg.changes
        files = self.pkg.files

        byhand = summary = new = ""

        # changes["distribution"] may not exist in corner cases
        # (e.g. unreadable changes files)
        if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
            changes["distribution"] = {}

        override_summary ="";
        file_keys = files.keys()
        file_keys.sort()
        for file in file_keys:
            if files[file].has_key("byhand"):
                byhand = 1
                summary += file + " byhand\n"
            elif files[file].has_key("new"):
                new = 1
                summary += "(new) %s %s %s\n" % (file, files[file]["priority"], files[file]["section"])
                if files[file].has_key("othercomponents"):
                    summary += "WARNING: Already present in %s distribution.\n" % (files[file]["othercomponents"])
                if files[file]["type"] == "deb":
                    # Include the package description for NEW .debs
                    deb_fh = utils.open_file(file)
                    summary += apt_pkg.ParseSection(apt_inst.debExtractControl(deb_fh))["Description"] + '\n'
                    deb_fh.close()
            else:
                files[file]["pool name"] = utils.poolify (changes.get("source",""), files[file]["component"])
                destination = self.Cnf["Dir::PoolRoot"] + files[file]["pool name"] + file
                summary += file + "\n  to " + destination + "\n"
                if files[file]["type"] in ["deb", "udeb", "dsc"]:
                    # (queue/unchecked), there we have override entries already, use them
                    # (process-new), there we dont have override entries, use the newly generated ones.
                    override_prio = files[file].get("override priority", files[file]["priority"])
                    override_sect = files[file].get("override section", files[file]["section"])
                    override_summary += "%s - %s %s\n" % (file, override_prio, override_sect)

        short_summary = summary

        # This is for direport's benefit...
        f = re_fdnic.sub("\n .\n", changes.get("changes",""))

        if byhand or new:
            summary += "Changes: " + f

        summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"

        summary += self.announce(short_summary, 0)

        return (summary, short_summary)
295
296     ###########################################################################
297
298     def close_bugs (self, summary, action):
299         changes = self.pkg.changes
300         Subst = self.Subst
301         Cnf = self.Cnf
302
303         bugs = changes["closes"].keys()
304
305         if not bugs:
306             return summary
307
308         bugs.sort()
309         if not self.nmu.is_an_nmu(self.pkg):
310             if changes["distribution"].has_key("experimental"):
311                 # tag bugs as fixed-in-experimental for uploads to experimental
312                 summary += "Setting bugs to severity fixed: "
313                 control_message = ""
314                 for bug in bugs:
315                     summary += "%s " % (bug)
316                     control_message += "tag %s + fixed-in-experimental\n" % (bug)
317                 if action and control_message != "":
318                     Subst["__CONTROL_MESSAGE__"] = control_message
319                     mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-experimental-fixed")
320                     utils.send_mail (mail_message)
321                 if action:
322                     self.Logger.log(["setting bugs to fixed"]+bugs)
323
324
325             else:
326                 summary += "Closing bugs: "
327                 for bug in bugs:
328                     summary += "%s " % (bug)
329                     if action:
330                         Subst["__BUG_NUMBER__"] = bug
331                         if changes["distribution"].has_key("stable"):
332                             Subst["__STABLE_WARNING__"] = """
333 Note that this package is not part of the released stable Debian
334 distribution.  It may have dependencies on other unreleased software,
335 or other instabilities.  Please take care if you wish to install it.
336 The update will eventually make its way into the next released Debian
337 distribution."""
338                         else:
339                             Subst["__STABLE_WARNING__"] = ""
340                             mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-close")
341                             utils.send_mail (mail_message)
342                 if action:
343                     self.Logger.log(["closing bugs"]+bugs)
344
345         else:                     # NMU
346             summary += "Setting bugs to severity fixed: "
347             control_message = ""
348             for bug in bugs:
349                 summary += "%s " % (bug)
350                 control_message += "tag %s + fixed\n" % (bug)
351             if action and control_message != "":
352                 Subst["__CONTROL_MESSAGE__"] = control_message
353                 mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-nmu-fixed")
354                 utils.send_mail (mail_message)
355             if action:
356                 self.Logger.log(["setting bugs to fixed"]+bugs)
357         summary += "\n"
358         return summary
359
360     ###########################################################################
361
362     def announce (self, short_summary, action):
363         Subst = self.Subst
364         Cnf = self.Cnf
365         changes = self.pkg.changes
366
367         # Only do announcements for source uploads with a recent dpkg-dev installed
368         if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):
369             return ""
370
371         lists_done = {}
372         summary = ""
373         Subst["__SHORT_SUMMARY__"] = short_summary
374
375         for dist in changes["distribution"].keys():
376             list = Cnf.Find("Suite::%s::Announce" % (dist))
377             if list == "" or lists_done.has_key(list):
378                 continue
379             lists_done[list] = 1
380             summary += "Announcing to %s\n" % (list)
381
382             if action:
383                 Subst["__ANNOUNCE_LIST_ADDRESS__"] = list
384                 if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
385                     Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"])
386                 mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.announce")
387                 utils.send_mail (mail_message)
388
389         if Cnf.FindB("Dinstall::CloseBugs"):
390             summary = self.close_bugs(summary, action)
391
392         return summary
393
394     ###########################################################################
395
    def accept (self, summary, short_summary):
        """Accept the upload: dump its metadata, move all files into the
        accepted queue, send the accepted mail / announcements, write
        DebBugs version-tracking data and feed the auto-build queue."""
        Cnf = self.Cnf
        Subst = self.Subst
        files = self.pkg.files
        changes = self.pkg.changes
        changes_file = self.pkg.changes_file
        dsc = self.pkg.dsc

        print "Accepting."
        self.Logger.log(["Accepting changes",changes_file])

        # Preserve the upload's metadata (.dak file) alongside it
        self.dump_vars(Cnf["Dir::Queue::Accepted"])

        # Move all the files into the accepted directory
        utils.move(changes_file, Cnf["Dir::Queue::Accepted"])
        file_keys = files.keys()
        for file in file_keys:
            utils.move(file, Cnf["Dir::Queue::Accepted"])
            self.accept_bytes += float(files[file]["size"])
        self.accept_count += 1

        # Send accept mail, announce to lists, close bugs and check for
        # override disparities
        if not Cnf["Dinstall::Options::No-Mail"]:
            Subst["__SUITE__"] = ""
            Subst["__SUMMARY__"] = summary
            mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
            utils.send_mail(mail_message)
            self.announce(short_summary, 1)


        ## Helper stuff for DebBugs Version Tracking
        if Cnf.Find("Dir::Queue::BTSVersionTrack"):
            # ??? once queue/* is cleared on *.d.o and/or reprocessed
            # the conditionalization on dsc["bts changelog"] should be
            # dropped.

            # Write out the version history from the changelog
            if changes["architecture"].has_key("source") and \
               dsc.has_key("bts changelog"):

                # Write to a dot-prefixed temp file then rename into place
                # so readers never see a partially-written file.
                temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                                    dotprefix=1, perms=0644)
                version_history = utils.open_file(temp_filename, 'w')
                version_history.write(dsc["bts changelog"])
                version_history.close()
                filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                      changes_file[:-8]+".versions")
                os.rename(temp_filename, filename)

            # Write out the binary -> source mapping.
            temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                                dotprefix=1, perms=0644)
            debinfo = utils.open_file(temp_filename, 'w')
            for file in file_keys:
                f = files[file]
                if f["type"] == "deb":
                    line = " ".join([f["package"], f["version"],
                                     f["architecture"], f["source package"],
                                     f["source version"]])
                    debinfo.write(line+"\n")
            debinfo.close()
            filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                  changes_file[:-8]+".debinfo")
            os.rename(temp_filename, filename)

        self.queue_build("accepted", Cnf["Dir::Queue::Accepted"])
463
464     ###########################################################################
465
    def queue_build (self, queue, path):
        """Register the upload's files in the queue_build table (and copy
        or symlink them into the queue-build area) for every suite listed
        in Dinstall::QueueBuildSuites, so queued packages can be
        auto-built.  'queue' is the queue name, 'path' the directory the
        files currently live in."""
        Cnf = self.Cnf
        Subst = self.Subst
        files = self.pkg.files
        changes = self.pkg.changes
        changes_file = self.pkg.changes_file
        dsc = self.pkg.dsc
        file_keys = files.keys()

        ## Special support to enable clean auto-building of queued packages
        queue_id = database.get_or_set_queue_id(queue)

        # All DB work happens in one transaction.  NOTE(review): the SQL
        # below is built by %-interpolation, not parameterized -- values
        # come from the archive itself, but verify none can contain quotes.
        self.projectB.query("BEGIN WORK")
        for suite in changes["distribution"].keys():
            if suite not in Cnf.ValueList("Dinstall::QueueBuildSuites"):
                continue
            suite_id = database.get_suite_id(suite)
            dest_dir = Cnf["Dir::QueueBuild"]
            if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                dest_dir = os.path.join(dest_dir, suite)
            for file in file_keys:
                src = os.path.join(path, file)
                dest = os.path.join(dest_dir, file)
                if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                    # Copy it since the original won't be readable by www-data
                    utils.copy(src, dest)
                else:
                    # Create a symlink to it
                    os.symlink(src, dest)
                # Add it to the list of packages for later processing by apt-ftparchive
                self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
            # If the .orig.tar.gz is in the pool, create a symlink to
            # it (if one doesn't already exist)
            if self.pkg.orig_tar_id:
                # Determine the .orig.tar.gz file name
                # NOTE(review): if no dsc_file matches, 'filename' is unbound
                # below -- presumably orig_tar_id is only ever set when the
                # .dsc references an orig tarball; verify against callers.
                for dsc_file in self.pkg.dsc_files.keys():
                    if dsc_file.endswith(".orig.tar.gz"):
                        filename = dsc_file
                dest = os.path.join(dest_dir, filename)
                # If it doesn't exist, create a symlink
                if not os.path.exists(dest):
                    # Find the .orig.tar.gz in the pool
                    q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id))
                    ql = q.getresult()
                    if not ql:
                        utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id))
                    src = os.path.join(ql[0][0], ql[0][1])
                    os.symlink(src, dest)
                    # Add it to the list of packages for later processing by apt-ftparchive
                    self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
                # if it does, update things to ensure it's not removed prematurely
                else:
                    self.projectB.query("UPDATE queue_build SET in_queue = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id))

        self.projectB.query("COMMIT WORK")
521
522     ###########################################################################
523
    def check_override (self):
        """Mail the maintainer when the section/priority recorded in a
        sourceful upload's .debs disagrees with the override database."""
        Subst = self.Subst
        changes = self.pkg.changes
        files = self.pkg.files
        Cnf = self.Cnf

        # Abandon the check if:
        #  a) it's a non-sourceful upload
        #  b) override disparity checks have been disabled
        #  c) we're not sending mail
        if not changes["architecture"].has_key("source") or \
           not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
           Cnf["Dinstall::Options::No-Mail"]:
            return

        summary = ""
        file_keys = files.keys()
        file_keys.sort()
        for file in file_keys:
            # NEW files have no override entry to compare; only .debs carry one
            if not files[file].has_key("new") and files[file]["type"] == "deb":
                section = files[file]["section"]
                override_section = files[file]["override section"]
                if section.lower() != override_section.lower() and section != "-":
                    # Ignore this; it's a common mistake and not worth whining about
                    if section.lower() == "non-us/main" and override_section.lower() == "non-us":
                        continue
                    summary += "%s: package says section is %s, override says %s.\n" % (file, section, override_section)
                priority = files[file]["priority"]
                override_priority = files[file]["override priority"]
                if priority != override_priority and priority != "-":
                    summary += "%s: package says priority is %s, override says %s.\n" % (file, priority, override_priority)

        # Nothing to complain about
        if summary == "":
            return

        Subst["__SUMMARY__"] = summary
        mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/process-unchecked.override-disparity")
        utils.send_mail(mail_message)
562
563     ###########################################################################
564
    def force_reject (self, files):
        """Forcefully move files from the current directory to the
           reject directory.  If any file already exists in the reject
           directory it will be moved to the morgue to make way for
           the new file."""

        Cnf = self.Cnf

        for file in files:
            # Skip any files which don't exist or which we don't have permission to copy.
            if os.access(file,os.R_OK) == 0:
                continue
            dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file)
            try:
                # O_EXCL: claim the destination atomically so we notice
                # if it already exists (or someone races us).
                dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
            except OSError, e:
                # File exists?  Let's try and move it to the morgue
                if errno.errorcode[e.errno] == 'EEXIST':
                    morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file)
                    try:
                        morgue_file = utils.find_next_free(morgue_file)
                    except utils.tried_too_hard_exc:
                        # Something's either gone badly Pete Tong, or
                        # someone is trying to exploit us.
                        utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file))
                        return
                    utils.move(dest_file, morgue_file, perms=0660)
                    try:
                        # Second attempt now that the old file is out of the way
                        dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
                    except OSError, e:
                        # Likewise
                        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file))
                        return
                else:
                    raise
            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file, dest_file, 1, perms=0660)
            os.close(dest_fd)
604
605     ###########################################################################
606
    def do_reject (self, manual = 0, reject_message = ""):
        """Reject the current upload: move its files into the reject
        directory, write a '<changes>.reason' file and (unless No-Mail)
        mail the uploader.  With 'manual' set and no message given,
        spawn $EDITOR for the operator to write one.  Returns 1 if the
        operator abandons the rejection, 0 otherwise (Quit exits)."""
        # If we weren't given a manual rejection message, spawn an
        # editor so the user can add one in...
        if manual and not reject_message:
            temp_filename = utils.temp_filename()
            editor = os.environ.get("EDITOR","vi")
            answer = 'E'
            while answer == 'E':
                os.system("%s %s" % (editor, temp_filename))
                temp_fh = utils.open_file(temp_filename)
                reject_message = "".join(temp_fh.readlines())
                temp_fh.close()
                print "Reject message:"
                print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
                prompt = "[R]eject, Edit, Abandon, Quit ?"
                answer = "XXX"
                # Keep prompting until a letter from the prompt is given;
                # empty input selects the [R] default.
                while prompt.find(answer) == -1:
                    answer = utils.our_raw_input(prompt)
                    m = re_default_answer.search(prompt)
                    if answer == "":
                        answer = m.group(1)
                    answer = answer[:1].upper()
            os.unlink(temp_filename)
            if answer == 'A':
                return 1
            elif answer == 'Q':
                sys.exit(0)

        print "Rejecting.\n"

        Cnf = self.Cnf
        Subst = self.Subst
        pkg = self.pkg

        reason_filename = pkg.changes_file[:-8] + ".reason"
        reason_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename

        # Move all the files into the reject directory
        reject_files = pkg.files.keys() + [pkg.changes_file]
        self.force_reject(reject_files)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reason_filename):
            os.unlink(reason_filename)
        reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)

        if not manual:
            # Automatic rejection: mail comes from dak itself
            Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
            Subst["__MANUAL_REJECT_MESSAGE__"] = ""
            Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
            os.write(reason_fd, reject_message)
            reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
        else:
            # Build up the rejection email
            user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"])

            Subst["__REJECTOR_ADDRESS__"] = user_email_address
            Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
            Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
            reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
            # Write the rejection email out as the <foo>.reason file
            os.write(reason_fd, reject_mail_message)

        os.close(reason_fd)

        # Send the rejection mail if appropriate
        if not Cnf["Dinstall::Options::No-Mail"]:
            utils.send_mail(reject_mail_message)

        self.Logger.log(["rejected", pkg.changes_file])
        return 0
679
680     ################################################################################
681
682     # Ensure that source exists somewhere in the archive for the binary
683     # upload being processed.
684     #
685     # (1) exact match                      => 1.0-3
686     # (2) Bin-only NMU                     => 1.0-3+b1 , 1.0-3.1+b1
687
688     def source_exists (self, package, source_version, suites = ["any"]):
689         okay = 1
690         for suite in suites:
691             if suite == "any":
692                 que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
693                     (package)
694             else:
695                 # source must exist in suite X, or in some other suite that's
696                 # mapped to X, recursively... silent-maps are counted too,
697                 # unreleased-maps aren't.
698                 maps = self.Cnf.ValueList("SuiteMappings")[:]
699                 maps.reverse()
700                 maps = [ m.split() for m in maps ]
701                 maps = [ (x[1], x[2]) for x in maps
702                                 if x[0] == "map" or x[0] == "silent-map" ]
703                 s = [suite]
704                 for x in maps:
705                         if x[1] in s and x[0] not in s:
706                                 s.append(x[0])
707
708                 que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, " OR ".join(["su.suite_name = '%s'" % a for a in s]))
709             q = self.projectB.query(que)
710
711             # Reduce the query results to a list of version numbers
712             ql = [ i[0] for i in q.getresult() ]
713
714             # Try (1)
715             if source_version in ql:
716                 continue
717
718             # Try (2)
719             orig_source_version = re_bin_only_nmu.sub('', source_version)
720             if orig_source_version in ql:
721                 continue
722
723             # No source found...
724             okay = 0
725             break
726         return okay
727
728     ################################################################################
729     
730     def in_override_p (self, package, component, suite, binary_type, file):
731         files = self.pkg.files
732
733         if binary_type == "": # must be source
734             type = "dsc"
735         else:
736             type = binary_type
737
738         # Override suite name; used for example with proposed-updates
739         if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
740             suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)]
741
742         # Avoid <undef> on unknown distributions
743         suite_id = database.get_suite_id(suite)
744         if suite_id == -1:
745             return None
746         component_id = database.get_component_id(component)
747         type_id = database.get_override_type_id(type)
748
749         # FIXME: nasty non-US speficic hack
750         if component.lower().startswith("non-us/"):
751             component = component[7:]
752
753         q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
754                            % (package, suite_id, component_id, type_id))
755         result = q.getresult()
756         # If checking for a source package fall back on the binary override type
757         if type == "dsc" and not result:
758             deb_type_id = database.get_override_type_id("deb")
759             udeb_type_id = database.get_override_type_id("udeb")
760             q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
761                                % (package, suite_id, component_id, deb_type_id, udeb_type_id))
762             result = q.getresult()
763
764         # Remember the section and priority so we can check them later if appropriate
765         if result:
766             files[file]["override section"] = result[0][0]
767             files[file]["override priority"] = result[0][1]
768
769         return result
770
771     ################################################################################
772
773     def reject (self, str, prefix="Rejected: "):
774         if str:
775             # Unlike other rejects we add new lines first to avoid trailing
776             # new lines when this message is passed back up to a caller.
777             if self.reject_message:
778                 self.reject_message += "\n"
779             self.reject_message += prefix + str
780
781     ################################################################################
782
783     def get_anyversion(self, query_result, suite):
784         anyversion=None
785         anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
786         for (v, s) in query_result:
787             if s in [ x.lower() for x in anysuite ]:
788                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
789                     anyversion=v
790         return anyversion
791
792     ################################################################################
793
794     def cross_suite_version_check(self, query_result, file, new_version):
795         """Ensure versions are newer than existing packages in target
796         suites and that cross-suite version checking rules as
797         set out in the conf file are satisfied."""
798
799         # Check versions for each target suite
800         for target_suite in self.pkg.changes["distribution"].keys():
801             must_be_newer_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
802             must_be_older_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
803             # Enforce "must be newer than target suite" even if conffile omits it
804             if target_suite not in must_be_newer_than:
805                 must_be_newer_than.append(target_suite)
806             for entry in query_result:
807                 existent_version = entry[0]
808                 suite = entry[1]
809                 if suite in must_be_newer_than and \
810                    apt_pkg.VersionCompare(new_version, existent_version) < 1:
811                     self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
812                 if suite in must_be_older_than and \
813                    apt_pkg.VersionCompare(new_version, existent_version) > -1:
814                     ch = self.pkg.changes
815                     cansave = 0
816                     if ch.get('distribution-version', {}).has_key(suite):
817                         # we really use the other suite, ignoring the conflicting one ...
818                         addsuite = ch["distribution-version"][suite]
819                     
820                         add_version = self.get_anyversion(query_result, addsuite)
821                         target_version = self.get_anyversion(query_result, target_suite)
822                     
823                         if not add_version:
824                             # not add_version can only happen if we map to a suite
825                             # that doesn't enhance the suite we're propup'ing from.
826                             # so "propup-ver x a b c; map a d" is a problem only if
827                             # d doesn't enhance a.
828                             #
829                             # i think we could always propagate in this case, rather
830                             # than complaining. either way, this isn't a REJECT issue
831                             #
832                             # And - we really should complain to the dorks who configured dak
833                             self.reject("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite), "Warning: ")
834                             self.pkg.changes.setdefault("propdistribution", {})
835                             self.pkg.changes["propdistribution"][addsuite] = 1
836                             cansave = 1
837                         elif not target_version:
838                             # not targets_version is true when the package is NEW
839                             # we could just stick with the "...old version..." REJECT
840                             # for this, I think.
841                             self.reject("Won't propogate NEW packages.")
842                         elif apt_pkg.VersionCompare(new_version, add_version) < 0:
843                             # propogation would be redundant. no need to reject though.
844                             self.reject("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite), "Warning: ")
845                             cansave = 1
846                         elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
847                              apt_pkg.VersionCompare(add_version, target_version) >= 0:
848                             # propogate!!
849                             self.reject("Propogating upload to %s" % (addsuite), "Warning: ")
850                             self.pkg.changes.setdefault("propdistribution", {})
851                             self.pkg.changes["propdistribution"][addsuite] = 1
852                             cansave = 1
853                 
854                     if not cansave:
855                         self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
856
857     ################################################################################
858
859     def check_binary_against_db(self, file):
860         self.reject_message = ""
861         files = self.pkg.files
862
863         # Ensure version is sane
864         q = self.projectB.query("""
865 SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
866                                      architecture a
867  WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
868    AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
869                                 % (files[file]["package"],
870                                    files[file]["architecture"]))
871         self.cross_suite_version_check(q.getresult(), file, files[file]["version"])
872
873         # Check for any existing copies of the file
874         q = self.projectB.query("""
875 SELECT b.id FROM binaries b, architecture a
876  WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
877    AND a.id = b.architecture"""
878                                 % (files[file]["package"],
879                                    files[file]["version"],
880                                    files[file]["architecture"]))
881         if q.getresult():
882             self.reject("%s: can not overwrite existing copy already in the archive." % (file))
883
884         return self.reject_message
885
886     ################################################################################
887
888     def check_source_against_db(self, file):
889         self.reject_message = ""
890         dsc = self.pkg.dsc
891
892         # Ensure version is sane
893         q = self.projectB.query("""
894 SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
895  WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")))
896         self.cross_suite_version_check(q.getresult(), file, dsc.get("version"))
897
898         return self.reject_message
899
900     ################################################################################
901
902     # **WARNING**
903     # NB: this function can remove entries from the 'files' index [if
904     # the .orig.tar.gz is a duplicate of the one in the archive]; if
905     # you're iterating over 'files' and call this function as part of
906     # the loop, be sure to add a check to the top of the loop to
    # ensure you haven't just tried to dereference the deleted entry.
908     # **WARNING**
909
910     def check_dsc_against_db(self, file):
911         self.reject_message = ""
912         files = self.pkg.files
913         dsc_files = self.pkg.dsc_files
914         legacy_source_untouchable = self.pkg.legacy_source_untouchable
915         self.pkg.orig_tar_gz = None
916
917         # Try and find all files mentioned in the .dsc.  This has
918         # to work harder to cope with the multiple possible
919         # locations of an .orig.tar.gz.
920         for dsc_file in dsc_files.keys():
921             found = None
922             if files.has_key(dsc_file):
923                 actual_md5 = files[dsc_file]["md5sum"]
924                 actual_size = int(files[dsc_file]["size"])
925                 found = "%s in incoming" % (dsc_file)
926                 # Check the file does not already exist in the archive
927                 q = self.projectB.query("SELECT f.size, f.md5sum, l.path, f.filename FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
928                 ql = q.getresult()
929                 # Strip out anything that isn't '%s' or '/%s$'
930                 for i in ql:
931                     if i[3] != dsc_file and i[3][-(len(dsc_file)+1):] != '/'+dsc_file:
932                         ql.remove(i)
933
934                 # "[dak] has not broken them.  [dak] has fixed a
935                 # brokenness.  Your crappy hack exploited a bug in
936                 # the old dinstall.
937                 #
938                 # "(Come on!  I thought it was always obvious that
939                 # one just doesn't release different files with
940                 # the same name and version.)"
941                 #                        -- ajk@ on d-devel@l.d.o
942
943                 if ql:
944                     # Ignore exact matches for .orig.tar.gz
945                     match = 0
946                     if dsc_file.endswith(".orig.tar.gz"):
947                         for i in ql:
948                             if files.has_key(dsc_file) and \
949                                int(files[dsc_file]["size"]) == int(i[0]) and \
950                                files[dsc_file]["md5sum"] == i[1]:
951                                 self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ")
952                                 del files[dsc_file]
953                                 self.pkg.orig_tar_gz = i[2] + i[3]
954                                 match = 1
955
956                     if not match:
957                         self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file))
958             elif dsc_file.endswith(".orig.tar.gz"):
959                 # Check in the pool
960                 q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
961                 ql = q.getresult()
962                 # Strip out anything that isn't '%s' or '/%s$'
963                 for i in ql:
964                     if i[1] != dsc_file and i[1][-(len(dsc_file)+1):] != '/'+dsc_file:
965                         ql.remove(i)
966
967                 if ql:
968                     # Unfortunately, we may get more than one match here if,
969                     # for example, the package was in potato but had an -sa
970                     # upload in woody.  So we need to choose the right one.
971
972                     x = ql[0]; # default to something sane in case we don't match any or have only one
973
974                     if len(ql) > 1:
975                         for i in ql:
976                             old_file = i[0] + i[1]
977                             old_file_fh = utils.open_file(old_file)
978                             actual_md5 = apt_pkg.md5sum(old_file_fh)
979                             old_file_fh.close()
980                             actual_size = os.stat(old_file)[stat.ST_SIZE]
981                             if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
982                                 x = i
983                             else:
984                                 legacy_source_untouchable[i[3]] = ""
985
986                     old_file = x[0] + x[1]
987                     old_file_fh = utils.open_file(old_file)
988                     actual_md5 = apt_pkg.md5sum(old_file_fh)
989                     old_file_fh.close()
990                     actual_size = os.stat(old_file)[stat.ST_SIZE]
991                     found = old_file
992                     suite_type = x[2]
993                     dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
994                     # See install() in process-accepted...
995                     self.pkg.orig_tar_id = x[3]
996                     self.pkg.orig_tar_gz = old_file
997                     if suite_type == "legacy" or suite_type == "legacy-mixed":
998                         self.pkg.orig_tar_location = "legacy"
999                     else:
1000                         self.pkg.orig_tar_location = x[4]
1001                 else:
1002                     # Not there? Check the queue directories...
1003
1004                     in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file)
1005                     # See process_it() in 'dak process-unchecked' for explanation of this
1006                     if os.path.exists(in_unchecked):
1007                         return (self.reject_message, in_unchecked)
1008                     else:
1009                         for dir in [ "Accepted", "New", "Byhand" ]:
1010                             in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (dir)],dsc_file)
1011                             if os.path.exists(in_otherdir):
1012                                 in_otherdir_fh = utils.open_file(in_otherdir)
1013                                 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
1014                                 in_otherdir_fh.close()
1015                                 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
1016                                 found = in_otherdir
1017                                 self.pkg.orig_tar_gz = in_otherdir
1018
1019                     if not found:
1020                         self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file))
1021                         self.pkg.orig_tar_gz = -1
1022                         continue
1023             else:
1024                 self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file))
1025                 continue
1026             if actual_md5 != dsc_files[dsc_file]["md5sum"]:
1027                 self.reject("md5sum for %s doesn't match %s." % (found, file))
1028             if actual_size != int(dsc_files[dsc_file]["size"]):
1029                 self.reject("size for %s doesn't match %s." % (found, file))
1030
1031         return (self.reject_message, None)
1032
1033     def do_query(self, q):
1034         sys.stderr.write("query: \"%s\" ... " % (q))
1035         before = time.time()
1036         r = self.projectB.query(q)
1037         time_diff = time.time()-before
1038         sys.stderr.write("took %.3f seconds.\n" % (time_diff))
1039         return r