]> git.decadent.org.uk Git - dak.git/blob - daklib/queue.py
Merge
[dak.git] / daklib / queue.py
1 #!/usr/bin/env python
2
3 # Queue utility functions for dak
4 # Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006  James Troup <james@nocrew.org>
5
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 2 of the License, or
9 # (at your option) any later version.
10
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14 # GNU General Public License for more details.
15
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software
18 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
19
20 ###############################################################################
21
22 import cPickle, errno, os, pg, re, stat, sys, time
23 import apt_inst, apt_pkg
24 import utils, database
25
26 from types import *
27
28 ###############################################################################
29
re_isanum = re.compile (r"^\d+$")            # a string that is purely a decimal number
re_default_answer = re.compile(r"\[(.*)\]")  # the bracketed default answer in a prompt, e.g. "[R]eject"
re_fdnic = re.compile(r"\n\n")               # paragraph break; folded to "\n .\n" for the BTS/direport
re_bin_only_nmu = re.compile(r"\+b\d+$")     # binary-only NMU version suffix, e.g. "+b1"
34
35 ################################################################################
36
37 # Determine what parts in a .changes are NEW
38
def determine_new(changes, files, projectB, warn=1):
    """Determine what parts of a .changes upload are NEW (i.e. have no
    override entry in any target suite).

    Returns a dictionary keyed on package name; each value carries the
    candidate priority, section, type, component and the list of upload
    files it came from.  Packages that already have an override in a
    target suite are removed again (and their files' "new" flags
    cleared).  When warn is set, prints warnings about stable uploads
    and packages already present in another component.
    """
    new = {}

    # Build up a list of potentially new things
    for file in files.keys():
        f = files[file]
        # Skip byhand elements
        if f["type"] == "byhand":
            continue
        pkg = f["package"]
        priority = f["priority"]
        section = f["section"]
        type = get_type(f)
        component = f["component"]

        # Source packages always use the pseudo-priority "source"
        if type == "dsc":
            priority = "source"
        if not new.has_key(pkg):
            new[pkg] = {}
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        else:
            old_type = new[pkg]["type"]
            if old_type != type:
                # source gets trumped by deb or udeb
                if old_type == "dsc":
                    new[pkg]["priority"] = priority
                    new[pkg]["section"] = section
                    new[pkg]["type"] = type
                    new[pkg]["component"] = component
        new[pkg]["files"].append(file)
        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]

    # Drop anything that already has an override in a target suite.
    # NOTE(review): the values below are interpolated straight into the
    # SQL.  They come from a signed .changes and the override tables, but
    # a parameterised query would still be safer -- confirm whether the
    # pg module in use supports it before changing.
    for suite in changes["suite"].keys():
        suite_id = database.get_suite_id(suite)
        # new.keys() snapshots the keys (Python 2), so deleting inside
        # the loop is safe.
        for pkg in new.keys():
            component_id = database.get_component_id(new[pkg]["component"])
            type_id = database.get_override_type_id(new[pkg]["type"])
            q = projectB.query("SELECT package FROM override WHERE package = '%s' AND suite = %s AND component = %s AND type = %s" % (pkg, suite_id, component_id, type_id))
            ql = q.getresult()
            if ql:
                for file in new[pkg]["files"]:
                    if files[file].has_key("new"):
                        del files[file]["new"]
                del new[pkg]

    if warn:
        if changes["suite"].has_key("stable"):
            print "WARNING: overrides will be added for stable!"
            # NOTE(review): the oldstable warning only fires when the
            # upload also targets stable (it is nested) -- confirm that
            # is intentional.
            if changes["suite"].has_key("oldstable"):
                print "WARNING: overrides will be added for OLDstable!"
        for pkg in new.keys():
            if new[pkg].has_key("othercomponents"):
                print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])

    return new
99
100 ################################################################################
101
def get_type(f):
    """Return the override type ("dsc" or a binary dbtype) for file entry f.

    Uses f["dbtype"] when present; otherwise any source-related file
    extension maps to "dsc".  Aborts via utils.fubar() on anything
    unrecognised or unknown to the override type table.
    """
    # Determine the type
    if f.has_key("dbtype"):
        type = f["dbtype"]
    elif f["type"] in [ "orig.tar.gz", "orig.tar.bz2", "tar.gz", "tar.bz2", "diff.gz", "diff.bz2", "dsc" ]:
        type = "dsc"
    else:
        # Bug fix: this previously interpolated the not-yet-assigned local
        # 'type', which resolved to the builtin type object and produced a
        # useless error message; report the actual file type instead.
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (f["type"]))

    # Validate the override type
    type_id = database.get_override_type_id(type)
    if type_id == -1:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (type))

    return type
117
118 ################################################################################
119
120 # check if section/priority values are valid
121
def check_valid(new):
    """Look up and sanity-check section/priority for each pending NEW
    entry, storing the database ids in "section id" and "priority id";
    -1 marks an invalid combination."""
    for pkg in new.keys():
        entry = new[pkg]
        pkg_section = entry["section"]
        pkg_priority = entry["priority"]
        pkg_type = entry["type"]
        entry["section id"] = database.get_section_id(pkg_section)
        entry["priority id"] = database.get_priority_id(entry["priority"])
        # debian-installer sections must carry udebs, and only they may
        is_di = pkg_section.find("debian-installer") != -1
        if is_di != (pkg_type == "udeb"):
            entry["section id"] = -1
        # the "source" priority must go with .dsc entries, and only them
        if (pkg_priority == "source") != (pkg_type == "dsc"):
            entry["priority id"] = -1
136
137
138 ###############################################################################
139
140 # Convenience wrapper to carry around all the package information in
141
class Pkg:
    """Trivial attribute bag carrying all the per-upload package state."""
    def __init__(self, **kwds):
        for key, value in kwds.items():
            setattr(self, key, value)

    def update(self, **kwds):
        for key, value in kwds.items():
            setattr(self, key, value)
148
149 ###############################################################################
150
151 class Upload:
152
    def __init__(self, Cnf):
        """Set up an Upload helper: per-upload package state, the static
        part of the mail substitution map and the projectB database
        connection (opens the connection as a side effect)."""
        self.Cnf = Cnf
        # Running totals maintained by accept()
        self.accept_count = 0
        self.accept_bytes = 0L
        # Per-upload state; cleared by init_vars() between uploads
        self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
                       legacy_source_untouchable = {})

        # Initialize the substitution template mapping global
        Subst = self.Subst = {}
        Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"]
        Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"]
        Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"]
        Subst["__DAK_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]

        self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
        database.init(Cnf, self.projectB)
169
170     ###########################################################################
171
172     def init_vars (self):
173         for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
174             exec "self.pkg.%s.clear();" % (i)
175         self.pkg.orig_tar_id = None
176         self.pkg.orig_tar_location = ""
177         self.pkg.orig_tar_gz = None
178
179     ###########################################################################
180
181     def update_vars (self):
182         dump_filename = self.pkg.changes_file[:-8]+".dak"
183         dump_file = utils.open_file(dump_filename)
184         p = cPickle.Unpickler(dump_file)
185         for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
186             exec "self.pkg.%s.update(p.load());" % (i)
187         for i in [ "orig_tar_id", "orig_tar_location" ]:
188             exec "self.pkg.%s = p.load();" % (i)
189         dump_file.close()
190
191     ###########################################################################
192
193     # This could just dump the dictionaries as is, but I'd like to
194     # avoid this so there's some idea of what process-accepted &
195     # process-new use from process-unchecked
196
197     def dump_vars(self, dest_dir):
198         for i in [ "changes", "dsc", "files", "dsc_files",
199                    "legacy_source_untouchable", "orig_tar_id", "orig_tar_location" ]:
200             exec "%s = self.pkg.%s;" % (i,i)
201         dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".dak")
202         dump_file = utils.open_file(dump_filename, 'w')
203         try:
204             os.chmod(dump_filename, 0660)
205         except OSError, e:
206             if errno.errorcode[e.errno] == 'EPERM':
207                 perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE])
208                 if perms & stat.S_IROTH:
209                     utils.fubar("%s is world readable and chmod failed." % (dump_filename))
210             else:
211                 raise
212
213         p = cPickle.Pickler(dump_file, 1)
214         for i in [ "d_changes", "d_dsc", "d_files", "d_dsc_files" ]:
215             exec "%s = {}" % i
216         ## files
217         for file in files.keys():
218             d_files[file] = {}
219             for i in [ "package", "version", "architecture", "type", "size",
220                        "md5sum", "component", "location id", "source package",
221                        "source version", "maintainer", "dbtype", "files id",
222                        "new", "section", "priority", "othercomponents",
223                        "pool name", "original component" ]:
224                 if files[file].has_key(i):
225                     d_files[file][i] = files[file][i]
226         ## changes
227         # Mandatory changes fields
228         for i in [ "distribution", "source", "architecture", "version",
229                    "maintainer", "urgency", "fingerprint", "changedby822",
230                    "changedby2047", "changedbyname", "maintainer822",
231                    "maintainer2047", "maintainername", "maintaineremail",
232                    "closes", "changes" ]:
233             d_changes[i] = changes[i]
234         # Optional changes fields
235         for i in [ "changed-by", "filecontents", "format", "process-new note", "adv id", "distribution-version",
236                    "sponsoremail" ]:
237             if changes.has_key(i):
238                 d_changes[i] = changes[i]
239         ## dsc
240         for i in [ "source", "version", "maintainer", "fingerprint",
241                    "uploaders", "bts changelog", "dm-upload-allowed" ]:
242             if dsc.has_key(i):
243                 d_dsc[i] = dsc[i]
244         ## dsc_files
245         for file in dsc_files.keys():
246             d_dsc_files[file] = {}
247             # Mandatory dsc_files fields
248             for i in [ "size", "md5sum" ]:
249                 d_dsc_files[file][i] = dsc_files[file][i]
250             # Optional dsc_files fields
251             for i in [ "files id" ]:
252                 if dsc_files[file].has_key(i):
253                     d_dsc_files[file][i] = dsc_files[file][i]
254
255         for i in [ d_changes, d_dsc, d_files, d_dsc_files,
256                    legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
257             p.dump(i)
258         dump_file.close()
259
260     ###########################################################################
261
262     # Set up the per-package template substitution mappings
263
264     def update_subst (self, reject_message = ""):
265         Subst = self.Subst
266         changes = self.pkg.changes
267         # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
268         if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
269             changes["architecture"] = { "Unknown" : "" }
270         # and maintainer2047 may not exist.
271         if not changes.has_key("maintainer2047"):
272             changes["maintainer2047"] = self.Cnf["Dinstall::MyEmailAddress"]
273
274         Subst["__ARCHITECTURE__"] = " ".join(changes["architecture"].keys())
275         Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
276         Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "")
277
278         # For source uploads the Changed-By field wins; otherwise Maintainer wins.
279         if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
280             Subst["__MAINTAINER_FROM__"] = changes["changedby2047"]
281             Subst["__MAINTAINER_TO__"] = "%s, %s" % (changes["changedby2047"],
282                                                      changes["maintainer2047"])
283             Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown")
284         else:
285             Subst["__MAINTAINER_FROM__"] = changes["maintainer2047"]
286             Subst["__MAINTAINER_TO__"] = changes["maintainer2047"]
287             Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown")
288
289         if "sponsoremail" in changes:
290             Subst["__MAINTAINER_TO__"] += ", %s"%changes["sponsoremail"]
291
292         if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
293             Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])
294
295         # Apply any global override of the Maintainer field
296         if self.Cnf.get("Dinstall::OverrideMaintainer"):
297             Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"]
298             Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"]
299
300         Subst["__REJECT_MESSAGE__"] = reject_message
301         Subst["__SOURCE__"] = changes.get("source", "Unknown")
302         Subst["__VERSION__"] = changes.get("version", "Unknown")
303
304     ###########################################################################
305
    def build_summaries(self):
        """Build human-readable summaries of the upload for mails/prompts.

        Returns (summary, short_summary): short_summary is the per-file
        listing; summary additionally carries the changelog entry (for
        byhand/NEW uploads), the override entries and any announcement
        text from announce()."""
        changes = self.pkg.changes
        files = self.pkg.files

        byhand = summary = new = ""

        # changes["distribution"] may not exist in corner cases
        # (e.g. unreadable changes files)
        if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
            changes["distribution"] = {}

        override_summary ="";
        file_keys = files.keys()
        file_keys.sort()
        for file in file_keys:
            if files[file].has_key("byhand"):
                byhand = 1
                summary += file + " byhand\n"
            elif files[file].has_key("new"):
                new = 1
                summary += "(new) %s %s %s\n" % (file, files[file]["priority"], files[file]["section"])
                if files[file].has_key("othercomponents"):
                    summary += "WARNING: Already present in %s distribution.\n" % (files[file]["othercomponents"])
                # For NEW .debs, include the package description from the
                # control file so the reviewer can see what it is.
                if files[file]["type"] == "deb":
                    deb_fh = utils.open_file(file)
                    summary += apt_pkg.ParseSection(apt_inst.debExtractControl(deb_fh))["Description"] + '\n'
                    deb_fh.close()
            else:
                # Known package: show where in the pool it will land
                files[file]["pool name"] = utils.poolify (changes.get("source",""), files[file]["component"])
                destination = self.Cnf["Dir::PoolRoot"] + files[file]["pool name"] + file
                summary += file + "\n  to " + destination + "\n"
                if not files[file].has_key("type"):
                    files[file]["type"] = "unknown"
                if files[file]["type"] in ["deb", "udeb", "dsc"]:
                    # (queue/unchecked), there we have override entries already, use them
                    # (process-new), there we dont have override entries, use the newly generated ones.
                    override_prio = files[file].get("override priority", files[file]["priority"])
                    override_sect = files[file].get("override section", files[file]["section"])
                    override_summary += "%s - %s %s\n" % (file, override_prio, override_sect)

        short_summary = summary

        # This is for direport's benefit...
        f = re_fdnic.sub("\n .\n", changes.get("changes",""))

        if byhand or new:
            summary += "Changes: " + f

        summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"

        # Dry-run announce (action=0) just to get the summary text
        summary += self.announce(short_summary, 0)

        return (summary, short_summary)
359
360     ###########################################################################
361
362     def close_bugs (self, summary, action):
363         changes = self.pkg.changes
364         Subst = self.Subst
365         Cnf = self.Cnf
366
367         bugs = changes["closes"].keys()
368
369         if not bugs:
370             return summary
371
372         bugs.sort()
373         summary += "Closing bugs: "
374         for bug in bugs:
375             summary += "%s " % (bug)
376             if action:
377                 Subst["__BUG_NUMBER__"] = bug
378                 if changes["distribution"].has_key("stable"):
379                     Subst["__STABLE_WARNING__"] = """
380 Note that this package is not part of the released stable Debian
381 distribution.  It may have dependencies on other unreleased software,
382 or other instabilities.  Please take care if you wish to install it.
383 The update will eventually make its way into the next released Debian
384 distribution."""
385                 else:
386                     Subst["__STABLE_WARNING__"] = ""
387                     mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-close")
388                     utils.send_mail (mail_message)
389         if action:
390             self.Logger.log(["closing bugs"]+bugs)
391         summary += "\n"
392
393         return summary
394
395     ###########################################################################
396
397     def announce (self, short_summary, action):
398         Subst = self.Subst
399         Cnf = self.Cnf
400         changes = self.pkg.changes
401
402         # Only do announcements for source uploads with a recent dpkg-dev installed
403         if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):
404             return ""
405
406         lists_done = {}
407         summary = ""
408         Subst["__SHORT_SUMMARY__"] = short_summary
409
410         for dist in changes["distribution"].keys():
411             list = Cnf.Find("Suite::%s::Announce" % (dist))
412             if list == "" or lists_done.has_key(list):
413                 continue
414             lists_done[list] = 1
415             summary += "Announcing to %s\n" % (list)
416
417             if action:
418                 Subst["__ANNOUNCE_LIST_ADDRESS__"] = list
419                 if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
420                     Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"])
421                 mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.announce")
422                 utils.send_mail (mail_message)
423
424         if Cnf.FindB("Dinstall::CloseBugs"):
425             summary = self.close_bugs(summary, action)
426
427         return summary
428
429     ###########################################################################
430
    def accept (self, summary, short_summary):
        """Accept the upload: move everything into Dir::Queue::Accepted,
        send mail/announcements as configured, write the BTS
        version-tracking helper files and register the files with the
        buildd queue."""
        Cnf = self.Cnf
        Subst = self.Subst
        files = self.pkg.files
        changes = self.pkg.changes
        changes_file = self.pkg.changes_file
        dsc = self.pkg.dsc

        print "Accepting."
        self.Logger.log(["Accepting changes",changes_file])

        # Save the upload state first so the accepted queue is self-contained
        self.dump_vars(Cnf["Dir::Queue::Accepted"])

        # Move all the files into the accepted directory
        utils.move(changes_file, Cnf["Dir::Queue::Accepted"])
        file_keys = files.keys()
        for file in file_keys:
            utils.move(file, Cnf["Dir::Queue::Accepted"])
            self.accept_bytes += float(files[file]["size"])
        self.accept_count += 1

        # Send accept mail, announce to lists, close bugs and check for
        # override disparities
        if not Cnf["Dinstall::Options::No-Mail"]:
            Subst["__SUITE__"] = ""
            Subst["__SUMMARY__"] = summary
            mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
            utils.send_mail(mail_message)
            self.announce(short_summary, 1)


        ## Helper stuff for DebBugs Version Tracking
        if Cnf.Find("Dir::Queue::BTSVersionTrack"):
            # ??? once queue/* is cleared on *.d.o and/or reprocessed
            # the conditionalization on dsc["bts changelog"] should be
            # dropped.

            # Write out the version history from the changelog
            if changes["architecture"].has_key("source") and \
               dsc.has_key("bts changelog"):

                # Write to a dot-prefixed temp file and rename it into
                # place so readers never see a half-written file.
                temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                                    dotprefix=1, perms=0644)
                version_history = utils.open_file(temp_filename, 'w')
                version_history.write(dsc["bts changelog"])
                version_history.close()
                filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                      changes_file[:-8]+".versions")
                os.rename(temp_filename, filename)

            # Write out the binary -> source mapping.
            temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                                dotprefix=1, perms=0644)
            debinfo = utils.open_file(temp_filename, 'w')
            for file in file_keys:
                f = files[file]
                if f["type"] == "deb":
                    line = " ".join([f["package"], f["version"],
                                     f["architecture"], f["source package"],
                                     f["source version"]])
                    debinfo.write(line+"\n")
            debinfo.close()
            filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                  changes_file[:-8]+".debinfo")
            os.rename(temp_filename, filename)

        self.queue_build("accepted", Cnf["Dir::Queue::Accepted"])
498
499     ###########################################################################
500
    def queue_build (self, queue, path):
        """Register the upload's files with the named build queue.

        For each target suite listed in Dinstall::QueueBuildSuites the
        files under 'path' are copied (security setup) or symlinked into
        Dir::QueueBuild and recorded in the queue_build table, so
        apt-ftparchive can pick them up; a pool .orig.tar.gz is symlinked
        in as well when needed."""
        Cnf = self.Cnf
        Subst = self.Subst
        files = self.pkg.files
        changes = self.pkg.changes
        changes_file = self.pkg.changes_file
        dsc = self.pkg.dsc
        file_keys = files.keys()

        ## Special support to enable clean auto-building of queued packages
        queue_id = database.get_or_set_queue_id(queue)

        # All suites are handled in a single transaction
        self.projectB.query("BEGIN WORK")
        for suite in changes["distribution"].keys():
            if suite not in Cnf.ValueList("Dinstall::QueueBuildSuites"):
                continue
            suite_id = database.get_suite_id(suite)
            dest_dir = Cnf["Dir::QueueBuild"]
            if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                # Security archive keeps one subdirectory per suite
                dest_dir = os.path.join(dest_dir, suite)
            for file in file_keys:
                src = os.path.join(path, file)
                dest = os.path.join(dest_dir, file)
                if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                    # Copy it since the original won't be readable by www-data
                    utils.copy(src, dest)
                else:
                    # Create a symlink to it
                    os.symlink(src, dest)
                # Add it to the list of packages for later processing by apt-ftparchive
                self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
            # If the .orig.tar.gz is in the pool, create a symlink to
            # it (if one doesn't already exist)
            if self.pkg.orig_tar_id:
                # Determine the .orig.tar.gz file name
                # NOTE(review): if no dsc_files key ends in .orig.tar.gz,
                # 'filename' is left unbound below -- presumably
                # orig_tar_id being set guarantees one exists; confirm.
                for dsc_file in self.pkg.dsc_files.keys():
                    if dsc_file.endswith(".orig.tar.gz"):
                        filename = dsc_file
                dest = os.path.join(dest_dir, filename)
                # If it doesn't exist, create a symlink
                if not os.path.exists(dest):
                    # Find the .orig.tar.gz in the pool
                    q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id))
                    ql = q.getresult()
                    if not ql:
                        utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id))
                    src = os.path.join(ql[0][0], ql[0][1])
                    os.symlink(src, dest)
                    # Add it to the list of packages for later processing by apt-ftparchive
                    self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
                # if it does, update things to ensure it's not removed prematurely
                else:
                    self.projectB.query("UPDATE queue_build SET in_queue = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id))

        self.projectB.query("COMMIT WORK")
556
557     ###########################################################################
558
559     def check_override (self):
560         Subst = self.Subst
561         changes = self.pkg.changes
562         files = self.pkg.files
563         Cnf = self.Cnf
564
565         # Abandon the check if:
566         #  a) it's a non-sourceful upload
567         #  b) override disparity checks have been disabled
568         #  c) we're not sending mail
569         if not changes["architecture"].has_key("source") or \
570            not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
571            Cnf["Dinstall::Options::No-Mail"]:
572             return
573
574         summary = ""
575         file_keys = files.keys()
576         file_keys.sort()
577         for file in file_keys:
578             if not files[file].has_key("new") and files[file]["type"] == "deb":
579                 section = files[file]["section"]
580                 override_section = files[file]["override section"]
581                 if section.lower() != override_section.lower() and section != "-":
582                     summary += "%s: package says section is %s, override says %s.\n" % (file, section, override_section)
583                 priority = files[file]["priority"]
584                 override_priority = files[file]["override priority"]
585                 if priority != override_priority and priority != "-":
586                     summary += "%s: package says priority is %s, override says %s.\n" % (file, priority, override_priority)
587
588         if summary == "":
589             return
590
591         Subst["__SUMMARY__"] = summary
592         mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/process-unchecked.override-disparity")
593         utils.send_mail(mail_message)
594
595     ###########################################################################
596
    def force_reject (self, files):
        """Forcefully move files from the current directory to the
           reject directory.  If any file already exists in the reject
           directory it will be moved to the morgue to make way for
           the new file.

           Each destination slot is claimed with an O_CREAT|O_EXCL open
           so a racing process can't swap in a different file between
           our check and the move."""

        Cnf = self.Cnf

        for file in files:
            # Skip any files which don't exist or which we don't have permission to copy.
            if os.access(file,os.R_OK) == 0:
                continue
            dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file)
            try:
                # Claim the destination atomically
                dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
            except OSError, e:
                # File exists?  Let's try and move it to the morgue
                if errno.errorcode[e.errno] == 'EEXIST':
                    morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file)
                    try:
                        morgue_file = utils.find_next_free(morgue_file)
                    except utils.tried_too_hard_exc:
                        # Something's either gone badly Pete Tong, or
                        # someone is trying to exploit us.
                        utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file))
                        return
                    utils.move(dest_file, morgue_file, perms=0660)
                    try:
                        # Retry the exclusive claim on the now-free slot
                        dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
                    except OSError, e:
                        # Likewise
                        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file))
                        return
                else:
                    raise
            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file, dest_file, 1, perms=0660)
            os.close(dest_fd)
636
637     ###########################################################################
638
    def do_reject (self, manual = 0, reject_message = ""):
        """Reject the current upload.

        If 'manual' is true and no reject_message was supplied, spawn
        $EDITOR (default vi) so the operator can write one; the operator
        may Re-edit, Abandon (returns 1) or Quit (exits).  The upload's
        files and .changes are then moved to the reject directory, a
        <foo>.reason file is written next to them and, unless
        Dinstall::Options::No-Mail is set, a rejection mail is sent.
        Returns 0 on a completed rejection, 1 if abandoned.
        """
        # If we weren't given a manual rejection message, spawn an
        # editor so the user can add one in...
        if manual and not reject_message:
            temp_filename = utils.temp_filename()
            editor = os.environ.get("EDITOR","vi")
            answer = 'E'
            while answer == 'E':
                os.system("%s %s" % (editor, temp_filename))
                temp_fh = utils.open_file(temp_filename)
                reject_message = "".join(temp_fh.readlines())
                temp_fh.close()
                print "Reject message:"
                print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
                prompt = "[R]eject, Edit, Abandon, Quit ?"
                answer = "XXX"
                # Keep prompting until the answer is one of the letters in
                # the prompt; an empty answer takes the [bracketed] default.
                while prompt.find(answer) == -1:
                    answer = utils.our_raw_input(prompt)
                    m = re_default_answer.search(prompt)
                    if answer == "":
                        answer = m.group(1)
                    answer = answer[:1].upper()
            os.unlink(temp_filename)
            if answer == 'A':
                return 1
            elif answer == 'Q':
                sys.exit(0)

        print "Rejecting.\n"

        Cnf = self.Cnf
        Subst = self.Subst
        pkg = self.pkg

        # <foo>.changes -> <foo>.reason, placed in the reject directory
        reason_filename = pkg.changes_file[:-8] + ".reason"
        reason_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename

        # Move all the files into the reject directory
        reject_files = pkg.files.keys() + [pkg.changes_file]
        self.force_reject(reject_files)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reason_filename):
            os.unlink(reason_filename)
        # O_EXCL: fail loudly if the file reappeared between the unlink
        # above and this open, rather than following a planted symlink.
        reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)

        if not manual:
            # Automatic rejection: sender is dak itself and the mail is
            # flagged with the automatic-rejection headers.
            Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
            Subst["__MANUAL_REJECT_MESSAGE__"] = ""
            Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
            os.write(reason_fd, reject_message)
            reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
        else:
            # Build up the rejection email
            user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"])

            Subst["__REJECTOR_ADDRESS__"] = user_email_address
            Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
            Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
            reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
            # Write the rejection email out as the <foo>.reason file
            os.write(reason_fd, reject_mail_message)

        os.close(reason_fd)

        # Send the rejection mail if appropriate
        if not Cnf["Dinstall::Options::No-Mail"]:
            utils.send_mail(reject_mail_message)

        self.Logger.log(["rejected", pkg.changes_file])
        return 0
711
712     ################################################################################
713
714     # Ensure that source exists somewhere in the archive for the binary
715     # upload being processed.
716     #
717     # (1) exact match                      => 1.0-3
718     # (2) Bin-only NMU                     => 1.0-3+b1 , 1.0-3.1+b1
719
720     def source_exists (self, package, source_version, suites = ["any"]):
721         okay = 1
722         for suite in suites:
723             if suite == "any":
724                 que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
725                     (package)
726             else:
727                 # source must exist in suite X, or in some other suite that's
728                 # mapped to X, recursively... silent-maps are counted too,
729                 # unreleased-maps aren't.
730                 maps = self.Cnf.ValueList("SuiteMappings")[:]
731                 maps.reverse()
732                 maps = [ m.split() for m in maps ]
733                 maps = [ (x[1], x[2]) for x in maps
734                                 if x[0] == "map" or x[0] == "silent-map" ]
735                 s = [suite]
736                 for x in maps:
737                         if x[1] in s and x[0] not in s:
738                                 s.append(x[0])
739
740                 que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, " OR ".join(["su.suite_name = '%s'" % a for a in s]))
741             q = self.projectB.query(que)
742
743             # Reduce the query results to a list of version numbers
744             ql = [ i[0] for i in q.getresult() ]
745
746             # Try (1)
747             if source_version in ql:
748                 continue
749
750             # Try (2)
751             orig_source_version = re_bin_only_nmu.sub('', source_version)
752             if orig_source_version in ql:
753                 continue
754
755             # No source found...
756             okay = 0
757             break
758         return okay
759
760     ################################################################################
761     
762     def in_override_p (self, package, component, suite, binary_type, file):
763         files = self.pkg.files
764
765         if binary_type == "": # must be source
766             type = "dsc"
767         else:
768             type = binary_type
769
770         # Override suite name; used for example with proposed-updates
771         if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
772             suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)]
773
774         # Avoid <undef> on unknown distributions
775         suite_id = database.get_suite_id(suite)
776         if suite_id == -1:
777             return None
778         component_id = database.get_component_id(component)
779         type_id = database.get_override_type_id(type)
780
781         q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
782                            % (package, suite_id, component_id, type_id))
783         result = q.getresult()
784         # If checking for a source package fall back on the binary override type
785         if type == "dsc" and not result:
786             deb_type_id = database.get_override_type_id("deb")
787             udeb_type_id = database.get_override_type_id("udeb")
788             q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
789                                % (package, suite_id, component_id, deb_type_id, udeb_type_id))
790             result = q.getresult()
791
792         # Remember the section and priority so we can check them later if appropriate
793         if result:
794             files[file]["override section"] = result[0][0]
795             files[file]["override priority"] = result[0][1]
796
797         return result
798
799     ################################################################################
800
801     def reject (self, str, prefix="Rejected: "):
802         if str:
803             # Unlike other rejects we add new lines first to avoid trailing
804             # new lines when this message is passed back up to a caller.
805             if self.reject_message:
806                 self.reject_message += "\n"
807             self.reject_message += prefix + str
808
809     ################################################################################
810
811     def get_anyversion(self, query_result, suite):
812         anyversion=None
813         anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
814         for (v, s) in query_result:
815             if s in [ x.lower() for x in anysuite ]:
816                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
817                     anyversion=v
818         return anyversion
819
820     ################################################################################
821
822     def cross_suite_version_check(self, query_result, file, new_version):
823         """Ensure versions are newer than existing packages in target
824         suites and that cross-suite version checking rules as
825         set out in the conf file are satisfied."""
826
827         # Check versions for each target suite
828         for target_suite in self.pkg.changes["distribution"].keys():
829             must_be_newer_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
830             must_be_older_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
831             # Enforce "must be newer than target suite" even if conffile omits it
832             if target_suite not in must_be_newer_than:
833                 must_be_newer_than.append(target_suite)
834             for entry in query_result:
835                 existent_version = entry[0]
836                 suite = entry[1]
837                 if suite in must_be_newer_than and \
838                    apt_pkg.VersionCompare(new_version, existent_version) < 1:
839                     self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
840                 if suite in must_be_older_than and \
841                    apt_pkg.VersionCompare(new_version, existent_version) > -1:
842                     ch = self.pkg.changes
843                     cansave = 0
844                     if ch.get('distribution-version', {}).has_key(suite):
845                         # we really use the other suite, ignoring the conflicting one ...
846                         addsuite = ch["distribution-version"][suite]
847                     
848                         add_version = self.get_anyversion(query_result, addsuite)
849                         target_version = self.get_anyversion(query_result, target_suite)
850                     
851                         if not add_version:
852                             # not add_version can only happen if we map to a suite
853                             # that doesn't enhance the suite we're propup'ing from.
854                             # so "propup-ver x a b c; map a d" is a problem only if
855                             # d doesn't enhance a.
856                             #
857                             # i think we could always propagate in this case, rather
858                             # than complaining. either way, this isn't a REJECT issue
859                             #
860                             # And - we really should complain to the dorks who configured dak
861                             self.reject("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite), "Warning: ")
862                             self.pkg.changes.setdefault("propdistribution", {})
863                             self.pkg.changes["propdistribution"][addsuite] = 1
864                             cansave = 1
865                         elif not target_version:
866                             # not targets_version is true when the package is NEW
867                             # we could just stick with the "...old version..." REJECT
868                             # for this, I think.
869                             self.reject("Won't propogate NEW packages.")
870                         elif apt_pkg.VersionCompare(new_version, add_version) < 0:
871                             # propogation would be redundant. no need to reject though.
872                             self.reject("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite), "Warning: ")
873                             cansave = 1
874                         elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
875                              apt_pkg.VersionCompare(add_version, target_version) >= 0:
876                             # propogate!!
877                             self.reject("Propogating upload to %s" % (addsuite), "Warning: ")
878                             self.pkg.changes.setdefault("propdistribution", {})
879                             self.pkg.changes["propdistribution"][addsuite] = 1
880                             cansave = 1
881                 
882                     if not cansave:
883                         self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
884
885     ################################################################################
886
887     def check_binary_against_db(self, file):
888         self.reject_message = ""
889         files = self.pkg.files
890
891         # Ensure version is sane
892         q = self.projectB.query("""
893 SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
894                                      architecture a
895  WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
896    AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
897                                 % (files[file]["package"],
898                                    files[file]["architecture"]))
899         self.cross_suite_version_check(q.getresult(), file, files[file]["version"])
900
901         # Check for any existing copies of the file
902         q = self.projectB.query("""
903 SELECT b.id FROM binaries b, architecture a
904  WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
905    AND a.id = b.architecture"""
906                                 % (files[file]["package"],
907                                    files[file]["version"],
908                                    files[file]["architecture"]))
909         if q.getresult():
910             self.reject("%s: can not overwrite existing copy already in the archive." % (file))
911
912         return self.reject_message
913
914     ################################################################################
915
916     def check_source_against_db(self, file):
917         self.reject_message = ""
918         dsc = self.pkg.dsc
919
920         # Ensure version is sane
921         q = self.projectB.query("""
922 SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
923  WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")))
924         self.cross_suite_version_check(q.getresult(), file, dsc.get("version"))
925
926         return self.reject_message
927
928     ################################################################################
929
930     # **WARNING**
931     # NB: this function can remove entries from the 'files' index [if
932     # the .orig.tar.gz is a duplicate of the one in the archive]; if
933     # you're iterating over 'files' and call this function as part of
934     # the loop, be sure to add a check to the top of the loop to
935     # ensure you haven't just tried to dereference the deleted entry.
936     # **WARNING**
937
938     def check_dsc_against_db(self, file):
939         self.reject_message = ""
940         files = self.pkg.files
941         dsc_files = self.pkg.dsc_files
942         legacy_source_untouchable = self.pkg.legacy_source_untouchable
943         self.pkg.orig_tar_gz = None
944
945         # Try and find all files mentioned in the .dsc.  This has
946         # to work harder to cope with the multiple possible
947         # locations of an .orig.tar.gz.
948         # The ordering on the select is needed to pick the newest orig
949         # when it exists in multiple places.
950         for dsc_file in dsc_files.keys():
951             found = None
952             if files.has_key(dsc_file):
953                 actual_md5 = files[dsc_file]["md5sum"]
954                 actual_size = int(files[dsc_file]["size"])
955                 found = "%s in incoming" % (dsc_file)
956                 # Check the file does not already exist in the archive
957                 q = self.projectB.query("SELECT f.size, f.md5sum, l.path, f.filename FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location ORDER BY f.id DESC" % (dsc_file))
958                 ql = q.getresult()
959                 # Strip out anything that isn't '%s' or '/%s$'
960                 for i in ql:
961                     if i[3] != dsc_file and i[3][-(len(dsc_file)+1):] != '/'+dsc_file:
962                         ql.remove(i)
963
964                 # "[dak] has not broken them.  [dak] has fixed a
965                 # brokenness.  Your crappy hack exploited a bug in
966                 # the old dinstall.
967                 #
968                 # "(Come on!  I thought it was always obvious that
969                 # one just doesn't release different files with
970                 # the same name and version.)"
971                 #                        -- ajk@ on d-devel@l.d.o
972
973                 if ql:
974                     # Ignore exact matches for .orig.tar.gz
975                     match = 0
976                     if dsc_file.endswith(".orig.tar.gz"):
977                         for i in ql:
978                             if files.has_key(dsc_file) and \
979                                int(files[dsc_file]["size"]) == int(i[0]) and \
980                                files[dsc_file]["md5sum"] == i[1]:
981                                 self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ")
982                                 del files[dsc_file]
983                                 self.pkg.orig_tar_gz = i[2] + i[3]
984                                 match = 1
985
986                     if not match:
987                         self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file))
988             elif dsc_file.endswith(".orig.tar.gz"):
989                 # Check in the pool
990                 q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
991                 ql = q.getresult()
992                 # Strip out anything that isn't '%s' or '/%s$'
993                 for i in ql:
994                     if i[1] != dsc_file and i[1][-(len(dsc_file)+1):] != '/'+dsc_file:
995                         ql.remove(i)
996
997                 if ql:
998                     # Unfortunately, we may get more than one match here if,
999                     # for example, the package was in potato but had an -sa
1000                     # upload in woody.  So we need to choose the right one.
1001
1002                     x = ql[0]; # default to something sane in case we don't match any or have only one
1003
1004                     if len(ql) > 1:
1005                         for i in ql:
1006                             old_file = i[0] + i[1]
1007                             old_file_fh = utils.open_file(old_file)
1008                             actual_md5 = apt_pkg.md5sum(old_file_fh)
1009                             old_file_fh.close()
1010                             actual_size = os.stat(old_file)[stat.ST_SIZE]
1011                             if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
1012                                 x = i
1013                             else:
1014                                 legacy_source_untouchable[i[3]] = ""
1015
1016                     old_file = x[0] + x[1]
1017                     old_file_fh = utils.open_file(old_file)
1018                     actual_md5 = apt_pkg.md5sum(old_file_fh)
1019                     old_file_fh.close()
1020                     actual_size = os.stat(old_file)[stat.ST_SIZE]
1021                     found = old_file
1022                     suite_type = x[2]
1023                     dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
1024                     # See install() in process-accepted...
1025                     self.pkg.orig_tar_id = x[3]
1026                     self.pkg.orig_tar_gz = old_file
1027                     if suite_type == "legacy" or suite_type == "legacy-mixed":
1028                         self.pkg.orig_tar_location = "legacy"
1029                     else:
1030                         self.pkg.orig_tar_location = x[4]
1031                 else:
1032                     # Not there? Check the queue directories...
1033
1034                     in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file)
1035                     # See process_it() in 'dak process-unchecked' for explanation of this
1036                     # in_unchecked check dropped by ajt 2007-08-28, how did that
1037                     # ever make sense?
1038                     if os.path.exists(in_unchecked) and False:
1039                         return (self.reject_message, in_unchecked)
1040                     else:
1041                         for dir in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates" ]:
1042                             in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (dir)],dsc_file)
1043                             if os.path.exists(in_otherdir):
1044                                 in_otherdir_fh = utils.open_file(in_otherdir)
1045                                 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
1046                                 in_otherdir_fh.close()
1047                                 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
1048                                 found = in_otherdir
1049                                 self.pkg.orig_tar_gz = in_otherdir
1050
1051                     if not found:
1052                         self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file))
1053                         self.pkg.orig_tar_gz = -1
1054                         continue
1055             else:
1056                 self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file))
1057                 continue
1058             if actual_md5 != dsc_files[dsc_file]["md5sum"]:
1059                 self.reject("md5sum for %s doesn't match %s." % (found, file))
1060             if actual_size != int(dsc_files[dsc_file]["size"]):
1061                 self.reject("size for %s doesn't match %s." % (found, file))
1062
1063         return (self.reject_message, None)
1064
1065     def do_query(self, q):
1066         sys.stderr.write("query: \"%s\" ... " % (q))
1067         before = time.time()
1068         r = self.projectB.query(q)
1069         time_diff = time.time()-before
1070         sys.stderr.write("took %.3f seconds.\n" % (time_diff))
1071         return r