]> git.decadent.org.uk Git - dak.git/blob - daklib/queue.py
Exception handling
[dak.git] / daklib / queue.py
1 #!/usr/bin/env python
2
3 # Queue utility functions for dak
4 # Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006  James Troup <james@nocrew.org>
5
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 2 of the License, or
9 # (at your option) any later version.
10
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14 # GNU General Public License for more details.
15
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software
18 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
19
20 ###############################################################################
21
22 import cPickle, errno, os, pg, re, stat, sys, time
23 import apt_inst, apt_pkg
24 import utils, database
25 from dak_exceptions import *
26
27 from types import *
28
29 ###############################################################################
30
# Precompiled regular expressions used by this module.
re_isanum = re.compile (r"^\d+$")            # entirely-numeric string (e.g. a bug number)
re_default_answer = re.compile(r"\[(.*)\]")  # bracketed default answer in a prompt like "[R]eject..."
re_fdnic = re.compile(r"\n\n")               # two consecutive newlines (blank-line separator)
re_bin_only_nmu = re.compile(r"\+b\d+$")     # binary-only NMU version suffix, e.g. "+b1"
35
36 ################################################################################
37
38 # Determine what parts in a .changes are NEW
39
40 def determine_new(changes, files, projectB, warn=1):
41     new = {}
42
43     # Build up a list of potentially new things
44     for file_entry in files.keys():
45         f = files[file_entry]
46         # Skip byhand elements
47         if f["type"] == "byhand":
48             continue
49         pkg = f["package"]
50         priority = f["priority"]
51         section = f["section"]
52         file_type = get_type(f)
53         component = f["component"]
54
55         if file_type == "dsc":
56             priority = "source"
57         if not new.has_key(pkg):
58             new[pkg] = {}
59             new[pkg]["priority"] = priority
60             new[pkg]["section"] = section
61             new[pkg]["type"] = file_type
62             new[pkg]["component"] = component
63             new[pkg]["files"] = []
64         else:
65             old_type = new[pkg]["type"]
66             if old_type != file_type:
67                 # source gets trumped by deb or udeb
68                 if old_type == "dsc":
69                     new[pkg]["priority"] = priority
70                     new[pkg]["section"] = section
71                     new[pkg]["type"] = file_type
72                     new[pkg]["component"] = component
73         new[pkg]["files"].append(file_entry)
74         if f.has_key("othercomponents"):
75             new[pkg]["othercomponents"] = f["othercomponents"]
76
77     for suite in changes["suite"].keys():
78         suite_id = database.get_suite_id(suite)
79         for pkg in new.keys():
80             component_id = database.get_component_id(new[pkg]["component"])
81             type_id = database.get_override_type_id(new[pkg]["type"])
82             q = projectB.query("SELECT package FROM override WHERE package = '%s' AND suite = %s AND component = %s AND type = %s" % (pkg, suite_id, component_id, type_id))
83             ql = q.getresult()
84             if ql:
85                 for file_entry in new[pkg]["files"]:
86                     if files[file_entry].has_key("new"):
87                         del files[file_entry]["new"]
88                 del new[pkg]
89
90     if warn:
91         if changes["suite"].has_key("stable"):
92             print "WARNING: overrides will be added for stable!"
93             if changes["suite"].has_key("oldstable"):
94                 print "WARNING: overrides will be added for OLDstable!"
95         for pkg in new.keys():
96             if new[pkg].has_key("othercomponents"):
97                 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
98
99     return new
100
101 ################################################################################
102
def get_type(f):
    """Return the override type ("dsc" or the entry's dbtype) for the
    file entry f, validating it against the override type table via
    database.get_override_type_id().  Aborts via utils.fubar() on an
    unrecognised or invalid type."""
    # Determine the type
    if f.has_key("dbtype"):
        file_type = f["dbtype"]
    elif f["type"] in [ "orig.tar.gz", "orig.tar.bz2", "tar.gz", "tar.bz2", "diff.gz", "diff.bz2", "dsc" ]:
        file_type = "dsc"
    else:
        # Bug fix: the original interpolated file_type here, but it is
        # unbound on this branch (NameError); report the actual
        # unrecognised file type instead.
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (f["type"]))

    # Validate the override type
    type_id = database.get_override_type_id(file_type)
    if type_id == -1:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))

    return file_type
118
119 ################################################################################
120
121 # check if section/priority values are valid
122
def check_valid(new):
    """Resolve section/priority names to database ids for every NEW
    package, flagging invalid combinations by setting the id to -1."""
    for pkg in new.keys():
        entry = new[pkg]
        section = entry["section"]
        priority = entry["priority"]
        file_type = entry["type"]
        entry["section id"] = database.get_section_id(section)
        entry["priority id"] = database.get_priority_id(priority)
        # Sanity checks: debian-installer sections must contain udebs,
        # and only debian-installer sections may contain them.
        is_di = section.find("debian-installer") != -1
        if is_di != (file_type == "udeb"):
            entry["section id"] = -1
        # The "source" priority is reserved for (and required of) .dsc files.
        if (priority == "source") != (file_type == "dsc"):
            entry["priority id"] = -1
137
138
139 ###############################################################################
140
141 # Convenience wrapper to carry around all the package information in
142
class Pkg:
    """Trivial attribute bag used to carry all per-upload package
    information (changes, dsc, files, ...) around as one object."""

    def __init__(self, **kwds):
        # Every keyword argument becomes an instance attribute.
        for key, value in kwds.items():
            setattr(self, key, value)

    def update(self, **kwds):
        # Add or overwrite attributes after construction.
        for key, value in kwds.items():
            setattr(self, key, value)
149
150 ###############################################################################
151
152 class Upload:
153
154     def __init__(self, Cnf):
155         self.Cnf = Cnf
156         self.accept_count = 0
157         self.accept_bytes = 0L
158         self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
159                        legacy_source_untouchable = {})
160
161         # Initialize the substitution template mapping global
162         Subst = self.Subst = {}
163         Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"]
164         Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"]
165         Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"]
166         Subst["__DAK_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
167
168         self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
169         database.init(Cnf, self.projectB)
170
171     ###########################################################################
172
173     def init_vars (self):
174         for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
175             exec "self.pkg.%s.clear();" % (i)
176         self.pkg.orig_tar_id = None
177         self.pkg.orig_tar_location = ""
178         self.pkg.orig_tar_gz = None
179
180     ###########################################################################
181
182     def update_vars (self):
183         dump_filename = self.pkg.changes_file[:-8]+".dak"
184         dump_file = utils.open_file(dump_filename)
185         p = cPickle.Unpickler(dump_file)
186         for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
187             exec "self.pkg.%s.update(p.load());" % (i)
188         for i in [ "orig_tar_id", "orig_tar_location" ]:
189             exec "self.pkg.%s = p.load();" % (i)
190         dump_file.close()
191
192     ###########################################################################
193
194     # This could just dump the dictionaries as is, but I'd like to
195     # avoid this so there's some idea of what process-accepted &
196     # process-new use from process-unchecked
197
198     def dump_vars(self, dest_dir):
199         for i in [ "changes", "dsc", "files", "dsc_files",
200                    "legacy_source_untouchable", "orig_tar_id", "orig_tar_location" ]:
201             exec "%s = self.pkg.%s;" % (i,i)
202         dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".dak")
203         dump_file = utils.open_file(dump_filename, 'w')
204         try:
205             os.chmod(dump_filename, 0660)
206         except OSError, e:
207             if errno.errorcode[e.errno] == 'EPERM':
208                 perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE])
209                 if perms & stat.S_IROTH:
210                     utils.fubar("%s is world readable and chmod failed." % (dump_filename))
211             else:
212                 raise
213
214         p = cPickle.Pickler(dump_file, 1)
215         for i in [ "d_changes", "d_dsc", "d_files", "d_dsc_files" ]:
216             exec "%s = {}" % i
217         ## files
218         for file_entry in files.keys():
219             d_files[file_entry] = {}
220             for i in [ "package", "version", "architecture", "type", "size",
221                        "md5sum", "component", "location id", "source package",
222                        "source version", "maintainer", "dbtype", "files id",
223                        "new", "section", "priority", "othercomponents",
224                        "pool name", "original component" ]:
225                 if files[file_entry].has_key(i):
226                     d_files[file_entry][i] = files[file_entry][i]
227         ## changes
228         # Mandatory changes fields
229         for i in [ "distribution", "source", "architecture", "version",
230                    "maintainer", "urgency", "fingerprint", "changedby822",
231                    "changedby2047", "changedbyname", "maintainer822",
232                    "maintainer2047", "maintainername", "maintaineremail",
233                    "closes", "changes" ]:
234             d_changes[i] = changes[i]
235         # Optional changes fields
236         for i in [ "changed-by", "filecontents", "format", "process-new note", "adv id", "distribution-version",
237                    "sponsoremail" ]:
238             if changes.has_key(i):
239                 d_changes[i] = changes[i]
240         ## dsc
241         for i in [ "source", "version", "maintainer", "fingerprint",
242                    "uploaders", "bts changelog", "dm-upload-allowed" ]:
243             if dsc.has_key(i):
244                 d_dsc[i] = dsc[i]
245         ## dsc_files
246         for file_entry in dsc_files.keys():
247             d_dsc_files[file_entry] = {}
248             # Mandatory dsc_files fields
249             for i in [ "size", "md5sum" ]:
250                 d_dsc_files[file_entry][i] = dsc_files[file_entry][i]
251             # Optional dsc_files fields
252             for i in [ "files id" ]:
253                 if dsc_files[file_entry].has_key(i):
254                     d_dsc_files[file_entry][i] = dsc_files[file_entry][i]
255
256         for i in [ d_changes, d_dsc, d_files, d_dsc_files,
257                    legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
258             p.dump(i)
259         dump_file.close()
260
261     ###########################################################################
262
263     # Set up the per-package template substitution mappings
264
    def update_subst (self, reject_message = ""):
        """Refresh the per-package entries of the Subst template map
        (maintainer addresses, architecture, source, version, reject
        message) from self.pkg.changes.  Mutates self.Subst in place;
        may also repair missing/odd fields in changes itself."""
        Subst = self.Subst
        changes = self.pkg.changes
        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
            changes["architecture"] = { "Unknown" : "" }
        # and maintainer2047 may not exist.
        if not changes.has_key("maintainer2047"):
            changes["maintainer2047"] = self.Cnf["Dinstall::MyEmailAddress"]

        Subst["__ARCHITECTURE__"] = " ".join(changes["architecture"].keys())
        Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
            Subst["__MAINTAINER_FROM__"] = changes["changedby2047"]
            # Mail goes to both the uploader and the maintainer.
            Subst["__MAINTAINER_TO__"] = "%s, %s" % (changes["changedby2047"],
                                                     changes["maintainer2047"])
            Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown")
        else:
            Subst["__MAINTAINER_FROM__"] = changes["maintainer2047"]
            Subst["__MAINTAINER_TO__"] = changes["maintainer2047"]
            Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown")

        # A sponsor, if recorded, gets a copy of the mails too.
        if "sponsoremail" in changes:
            Subst["__MAINTAINER_TO__"] += ", %s"%changes["sponsoremail"]

        # Bcc the package-tracking server when one is configured.
        if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
            Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if self.Cnf.get("Dinstall::OverrideMaintainer"):
            Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"]
            Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"]

        Subst["__REJECT_MESSAGE__"] = reject_message
        Subst["__SOURCE__"] = changes.get("source", "Unknown")
        Subst["__VERSION__"] = changes.get("version", "Unknown")
304
305     ###########################################################################
306
    def build_summaries(self):
        """Build the textual summaries of this upload.

        Returns a (summary, short_summary) tuple: short_summary lists
        the files (flagging byhand/new entries and pool destinations);
        summary additionally carries the changelog text (for byhand/new
        uploads), the override entries and the announce information."""
        changes = self.pkg.changes
        files = self.pkg.files

        byhand = summary = new = ""

        # changes["distribution"] may not exist in corner cases
        # (e.g. unreadable changes files)
        if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
            changes["distribution"] = {}

        override_summary ="";
        file_keys = files.keys()
        file_keys.sort()
        for file_entry in file_keys:
            if files[file_entry].has_key("byhand"):
                byhand = 1
                summary += file_entry + " byhand\n"
            elif files[file_entry].has_key("new"):
                new = 1
                summary += "(new) %s %s %s\n" % (file_entry, files[file_entry]["priority"], files[file_entry]["section"])
                if files[file_entry].has_key("othercomponents"):
                    summary += "WARNING: Already present in %s distribution.\n" % (files[file_entry]["othercomponents"])
                # For new debs, include the package description in the summary.
                if files[file_entry]["type"] == "deb":
                    deb_fh = utils.open_file(file_entry)
                    summary += apt_pkg.ParseSection(apt_inst.debExtractControl(deb_fh))["Description"] + '\n'
                    deb_fh.close()
            else:
                # Neither byhand nor new: show where in the pool it will go.
                files[file_entry]["pool name"] = utils.poolify (changes.get("source",""), files[file_entry]["component"])
                destination = self.Cnf["Dir::PoolRoot"] + files[file_entry]["pool name"] + file_entry
                summary += file_entry + "\n  to " + destination + "\n"
                if not files[file_entry].has_key("type"):
                    files[file_entry]["type"] = "unknown"
                if files[file_entry]["type"] in ["deb", "udeb", "dsc"]:
                    # (queue/unchecked), there we have override entries already, use them
                    # (process-new), there we dont have override entries, use the newly generated ones.
                    override_prio = files[file_entry].get("override priority", files[file_entry]["priority"])
                    override_sect = files[file_entry].get("override section", files[file_entry]["section"])
                    override_summary += "%s - %s %s\n" % (file_entry, override_prio, override_sect)

        short_summary = summary

        # This is for direport's benefit...
        f = re_fdnic.sub("\n .\n", changes.get("changes",""))

        if byhand or new:
            summary += "Changes: " + f

        summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"

        summary += self.announce(short_summary, 0)

        return (summary, short_summary)
360
361     ###########################################################################
362
363     def close_bugs (self, summary, action):
364         changes = self.pkg.changes
365         Subst = self.Subst
366         Cnf = self.Cnf
367
368         bugs = changes["closes"].keys()
369
370         if not bugs:
371             return summary
372
373         bugs.sort()
374         summary += "Closing bugs: "
375         for bug in bugs:
376             summary += "%s " % (bug)
377             if action:
378                 Subst["__BUG_NUMBER__"] = bug
379                 if changes["distribution"].has_key("stable"):
380                     Subst["__STABLE_WARNING__"] = """
381 Note that this package is not part of the released stable Debian
382 distribution.  It may have dependencies on other unreleased software,
383 or other instabilities.  Please take care if you wish to install it.
384 The update will eventually make its way into the next released Debian
385 distribution."""
386                 else:
387                     Subst["__STABLE_WARNING__"] = ""
388                     mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-close")
389                     utils.send_mail (mail_message)
390         if action:
391             self.Logger.log(["closing bugs"]+bugs)
392         summary += "\n"
393
394         return summary
395
396     ###########################################################################
397
    def announce (self, short_summary, action):
        """Announce a sourceful upload to each target suite's announce
        list (and close bugs via close_bugs() when configured).

        Mails are only sent when action is true; in either case a
        textual summary of what was/would be announced is returned.
        Non-source uploads, and uploads with a changes Format < 1.6,
        return the empty string."""
        Subst = self.Subst
        Cnf = self.Cnf
        changes = self.pkg.changes

        # Only do announcements for source uploads with a recent dpkg-dev installed
        if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):
            return ""

        # Remember lists already announced to, so that suites sharing a
        # list produce only one mail.
        lists_done = {}
        summary = ""
        Subst["__SHORT_SUMMARY__"] = short_summary

        for dist in changes["distribution"].keys():
            announce_list = Cnf.Find("Suite::%s::Announce" % (dist))
            if announce_list == "" or lists_done.has_key(announce_list):
                continue
            lists_done[announce_list] = 1
            summary += "Announcing to %s\n" % (announce_list)

            if action:
                Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
                # Bcc the package-tracking server when one is configured.
                if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
                    Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"])
                mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.announce")
                utils.send_mail (mail_message)

        if Cnf.FindB("Dinstall::CloseBugs"):
            summary = self.close_bugs(summary, action)

        return summary
429
430     ###########################################################################
431
    def accept (self, summary, short_summary):
        """Accept an upload: dump state, move the files into the
        accepted queue, send the accept mail / announcements, write the
        DebBugs version-tracking files and feed the build queues.

        Updates self.accept_count / self.accept_bytes as a side effect."""
        Cnf = self.Cnf
        Subst = self.Subst
        files = self.pkg.files
        changes = self.pkg.changes
        changes_file = self.pkg.changes_file
        dsc = self.pkg.dsc

        print "Accepting."
        self.Logger.log(["Accepting changes",changes_file])

        # Persist state so process-accepted / process-new can reload it.
        self.dump_vars(Cnf["Dir::Queue::Accepted"])

        # Move all the files into the accepted directory
        utils.move(changes_file, Cnf["Dir::Queue::Accepted"])
        file_keys = files.keys()
        for file_entry in file_keys:
            utils.move(file_entry, Cnf["Dir::Queue::Accepted"])
            self.accept_bytes += float(files[file_entry]["size"])
        self.accept_count += 1

        # Send accept mail, announce to lists, close bugs and check for
        # override disparities
        if not Cnf["Dinstall::Options::No-Mail"]:
            Subst["__SUITE__"] = ""
            Subst["__SUMMARY__"] = summary
            mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
            utils.send_mail(mail_message)
            self.announce(short_summary, 1)


        ## Helper stuff for DebBugs Version Tracking
        if Cnf.Find("Dir::Queue::BTSVersionTrack"):
            # ??? once queue/* is cleared on *.d.o and/or reprocessed
            # the conditionalization on dsc["bts changelog"] should be
            # dropped.

            # Write out the version history from the changelog
            if changes["architecture"].has_key("source") and \
               dsc.has_key("bts changelog"):

                # Write to a dot-prefixed temp file, then rename into
                # place so readers never see a partial file.
                temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                                    dotprefix=1, perms=0644)
                version_history = utils.open_file(temp_filename, 'w')
                version_history.write(dsc["bts changelog"])
                version_history.close()
                filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                      changes_file[:-8]+".versions")
                os.rename(temp_filename, filename)

            # Write out the binary -> source mapping.
            temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                                dotprefix=1, perms=0644)
            debinfo = utils.open_file(temp_filename, 'w')
            for file_entry in file_keys:
                f = files[file_entry]
                if f["type"] == "deb":
                    line = " ".join([f["package"], f["version"],
                                     f["architecture"], f["source package"],
                                     f["source version"]])
                    debinfo.write(line+"\n")
            debinfo.close()
            filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                  changes_file[:-8]+".debinfo")
            os.rename(temp_filename, filename)

        self.queue_build("accepted", Cnf["Dir::Queue::Accepted"])
499
500     ###########################################################################
501
    def queue_build (self, queue, path):
        """Make this upload's files (residing under path) available to
        the auto-builders for every QueueBuild-enabled target suite,
        and record them in the queue_build table.

        Files are symlinked into Dir::QueueBuild, or copied into a
        per-suite subdirectory when Dinstall::SecurityQueueBuild is set
        (so www-data can read them).  A pool .orig.tar.gz referenced by
        the .dsc is linked in too when not already present."""
        Cnf = self.Cnf
        Subst = self.Subst
        files = self.pkg.files
        changes = self.pkg.changes
        changes_file = self.pkg.changes_file
        dsc = self.pkg.dsc
        file_keys = files.keys()

        ## Special support to enable clean auto-building of queued packages
        queue_id = database.get_or_set_queue_id(queue)

        # All queue_build rows for this upload go in one transaction.
        self.projectB.query("BEGIN WORK")
        for suite in changes["distribution"].keys():
            if suite not in Cnf.ValueList("Dinstall::QueueBuildSuites"):
                continue
            suite_id = database.get_suite_id(suite)
            dest_dir = Cnf["Dir::QueueBuild"]
            if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                dest_dir = os.path.join(dest_dir, suite)
            for file_entry in file_keys:
                src = os.path.join(path, file_entry)
                dest = os.path.join(dest_dir, file_entry)
                if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                    # Copy it since the original won't be readable by www-data
                    utils.copy(src, dest)
                else:
                    # Create a symlink to it
                    os.symlink(src, dest)
                # Add it to the list of packages for later processing by apt-ftparchive
                # NOTE(review): filenames are interpolated straight into the
                # SQL string; this trusts earlier filename validation.
                self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
            # If the .orig.tar.gz is in the pool, create a symlink to
            # it (if one doesn't already exist)
            if self.pkg.orig_tar_id:
                # Determine the .orig.tar.gz file name
                # (takes the last matching dsc file; a .dsc references at
                # most one .orig.tar.gz in practice)
                for dsc_file in self.pkg.dsc_files.keys():
                    if dsc_file.endswith(".orig.tar.gz"):
                        filename = dsc_file
                dest = os.path.join(dest_dir, filename)
                # If it doesn't exist, create a symlink
                if not os.path.exists(dest):
                    # Find the .orig.tar.gz in the pool
                    q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id))
                    ql = q.getresult()
                    if not ql:
                        utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id))
                    src = os.path.join(ql[0][0], ql[0][1])
                    os.symlink(src, dest)
                    # Add it to the list of packages for later processing by apt-ftparchive
                    self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
                # if it does, update things to ensure it's not removed prematurely
                else:
                    self.projectB.query("UPDATE queue_build SET in_queue = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id))

        self.projectB.query("COMMIT WORK")
557
558     ###########################################################################
559
560     def check_override (self):
561         Subst = self.Subst
562         changes = self.pkg.changes
563         files = self.pkg.files
564         Cnf = self.Cnf
565
566         # Abandon the check if:
567         #  a) it's a non-sourceful upload
568         #  b) override disparity checks have been disabled
569         #  c) we're not sending mail
570         if not changes["architecture"].has_key("source") or \
571            not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
572            Cnf["Dinstall::Options::No-Mail"]:
573             return
574
575         summary = ""
576         file_keys = files.keys()
577         file_keys.sort()
578         for file_entry in file_keys:
579             if not files[file_entry].has_key("new") and files[file_entry]["type"] == "deb":
580                 section = files[file_entry]["section"]
581                 override_section = files[file_entry]["override section"]
582                 if section.lower() != override_section.lower() and section != "-":
583                     summary += "%s: package says section is %s, override says %s.\n" % (file_entry, section, override_section)
584                 priority = files[file_entry]["priority"]
585                 override_priority = files[file_entry]["override priority"]
586                 if priority != override_priority and priority != "-":
587                     summary += "%s: package says priority is %s, override says %s.\n" % (file_entry, priority, override_priority)
588
589         if summary == "":
590             return
591
592         Subst["__SUMMARY__"] = summary
593         mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/process-unchecked.override-disparity")
594         utils.send_mail(mail_message)
595
596     ###########################################################################
597
    def force_reject (self, files):
        """Forcefully move files from the current directory to the
           reject directory.  If any file already exists in the reject
           directory it will be moved to the morgue to make way for
           the new file.

           files is an iterable of filenames (relative to the current
           directory); unreadable/missing entries are silently skipped."""

        Cnf = self.Cnf

        for file_entry in files:
            # Skip any files which don't exist or which we don't have permission to copy.
            if os.access(file_entry,os.R_OK) == 0:
                continue
            dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file_entry)
            try:
                # O_EXCL claims the destination atomically, so a racing
                # process cannot slip a file in under us.
                dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
            except OSError, e:
                # File exists?  Let's try and move it to the morgue
                if errno.errorcode[e.errno] == 'EEXIST':
                    morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file_entry)
                    try:
                        morgue_file = utils.find_next_free(morgue_file)
                    except NoFreeFilenameError:
                        # Something's either gone badly Pete Tong, or
                        # someone is trying to exploit us.
                        utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file_entry))
                        return
                    utils.move(dest_file, morgue_file, perms=0660)
                    try:
                        # Retry the exclusive claim now that the old file
                        # is out of the way.
                        dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
                    except OSError, e:
                        # Likewise
                        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
                        return
                else:
                    raise
            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file_entry, dest_file, 1, perms=0660)
            os.close(dest_fd)
637
638     ###########################################################################
639
    def do_reject (self, manual = 0, reject_message = ""):
        """Reject the current upload: move its files into the reject
        directory, write a <changes-basename>.reason file there and,
        unless mails are disabled, send a rejection mail.

        If 'manual' is true and no 'reject_message' was supplied, spawn
        $EDITOR (default vi) so the operator can compose one.

        Returns 0 on success, 1 if the operator abandoned a manual
        rejection; calls sys.exit(0) if the operator chose Quit.
        """
        # If we weren't given a manual rejection message, spawn an
        # editor so the user can add one in...
        if manual and not reject_message:
            temp_filename = utils.temp_filename()
            editor = os.environ.get("EDITOR","vi")
            answer = 'E'
            # Loop until the operator is satisfied; answering 'E'
            # re-opens the editor on the same temp file.
            while answer == 'E':
                os.system("%s %s" % (editor, temp_filename))
                temp_fh = utils.open_file(temp_filename)
                reject_message = "".join(temp_fh.readlines())
                temp_fh.close()
                print "Reject message:"
                print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
                prompt = "[R]eject, Edit, Abandon, Quit ?"
                answer = "XXX"
                # Keep prompting until the answer is one of the letters in
                # the prompt; an empty answer selects the [default].
                while prompt.find(answer) == -1:
                    answer = utils.our_raw_input(prompt)
                    m = re_default_answer.search(prompt)
                    if answer == "":
                        answer = m.group(1)
                    answer = answer[:1].upper()
            os.unlink(temp_filename)
            if answer == 'A':
                return 1
            elif answer == 'Q':
                sys.exit(0)

        print "Rejecting.\n"

        Cnf = self.Cnf
        Subst = self.Subst
        pkg = self.pkg

        # "<foo>.changes" -> "<reject-dir>/<foo>.reason"
        reason_filename = pkg.changes_file[:-8] + ".reason"
        reason_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename

        # Move all the files into the reject directory
        reject_files = pkg.files.keys() + [pkg.changes_file]
        self.force_reject(reject_files)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reason_filename):
            os.unlink(reason_filename)
        # O_EXCL: refuse to clobber a file created by a racing process.
        reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)

        if not manual:
            # Automatic rejection: reason file holds the raw reject
            # message; mail is marked as automatic via the X-*-Rejection
            # headers.
            Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
            Subst["__MANUAL_REJECT_MESSAGE__"] = ""
            Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
            os.write(reason_fd, reject_message)
            reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
        else:
            # Build up the rejection email
            user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"])

            Subst["__REJECTOR_ADDRESS__"] = user_email_address
            Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
            Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
            reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
            # Write the rejection email out as the <foo>.reason file
            os.write(reason_fd, reject_mail_message)

        os.close(reason_fd)

        # Send the rejection mail if appropriate
        if not Cnf["Dinstall::Options::No-Mail"]:
            utils.send_mail(reject_mail_message)

        self.Logger.log(["rejected", pkg.changes_file])
        return 0
712
713     ################################################################################
714
715     # Ensure that source exists somewhere in the archive for the binary
716     # upload being processed.
717     #
718     # (1) exact match                      => 1.0-3
719     # (2) Bin-only NMU                     => 1.0-3+b1 , 1.0-3.1+b1
720
721     def source_exists (self, package, source_version, suites = ["any"]):
722         okay = 1
723         for suite in suites:
724             if suite == "any":
725                 que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
726                     (package)
727             else:
728                 # source must exist in suite X, or in some other suite that's
729                 # mapped to X, recursively... silent-maps are counted too,
730                 # unreleased-maps aren't.
731                 maps = self.Cnf.ValueList("SuiteMappings")[:]
732                 maps.reverse()
733                 maps = [ m.split() for m in maps ]
734                 maps = [ (x[1], x[2]) for x in maps
735                                 if x[0] == "map" or x[0] == "silent-map" ]
736                 s = [suite]
737                 for x in maps:
738                     if x[1] in s and x[0] not in s:
739                         s.append(x[0])
740
741                 que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, " OR ".join(["su.suite_name = '%s'" % a for a in s]))
742             q = self.projectB.query(que)
743
744             # Reduce the query results to a list of version numbers
745             ql = [ i[0] for i in q.getresult() ]
746
747             # Try (1)
748             if source_version in ql:
749                 continue
750
751             # Try (2)
752             orig_source_version = re_bin_only_nmu.sub('', source_version)
753             if orig_source_version in ql:
754                 continue
755
756             # No source found...
757             okay = 0
758             break
759         return okay
760
761     ################################################################################
762
763     def in_override_p (self, package, component, suite, binary_type, file):
764         files = self.pkg.files
765
766         if binary_type == "": # must be source
767             file_type = "dsc"
768         else:
769             file_type = binary_type
770
771         # Override suite name; used for example with proposed-updates
772         if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
773             suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)]
774
775         # Avoid <undef> on unknown distributions
776         suite_id = database.get_suite_id(suite)
777         if suite_id == -1:
778             return None
779         component_id = database.get_component_id(component)
780         type_id = database.get_override_type_id(file_type)
781
782         q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
783                            % (package, suite_id, component_id, type_id))
784         result = q.getresult()
785         # If checking for a source package fall back on the binary override type
786         if file_type == "dsc" and not result:
787             deb_type_id = database.get_override_type_id("deb")
788             udeb_type_id = database.get_override_type_id("udeb")
789             q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
790                                % (package, suite_id, component_id, deb_type_id, udeb_type_id))
791             result = q.getresult()
792
793         # Remember the section and priority so we can check them later if appropriate
794         if result:
795             files[file]["override section"] = result[0][0]
796             files[file]["override priority"] = result[0][1]
797
798         return result
799
800     ################################################################################
801
802     def reject (self, str, prefix="Rejected: "):
803         if str:
804             # Unlike other rejects we add new lines first to avoid trailing
805             # new lines when this message is passed back up to a caller.
806             if self.reject_message:
807                 self.reject_message += "\n"
808             self.reject_message += prefix + str
809
810     ################################################################################
811
812     def get_anyversion(self, query_result, suite):
813         anyversion=None
814         anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
815         for (v, s) in query_result:
816             if s in [ x.lower() for x in anysuite ]:
817                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
818                     anyversion=v
819         return anyversion
820
821     ################################################################################
822
823     def cross_suite_version_check(self, query_result, file, new_version):
824         """Ensure versions are newer than existing packages in target
825         suites and that cross-suite version checking rules as
826         set out in the conf file are satisfied."""
827
828         # Check versions for each target suite
829         for target_suite in self.pkg.changes["distribution"].keys():
830             must_be_newer_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
831             must_be_older_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
832             # Enforce "must be newer than target suite" even if conffile omits it
833             if target_suite not in must_be_newer_than:
834                 must_be_newer_than.append(target_suite)
835             for entry in query_result:
836                 existent_version = entry[0]
837                 suite = entry[1]
838                 if suite in must_be_newer_than and \
839                    apt_pkg.VersionCompare(new_version, existent_version) < 1:
840                     self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
841                 if suite in must_be_older_than and \
842                    apt_pkg.VersionCompare(new_version, existent_version) > -1:
843                     ch = self.pkg.changes
844                     cansave = 0
845                     if ch.get('distribution-version', {}).has_key(suite):
846                     # we really use the other suite, ignoring the conflicting one ...
847                         addsuite = ch["distribution-version"][suite]
848
849                         add_version = self.get_anyversion(query_result, addsuite)
850                         target_version = self.get_anyversion(query_result, target_suite)
851
852                         if not add_version:
853                             # not add_version can only happen if we map to a suite
854                             # that doesn't enhance the suite we're propup'ing from.
855                             # so "propup-ver x a b c; map a d" is a problem only if
856                             # d doesn't enhance a.
857                             #
858                             # i think we could always propagate in this case, rather
859                             # than complaining. either way, this isn't a REJECT issue
860                             #
861                             # And - we really should complain to the dorks who configured dak
862                             self.reject("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite), "Warning: ")
863                             self.pkg.changes.setdefault("propdistribution", {})
864                             self.pkg.changes["propdistribution"][addsuite] = 1
865                             cansave = 1
866                         elif not target_version:
867                             # not targets_version is true when the package is NEW
868                             # we could just stick with the "...old version..." REJECT
869                             # for this, I think.
870                             self.reject("Won't propogate NEW packages.")
871                         elif apt_pkg.VersionCompare(new_version, add_version) < 0:
872                             # propogation would be redundant. no need to reject though.
873                             self.reject("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite), "Warning: ")
874                             cansave = 1
875                         elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
876                              apt_pkg.VersionCompare(add_version, target_version) >= 0:
877                             # propogate!!
878                             self.reject("Propogating upload to %s" % (addsuite), "Warning: ")
879                             self.pkg.changes.setdefault("propdistribution", {})
880                             self.pkg.changes["propdistribution"][addsuite] = 1
881                             cansave = 1
882
883                     if not cansave:
884                         self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
885
886     ################################################################################
887
888     def check_binary_against_db(self, file):
889         self.reject_message = ""
890         files = self.pkg.files
891
892         # Ensure version is sane
893         q = self.projectB.query("""
894 SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
895                                      architecture a
896  WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
897    AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
898                                 % (files[file]["package"],
899                                    files[file]["architecture"]))
900         self.cross_suite_version_check(q.getresult(), file, files[file]["version"])
901
902         # Check for any existing copies of the file
903         q = self.projectB.query("""
904 SELECT b.id FROM binaries b, architecture a
905  WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
906    AND a.id = b.architecture"""
907                                 % (files[file]["package"],
908                                    files[file]["version"],
909                                    files[file]["architecture"]))
910         if q.getresult():
911             self.reject("%s: can not overwrite existing copy already in the archive." % (file))
912
913         return self.reject_message
914
915     ################################################################################
916
917     def check_source_against_db(self, file):
918         self.reject_message = ""
919         dsc = self.pkg.dsc
920
921         # Ensure version is sane
922         q = self.projectB.query("""
923 SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
924  WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")))
925         self.cross_suite_version_check(q.getresult(), file, dsc.get("version"))
926
927         return self.reject_message
928
929     ################################################################################
930
931     # **WARNING**
932     # NB: this function can remove entries from the 'files' index [if
933     # the .orig.tar.gz is a duplicate of the one in the archive]; if
934     # you're iterating over 'files' and call this function as part of
935     # the loop, be sure to add a check to the top of the loop to
936     # ensure you haven't just tried to dereference the deleted entry.
937     # **WARNING**
938
939     def check_dsc_against_db(self, file):
940         self.reject_message = ""
941         files = self.pkg.files
942         dsc_files = self.pkg.dsc_files
943         legacy_source_untouchable = self.pkg.legacy_source_untouchable
944         self.pkg.orig_tar_gz = None
945
946         # Try and find all files mentioned in the .dsc.  This has
947         # to work harder to cope with the multiple possible
948         # locations of an .orig.tar.gz.
949         # The ordering on the select is needed to pick the newest orig
950         # when it exists in multiple places.
951         for dsc_file in dsc_files.keys():
952             found = None
953             if files.has_key(dsc_file):
954                 actual_md5 = files[dsc_file]["md5sum"]
955                 actual_size = int(files[dsc_file]["size"])
956                 found = "%s in incoming" % (dsc_file)
957                 # Check the file does not already exist in the archive
958                 q = self.projectB.query("SELECT f.size, f.md5sum, l.path, f.filename FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location ORDER BY f.id DESC" % (dsc_file))
959                 ql = q.getresult()
960                 # Strip out anything that isn't '%s' or '/%s$'
961                 for i in ql:
962                     if i[3] != dsc_file and i[3][-(len(dsc_file)+1):] != '/'+dsc_file:
963                         ql.remove(i)
964
965                 # "[dak] has not broken them.  [dak] has fixed a
966                 # brokenness.  Your crappy hack exploited a bug in
967                 # the old dinstall.
968                 #
969                 # "(Come on!  I thought it was always obvious that
970                 # one just doesn't release different files with
971                 # the same name and version.)"
972                 #                        -- ajk@ on d-devel@l.d.o
973
974                 if ql:
975                     # Ignore exact matches for .orig.tar.gz
976                     match = 0
977                     if dsc_file.endswith(".orig.tar.gz"):
978                         for i in ql:
979                             if files.has_key(dsc_file) and \
980                                int(files[dsc_file]["size"]) == int(i[0]) and \
981                                files[dsc_file]["md5sum"] == i[1]:
982                                 self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ")
983                                 del files[dsc_file]
984                                 self.pkg.orig_tar_gz = i[2] + i[3]
985                                 match = 1
986
987                     if not match:
988                         self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file))
989             elif dsc_file.endswith(".orig.tar.gz"):
990                 # Check in the pool
991                 q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
992                 ql = q.getresult()
993                 # Strip out anything that isn't '%s' or '/%s$'
994                 for i in ql:
995                     if i[1] != dsc_file and i[1][-(len(dsc_file)+1):] != '/'+dsc_file:
996                         ql.remove(i)
997
998                 if ql:
999                     # Unfortunately, we may get more than one match here if,
1000                     # for example, the package was in potato but had an -sa
1001                     # upload in woody.  So we need to choose the right one.
1002
1003                     x = ql[0]; # default to something sane in case we don't match any or have only one
1004
1005                     if len(ql) > 1:
1006                         for i in ql:
1007                             old_file = i[0] + i[1]
1008                             old_file_fh = utils.open_file(old_file)
1009                             actual_md5 = apt_pkg.md5sum(old_file_fh)
1010                             old_file_fh.close()
1011                             actual_size = os.stat(old_file)[stat.ST_SIZE]
1012                             if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
1013                                 x = i
1014                             else:
1015                                 legacy_source_untouchable[i[3]] = ""
1016
1017                     old_file = x[0] + x[1]
1018                     old_file_fh = utils.open_file(old_file)
1019                     actual_md5 = apt_pkg.md5sum(old_file_fh)
1020                     old_file_fh.close()
1021                     actual_size = os.stat(old_file)[stat.ST_SIZE]
1022                     found = old_file
1023                     suite_type = x[2]
1024                     dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
1025                     # See install() in process-accepted...
1026                     self.pkg.orig_tar_id = x[3]
1027                     self.pkg.orig_tar_gz = old_file
1028                     if suite_type == "legacy" or suite_type == "legacy-mixed":
1029                         self.pkg.orig_tar_location = "legacy"
1030                     else:
1031                         self.pkg.orig_tar_location = x[4]
1032                 else:
1033                     # Not there? Check the queue directories...
1034
1035                     in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file)
1036                     # See process_it() in 'dak process-unchecked' for explanation of this
1037                     # in_unchecked check dropped by ajt 2007-08-28, how did that
1038                     # ever make sense?
1039                     if os.path.exists(in_unchecked) and False:
1040                         return (self.reject_message, in_unchecked)
1041                     else:
1042                         for directory in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates" ]:
1043                             in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (directory)],dsc_file)
1044                             if os.path.exists(in_otherdir):
1045                                 in_otherdir_fh = utils.open_file(in_otherdir)
1046                                 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
1047                                 in_otherdir_fh.close()
1048                                 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
1049                                 found = in_otherdir
1050                                 self.pkg.orig_tar_gz = in_otherdir
1051
1052                     if not found:
1053                         self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file))
1054                         self.pkg.orig_tar_gz = -1
1055                         continue
1056             else:
1057                 self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file))
1058                 continue
1059             if actual_md5 != dsc_files[dsc_file]["md5sum"]:
1060                 self.reject("md5sum for %s doesn't match %s." % (found, file))
1061             if actual_size != int(dsc_files[dsc_file]["size"]):
1062                 self.reject("size for %s doesn't match %s." % (found, file))
1063
1064         return (self.reject_message, None)
1065
1066     def do_query(self, q):
1067         sys.stderr.write("query: \"%s\" ... " % (q))
1068         before = time.time()
1069         r = self.projectB.query(q)
1070         time_diff = time.time()-before
1071         sys.stderr.write("took %.3f seconds.\n" % (time_diff))
1072         return r