]> git.decadent.org.uk Git - dak.git/blob - daklib/queue.py
make .dak files world-readable
[dak.git] / daklib / queue.py
1 #!/usr/bin/env python
2
3 # Queue utility functions for dak
4 # Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006  James Troup <james@nocrew.org>
5
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 2 of the License, or
9 # (at your option) any later version.
10
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14 # GNU General Public License for more details.
15
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software
18 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
19
20 ###############################################################################
21
22 import cPickle, errno, os, pg, re, stat, sys, time
23 import apt_inst, apt_pkg
24 import utils, database
25 from dak_exceptions import *
26
27 from types import *
28
29 ###############################################################################
30
31 re_isanum = re.compile (r"^\d+$")
32 re_default_answer = re.compile(r"\[(.*)\]")
33 re_fdnic = re.compile(r"\n\n")
34 re_bin_only_nmu = re.compile(r"\+b\d+$")
35
36 ################################################################################
37
38 # Determine what parts in a .changes are NEW
39
def determine_new(changes, files, projectB, warn=1):
    """Determine which parts of a .changes upload are NEW.

    Builds a dict keyed by package name describing every non-byhand
    file entry, then removes any package that already has an override
    entry in every target suite (also clearing those files' "new"
    flags).  When warn is true, prints warnings for stable uploads and
    for packages already present in another component.

    Returns the dict of remaining (genuinely NEW) packages.
    """
    new = {}

    # Build up a list of potentially new things
    for file_entry in files.keys():
        f = files[file_entry]
        # Skip byhand elements
        if f["type"] == "byhand":
            continue
        pkg = f["package"]
        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f)
        component = f["component"]

        # Source packages always carry the pseudo-priority "source".
        if file_type == "dsc":
            priority = "source"
        if not new.has_key(pkg):
            new[pkg] = {}
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        else:
            old_type = new[pkg]["type"]
            if old_type != file_type:
                # source gets trumped by deb or udeb
                if old_type == "dsc":
                    new[pkg]["priority"] = priority
                    new[pkg]["section"] = section
                    new[pkg]["type"] = file_type
                    new[pkg]["component"] = component
        new[pkg]["files"].append(file_entry)
        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]

    # Drop anything that already has an override in a target suite.
    # NOTE(review): the SQL is built by string interpolation; pkg comes
    # from the upload, so this trusts earlier package-name validation.
    for suite in changes["suite"].keys():
        suite_id = database.get_suite_id(suite)
        # .keys() snapshots the keys (Python 2 list), so deleting from
        # `new` inside the loop is safe.
        for pkg in new.keys():
            component_id = database.get_component_id(new[pkg]["component"])
            type_id = database.get_override_type_id(new[pkg]["type"])
            q = projectB.query("SELECT package FROM override WHERE package = '%s' AND suite = %s AND component = %s AND type = %s" % (pkg, suite_id, component_id, type_id))
            ql = q.getresult()
            if ql:
                for file_entry in new[pkg]["files"]:
                    if files[file_entry].has_key("new"):
                        del files[file_entry]["new"]
                del new[pkg]

    if warn:
        if changes["suite"].has_key("stable"):
            print "WARNING: overrides will be added for stable!"
            # NOTE(review): the oldstable warning only fires when stable
            # is also targeted (it is nested inside the stable check) --
            # confirm whether that is intentional.
            if changes["suite"].has_key("oldstable"):
                print "WARNING: overrides will be added for OLDstable!"
        for pkg in new.keys():
            if new[pkg].has_key("othercomponents"):
                print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])

    return new
100
101 ################################################################################
102
def get_type(f):
    """Return the override type ("dsc", "deb", "udeb", ...) for file entry *f*.

    Prefers f["dbtype"] when set; otherwise any source-related file
    extension maps to "dsc".  Aborts via utils.fubar() on an unknown
    type, or on a type with no override-type id in the database.
    """
    # Determine the type
    if f.has_key("dbtype"):
        file_type = f["dbtype"]
    elif f["type"] in [ "orig.tar.gz", "orig.tar.bz2", "tar.gz", "tar.bz2", "diff.gz", "diff.bz2", "dsc" ]:
        file_type = "dsc"
    else:
        # Bug fix: this message previously interpolated `file_type`,
        # which is unbound on this branch (NameError instead of the
        # intended error).  Report the offending f["type"] instead.
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (f["type"]))

    # Validate the override type
    type_id = database.get_override_type_id(file_type)
    if type_id == -1:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))

    return file_type
118
119 ################################################################################
120
121 # check if section/priority values are valid
122
def check_valid(new):
    """Resolve "section id" / "priority id" for every entry in *new*,
    forcing them to -1 when the section or priority is inconsistent
    with the entry's type (debian-installer vs. udeb, source vs. dsc).
    """
    for pkg in new.keys():
        entry = new[pkg]
        section = entry["section"]
        priority = entry["priority"]
        file_type = entry["type"]
        entry["section id"] = database.get_section_id(section)
        entry["priority id"] = database.get_priority_id(entry["priority"])
        # Sanity checks: debian-installer sections may only contain
        # udebs (or source), and udebs may only live in d-i sections.
        is_di = "debian-installer" in section
        if is_di:
            if file_type not in ("udeb", "dsc"):
                entry["section id"] = -1
        elif file_type == "udeb":
            entry["section id"] = -1
        # "source" priority and "dsc" type must occur together.
        if (priority == "source") != (file_type == "dsc"):
            entry["priority id"] = -1
137
138
139 ###############################################################################
140
141 # Convenience wrapper to carry around all the package information in
142
class Pkg:
    """Lightweight attribute bag carrying all per-upload package state."""

    def __init__(self, **kwds):
        # Seed attributes directly from the keyword arguments.
        for key, value in kwds.items():
            setattr(self, key, value)

    def update(self, **kwds):
        # Overwrite / add attributes from the keyword arguments.
        for key, value in kwds.items():
            setattr(self, key, value)
150 ###############################################################################
151
152 class Upload:
153
154     def __init__(self, Cnf):
155         self.Cnf = Cnf
156         self.accept_count = 0
157         self.accept_bytes = 0L
158         self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
159                        legacy_source_untouchable = {})
160
161         # Initialize the substitution template mapping global
162         Subst = self.Subst = {}
163         Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"]
164         Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"]
165         Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"]
166         Subst["__DAK_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
167
168         self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
169         database.init(Cnf, self.projectB)
170
171     ###########################################################################
172
173     def init_vars (self):
174         self.pkg.changes.clear()
175         self.pkg.dsc.clear()
176         self.pkg.files.clear()
177         self.pkg.dsc_files.clear()
178         self.pkg.legacy_source_untouchable.clear()
179         self.pkg.orig_tar_id = None
180         self.pkg.orig_tar_location = ""
181         self.pkg.orig_tar_gz = None
182
183     ###########################################################################
184
185     def update_vars (self):
186         dump_filename = self.pkg.changes_file[:-8]+".dak"
187         dump_file = utils.open_file(dump_filename)
188         p = cPickle.Unpickler(dump_file)
189
190         self.pkg.changes.update(p.load())
191         self.pkg.dsc.update(p.load())
192         self.pkg.files.update(p.load())
193         self.pkg.dsc_files.update(p.load())
194         self.pkg.legacy_source_untouchable.update(p.load())
195
196         self.pkg.orig_tar_id = p.load()
197         self.pkg.orig_tar_location = p.load()
198
199         dump_file.close()
200
201     ###########################################################################
202
203     # This could just dump the dictionaries as is, but I'd like to
204     # avoid this so there's some idea of what process-accepted &
205     # process-new use from process-unchecked
206
207     def dump_vars(self, dest_dir):
208
209         changes = self.pkg.changes
210         dsc = self.pkg.dsc
211         files = self.pkg.files
212         dsc_files = self.pkg.dsc_files
213         legacy_source_untouchable = self.pkg.legacy_source_untouchable
214         orig_tar_id = self.pkg.orig_tar_id
215         orig_tar_location = self.pkg.orig_tar_location
216
217         dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".dak")
218         dump_file = utils.open_file(dump_filename, 'w')
219         try:
220             os.chmod(dump_filename, 0664)
221         except OSError, e:
222             raise
223
224         p = cPickle.Pickler(dump_file, 1)
225         d_changes = {}
226         d_dsc = {}
227         d_files = {}
228         d_dsc_files = {}
229
230         ## files
231         for file_entry in files.keys():
232             d_files[file_entry] = {}
233             for i in [ "package", "version", "architecture", "type", "size",
234                        "md5sum", "sha1sum", "sha256sum", "component",
235                        "location id", "source package", "source version",
236                        "maintainer", "dbtype", "files id", "new",
237                        "section", "priority", "othercomponents",
238                        "pool name", "original component" ]:
239                 if files[file_entry].has_key(i):
240                     d_files[file_entry][i] = files[file_entry][i]
241         ## changes
242         # Mandatory changes fields
243         for i in [ "distribution", "source", "architecture", "version",
244                    "maintainer", "urgency", "fingerprint", "changedby822",
245                    "changedby2047", "changedbyname", "maintainer822",
246                    "maintainer2047", "maintainername", "maintaineremail",
247                    "closes", "changes" ]:
248             d_changes[i] = changes[i]
249         # Optional changes fields
250         for i in [ "changed-by", "filecontents", "format", "process-new note", "adv id", "distribution-version",
251                    "sponsoremail" ]:
252             if changes.has_key(i):
253                 d_changes[i] = changes[i]
254         ## dsc
255         for i in [ "source", "version", "maintainer", "fingerprint",
256                    "uploaders", "bts changelog", "dm-upload-allowed" ]:
257             if dsc.has_key(i):
258                 d_dsc[i] = dsc[i]
259         ## dsc_files
260         for file_entry in dsc_files.keys():
261             d_dsc_files[file_entry] = {}
262             # Mandatory dsc_files fields
263             for i in [ "size", "md5sum" ]:
264                 d_dsc_files[file_entry][i] = dsc_files[file_entry][i]
265             # Optional dsc_files fields
266             for i in [ "files id" ]:
267                 if dsc_files[file_entry].has_key(i):
268                     d_dsc_files[file_entry][i] = dsc_files[file_entry][i]
269
270         for i in [ d_changes, d_dsc, d_files, d_dsc_files,
271                    legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
272             p.dump(i)
273         dump_file.close()
274
275     ###########################################################################
276
277     # Set up the per-package template substitution mappings
278
279     def update_subst (self, reject_message = ""):
280         Subst = self.Subst
281         changes = self.pkg.changes
282         # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
283         if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
284             changes["architecture"] = { "Unknown" : "" }
285         # and maintainer2047 may not exist.
286         if not changes.has_key("maintainer2047"):
287             changes["maintainer2047"] = self.Cnf["Dinstall::MyEmailAddress"]
288
289         Subst["__ARCHITECTURE__"] = " ".join(changes["architecture"].keys())
290         Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
291         Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "")
292
293         # For source uploads the Changed-By field wins; otherwise Maintainer wins.
294         if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
295             Subst["__MAINTAINER_FROM__"] = changes["changedby2047"]
296             Subst["__MAINTAINER_TO__"] = "%s, %s" % (changes["changedby2047"],
297                                                      changes["maintainer2047"])
298             Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown")
299         else:
300             Subst["__MAINTAINER_FROM__"] = changes["maintainer2047"]
301             Subst["__MAINTAINER_TO__"] = changes["maintainer2047"]
302             Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown")
303
304         if "sponsoremail" in changes:
305             Subst["__MAINTAINER_TO__"] += ", %s"%changes["sponsoremail"]
306
307         if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
308             Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])
309
310         # Apply any global override of the Maintainer field
311         if self.Cnf.get("Dinstall::OverrideMaintainer"):
312             Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"]
313             Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"]
314
315         Subst["__REJECT_MESSAGE__"] = reject_message
316         Subst["__SOURCE__"] = changes.get("source", "Unknown")
317         Subst["__VERSION__"] = changes.get("version", "Unknown")
318
319     ###########################################################################
320
    def build_summaries(self):
        """Build the human-readable (summary, short_summary) pair for
        the upload: per-file dispositions (byhand / new / pool
        destination), override entries, the changelog extract for
        byhand/new uploads, and any announcement text.

        Side effects: fills in files[...]["pool name"] and defaults a
        missing files[...]["type"] to "unknown" for non-byhand,
        non-new entries.
        """
        changes = self.pkg.changes
        files = self.pkg.files

        byhand = summary = new = ""

        # changes["distribution"] may not exist in corner cases
        # (e.g. unreadable changes files)
        if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
            changes["distribution"] = {}

        override_summary ="";
        file_keys = files.keys()
        file_keys.sort()
        for file_entry in file_keys:
            if files[file_entry].has_key("byhand"):
                byhand = 1
                summary += file_entry + " byhand\n"
            elif files[file_entry].has_key("new"):
                new = 1
                summary += "(new) %s %s %s\n" % (file_entry, files[file_entry]["priority"], files[file_entry]["section"])
                if files[file_entry].has_key("othercomponents"):
                    summary += "WARNING: Already present in %s distribution.\n" % (files[file_entry]["othercomponents"])
                # For NEW debs, include the package description from the
                # control file so the reviewer can see what it is.
                if files[file_entry]["type"] == "deb":
                    deb_fh = utils.open_file(file_entry)
                    summary += apt_pkg.ParseSection(apt_inst.debExtractControl(deb_fh))["Description"] + '\n'
                    deb_fh.close()
            else:
                # Known file: report where in the pool it will land.
                files[file_entry]["pool name"] = utils.poolify (changes.get("source",""), files[file_entry]["component"])
                destination = self.Cnf["Dir::PoolRoot"] + files[file_entry]["pool name"] + file_entry
                summary += file_entry + "\n  to " + destination + "\n"
                if not files[file_entry].has_key("type"):
                    files[file_entry]["type"] = "unknown"
                if files[file_entry]["type"] in ["deb", "udeb", "dsc"]:
                    # (queue/unchecked), there we have override entries already, use them
                    # (process-new), there we dont have override entries, use the newly generated ones.
                    override_prio = files[file_entry].get("override priority", files[file_entry]["priority"])
                    override_sect = files[file_entry].get("override section", files[file_entry]["section"])
                    override_summary += "%s - %s %s\n" % (file_entry, override_prio, override_sect)

        short_summary = summary

        # This is for direport's benefit...
        f = re_fdnic.sub("\n .\n", changes.get("changes",""))

        if byhand or new:
            summary += "Changes: " + f

        summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"

        summary += self.announce(short_summary, 0)

        return (summary, short_summary)
374
375     ###########################################################################
376
377     def close_bugs (self, summary, action):
378         changes = self.pkg.changes
379         Subst = self.Subst
380         Cnf = self.Cnf
381
382         bugs = changes["closes"].keys()
383
384         if not bugs:
385             return summary
386
387         bugs.sort()
388         summary += "Closing bugs: "
389         for bug in bugs:
390             summary += "%s " % (bug)
391             if action:
392                 Subst["__BUG_NUMBER__"] = bug
393                 if changes["distribution"].has_key("stable"):
394                     Subst["__STABLE_WARNING__"] = """
395 Note that this package is not part of the released stable Debian
396 distribution.  It may have dependencies on other unreleased software,
397 or other instabilities.  Please take care if you wish to install it.
398 The update will eventually make its way into the next released Debian
399 distribution."""
400                 else:
401                     Subst["__STABLE_WARNING__"] = ""
402                     mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-close")
403                     utils.send_mail (mail_message)
404         if action:
405             self.Logger.log(["closing bugs"]+bugs)
406         summary += "\n"
407
408         return summary
409
410     ###########################################################################
411
412     def announce (self, short_summary, action):
413         Subst = self.Subst
414         Cnf = self.Cnf
415         changes = self.pkg.changes
416
417         # Only do announcements for source uploads with a recent dpkg-dev installed
418         if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):
419             return ""
420
421         lists_done = {}
422         summary = ""
423         Subst["__SHORT_SUMMARY__"] = short_summary
424
425         for dist in changes["distribution"].keys():
426             announce_list = Cnf.Find("Suite::%s::Announce" % (dist))
427             if announce_list == "" or lists_done.has_key(announce_list):
428                 continue
429             lists_done[announce_list] = 1
430             summary += "Announcing to %s\n" % (announce_list)
431
432             if action:
433                 Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
434                 if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
435                     Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"])
436                 mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.announce")
437                 utils.send_mail (mail_message)
438
439         if Cnf.FindB("Dinstall::CloseBugs"):
440             summary = self.close_bugs(summary, action)
441
442         return summary
443
444     ###########################################################################
445
    def accept (self, summary, short_summary):
        """Accept an upload: dump the saved state, move the .changes
        and its files into the accepted queue, send the accepted mail
        and announcements, write the DebBugs version-tracking helper
        files, and register the files for the queue auto-builders.

        NOTE(review): assumes self.Logger has been attached by the
        caller before accept() is invoked -- it is not set in __init__.
        """
        Cnf = self.Cnf
        Subst = self.Subst
        files = self.pkg.files
        changes = self.pkg.changes
        changes_file = self.pkg.changes_file
        dsc = self.pkg.dsc

        print "Accepting."
        self.Logger.log(["Accepting changes",changes_file])

        # Persist our state alongside the files for later stages.
        self.dump_vars(Cnf["Dir::Queue::Accepted"])

        # Move all the files into the accepted directory
        utils.move(changes_file, Cnf["Dir::Queue::Accepted"])
        file_keys = files.keys()
        for file_entry in file_keys:
            utils.move(file_entry, Cnf["Dir::Queue::Accepted"])
            self.accept_bytes += float(files[file_entry]["size"])
        self.accept_count += 1

        # Send accept mail, announce to lists, close bugs and check for
        # override disparities
        if not Cnf["Dinstall::Options::No-Mail"]:
            Subst["__SUITE__"] = ""
            Subst["__SUMMARY__"] = summary
            mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
            utils.send_mail(mail_message)
            self.announce(short_summary, 1)


        ## Helper stuff for DebBugs Version Tracking
        if Cnf.Find("Dir::Queue::BTSVersionTrack"):
            # ??? once queue/* is cleared on *.d.o and/or reprocessed
            # the conditionalization on dsc["bts changelog"] should be
            # dropped.

            # Write out the version history from the changelog
            if changes["architecture"].has_key("source") and \
               dsc.has_key("bts changelog"):

                temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                                    dotprefix=1, perms=0644)
                version_history = utils.open_file(temp_filename, 'w')
                version_history.write(dsc["bts changelog"])
                version_history.close()
                filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                      changes_file[:-8]+".versions")
                # Publish atomically via rename (temp file is in the same dir).
                os.rename(temp_filename, filename)

            # Write out the binary -> source mapping.
            temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                                dotprefix=1, perms=0644)
            debinfo = utils.open_file(temp_filename, 'w')
            for file_entry in file_keys:
                f = files[file_entry]
                if f["type"] == "deb":
                    line = " ".join([f["package"], f["version"],
                                     f["architecture"], f["source package"],
                                     f["source version"]])
                    debinfo.write(line+"\n")
            debinfo.close()
            filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                  changes_file[:-8]+".debinfo")
            os.rename(temp_filename, filename)

        # Make the accepted files available to the auto-builders.
        self.queue_build("accepted", Cnf["Dir::Queue::Accepted"])
513
514     ###########################################################################
515
    def queue_build (self, queue, path):
        """Register the upload's files (found under *path*) with the
        queue_build table for the named *queue*, copying or symlinking
        them into Dir::QueueBuild so the auto-builders can use them
        before the upload is fully processed.

        NOTE(review): SQL statements here are built by string
        interpolation of file paths; this trusts earlier filename
        validation.
        """
        Cnf = self.Cnf
        Subst = self.Subst
        files = self.pkg.files
        changes = self.pkg.changes
        changes_file = self.pkg.changes_file
        dsc = self.pkg.dsc
        file_keys = files.keys()

        ## Special support to enable clean auto-building of queued packages
        queue_id = database.get_or_set_queue_id(queue)

        self.projectB.query("BEGIN WORK")
        for suite in changes["distribution"].keys():
            if suite not in Cnf.ValueList("Dinstall::QueueBuildSuites"):
                continue
            suite_id = database.get_suite_id(suite)
            dest_dir = Cnf["Dir::QueueBuild"]
            if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                dest_dir = os.path.join(dest_dir, suite)
            for file_entry in file_keys:
                src = os.path.join(path, file_entry)
                dest = os.path.join(dest_dir, file_entry)
                if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                    # Copy it since the original won't be readable by www-data
                    utils.copy(src, dest)
                else:
                    # Create a symlink to it
                    os.symlink(src, dest)
                # Add it to the list of packages for later processing by apt-ftparchive
                self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
            # If the .orig.tar.gz is in the pool, create a symlink to
            # it (if one doesn't already exist)
            if self.pkg.orig_tar_id:
                # Determine the .orig.tar.gz file name
                # NOTE(review): if no dsc_files entry ends in
                # .orig.tar.gz, `filename` is unbound here (NameError)
                # or stale from a previous suite iteration -- confirm
                # callers guarantee its presence when orig_tar_id is set.
                for dsc_file in self.pkg.dsc_files.keys():
                    if dsc_file.endswith(".orig.tar.gz"):
                        filename = dsc_file
                dest = os.path.join(dest_dir, filename)
                # If it doesn't exist, create a symlink
                if not os.path.exists(dest):
                    # Find the .orig.tar.gz in the pool
                    q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id))
                    ql = q.getresult()
                    if not ql:
                        utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id))
                    src = os.path.join(ql[0][0], ql[0][1])
                    os.symlink(src, dest)
                    # Add it to the list of packages for later processing by apt-ftparchive
                    self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
                # if it does, update things to ensure it's not removed prematurely
                else:
                    self.projectB.query("UPDATE queue_build SET in_queue = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id))

        self.projectB.query("COMMIT WORK")
571
572     ###########################################################################
573
574     def check_override (self):
575         Subst = self.Subst
576         changes = self.pkg.changes
577         files = self.pkg.files
578         Cnf = self.Cnf
579
580         # Abandon the check if:
581         #  a) it's a non-sourceful upload
582         #  b) override disparity checks have been disabled
583         #  c) we're not sending mail
584         if not changes["architecture"].has_key("source") or \
585            not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
586            Cnf["Dinstall::Options::No-Mail"]:
587             return
588
589         summary = ""
590         file_keys = files.keys()
591         file_keys.sort()
592         for file_entry in file_keys:
593             if not files[file_entry].has_key("new") and files[file_entry]["type"] == "deb":
594                 section = files[file_entry]["section"]
595                 override_section = files[file_entry]["override section"]
596                 if section.lower() != override_section.lower() and section != "-":
597                     summary += "%s: package says section is %s, override says %s.\n" % (file_entry, section, override_section)
598                 priority = files[file_entry]["priority"]
599                 override_priority = files[file_entry]["override priority"]
600                 if priority != override_priority and priority != "-":
601                     summary += "%s: package says priority is %s, override says %s.\n" % (file_entry, priority, override_priority)
602
603         if summary == "":
604             return
605
606         Subst["__SUMMARY__"] = summary
607         mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/process-unchecked.override-disparity")
608         utils.send_mail(mail_message)
609
610     ###########################################################################
611
    def force_reject (self, files):
        """Forcefully move files from the current directory to the
           reject directory.  If any file already exists in the reject
           directory it will be moved to the morgue to make way for
           the new file.

           The O_CREAT|O_EXCL open is used to atomically claim each
           destination name, so a racing attacker cannot swap in a
           file between our checks and the final move.
        """
        Cnf = self.Cnf

        for file_entry in files:
            # Skip any files which don't exist or which we don't have permission to copy.
            if os.access(file_entry,os.R_OK) == 0:
                continue
            dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file_entry)
            try:
                # Atomically claim the destination name.
                dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
            except OSError, e:
                # File exists?  Let's try and move it to the morgue
                if errno.errorcode[e.errno] == 'EEXIST':
                    morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file_entry)
                    try:
                        morgue_file = utils.find_next_free(morgue_file)
                    except NoFreeFilenameError:
                        # Something's either gone badly Pete Tong, or
                        # someone is trying to exploit us.
                        utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file_entry))
                        return
                    utils.move(dest_file, morgue_file, perms=0660)
                    try:
                        # Retry the atomic claim now the old file is gone.
                        dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
                    except OSError, e:
                        # Likewise
                        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
                        return
                else:
                    raise
            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file_entry, dest_file, 1, perms=0660)
            os.close(dest_fd)
651
652     ###########################################################################
653
    def do_reject (self, manual = 0, reject_message = ""):
        """Reject the current upload.

        If 'manual' is set and no 'reject_message' was supplied, spawn
        $EDITOR so the operator can write one interactively.  Moves the
        upload's files into the reject directory, writes a <changes
        basename>.reason file next to them and (unless mail is disabled)
        sends the rejection mail.

        Returns 1 if the operator abandoned the rejection, 0 on a
        completed rejection; exits the process if the operator quits.
        """
        # If we weren't given a manual rejection message, spawn an
        # editor so the user can add one in...
        if manual and not reject_message:
            temp_filename = utils.temp_filename()
            editor = os.environ.get("EDITOR","vi")
            answer = 'E'
            # Loop: edit, show the result, then prompt until the answer
            # is one of the letters embedded in the prompt string.
            while answer == 'E':
                os.system("%s %s" % (editor, temp_filename))
                temp_fh = utils.open_file(temp_filename)
                reject_message = "".join(temp_fh.readlines())
                temp_fh.close()
                print "Reject message:"
                print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
                prompt = "[R]eject, Edit, Abandon, Quit ?"
                # "XXX" can never match the prompt, forcing at least one
                # pass through the input loop below.
                answer = "XXX"
                while prompt.find(answer) == -1:
                    answer = utils.our_raw_input(prompt)
                    # An empty answer selects the bracketed default ("R").
                    m = re_default_answer.search(prompt)
                    if answer == "":
                        answer = m.group(1)
                    answer = answer[:1].upper()
            os.unlink(temp_filename)
            if answer == 'A':
                return 1
            elif answer == 'Q':
                sys.exit(0)

        print "Rejecting.\n"

        Cnf = self.Cnf
        Subst = self.Subst
        pkg = self.pkg

        # <foo>.changes -> <foo>.reason in the reject directory.
        reason_filename = pkg.changes_file[:-8] + ".reason"
        reason_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename

        # Move all the files into the reject directory
        reject_files = pkg.files.keys() + [pkg.changes_file]
        self.force_reject(reject_files)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        # (unlink + O_EXCL means a pre-created file makes os.open raise)
        if os.path.exists(reason_filename):
            os.unlink(reason_filename)
        reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)

        if not manual:
            # Automatic rejection: the .reason file holds just the raw
            # rejection message; the mail comes from the template.
            Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
            Subst["__MANUAL_REJECT_MESSAGE__"] = ""
            Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
            os.write(reason_fd, reject_message)
            reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
        else:
            # Build up the rejection email
            user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"])

            Subst["__REJECTOR_ADDRESS__"] = user_email_address
            Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
            Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
            reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
            # Write the rejection email out as the <foo>.reason file
            os.write(reason_fd, reject_mail_message)

        os.close(reason_fd)

        # Send the rejection mail if appropriate
        if not Cnf["Dinstall::Options::No-Mail"]:
            utils.send_mail(reject_mail_message)

        self.Logger.log(["rejected", pkg.changes_file])
        return 0
726
727     ################################################################################
728
729     # Ensure that source exists somewhere in the archive for the binary
730     # upload being processed.
731     #
732     # (1) exact match                      => 1.0-3
733     # (2) Bin-only NMU                     => 1.0-3+b1 , 1.0-3.1+b1
734
735     def source_exists (self, package, source_version, suites = ["any"]):
736         okay = 1
737         for suite in suites:
738             if suite == "any":
739                 que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
740                     (package)
741             else:
742                 # source must exist in suite X, or in some other suite that's
743                 # mapped to X, recursively... silent-maps are counted too,
744                 # unreleased-maps aren't.
745                 maps = self.Cnf.ValueList("SuiteMappings")[:]
746                 maps.reverse()
747                 maps = [ m.split() for m in maps ]
748                 maps = [ (x[1], x[2]) for x in maps
749                                 if x[0] == "map" or x[0] == "silent-map" ]
750                 s = [suite]
751                 for x in maps:
752                     if x[1] in s and x[0] not in s:
753                         s.append(x[0])
754
755                 que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, " OR ".join(["su.suite_name = '%s'" % a for a in s]))
756             q = self.projectB.query(que)
757
758             # Reduce the query results to a list of version numbers
759             ql = [ i[0] for i in q.getresult() ]
760
761             # Try (1)
762             if source_version in ql:
763                 continue
764
765             # Try (2)
766             orig_source_version = re_bin_only_nmu.sub('', source_version)
767             if orig_source_version in ql:
768                 continue
769
770             # No source found...
771             okay = 0
772             break
773         return okay
774
775     ################################################################################
776
777     def in_override_p (self, package, component, suite, binary_type, file):
778         files = self.pkg.files
779
780         if binary_type == "": # must be source
781             file_type = "dsc"
782         else:
783             file_type = binary_type
784
785         # Override suite name; used for example with proposed-updates
786         if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
787             suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)]
788
789         # Avoid <undef> on unknown distributions
790         suite_id = database.get_suite_id(suite)
791         if suite_id == -1:
792             return None
793         component_id = database.get_component_id(component)
794         type_id = database.get_override_type_id(file_type)
795
796         q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
797                            % (package, suite_id, component_id, type_id))
798         result = q.getresult()
799         # If checking for a source package fall back on the binary override type
800         if file_type == "dsc" and not result:
801             deb_type_id = database.get_override_type_id("deb")
802             udeb_type_id = database.get_override_type_id("udeb")
803             q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
804                                % (package, suite_id, component_id, deb_type_id, udeb_type_id))
805             result = q.getresult()
806
807         # Remember the section and priority so we can check them later if appropriate
808         if result:
809             files[file]["override section"] = result[0][0]
810             files[file]["override priority"] = result[0][1]
811
812         return result
813
814     ################################################################################
815
816     def reject (self, str, prefix="Rejected: "):
817         if str:
818             # Unlike other rejects we add new lines first to avoid trailing
819             # new lines when this message is passed back up to a caller.
820             if self.reject_message:
821                 self.reject_message += "\n"
822             self.reject_message += prefix + str
823
824     ################################################################################
825
826     def get_anyversion(self, query_result, suite):
827         anyversion=None
828         anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
829         for (v, s) in query_result:
830             if s in [ x.lower() for x in anysuite ]:
831                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
832                     anyversion=v
833         return anyversion
834
835     ################################################################################
836
837     def cross_suite_version_check(self, query_result, file, new_version):
838         """Ensure versions are newer than existing packages in target
839         suites and that cross-suite version checking rules as
840         set out in the conf file are satisfied."""
841
842         # Check versions for each target suite
843         for target_suite in self.pkg.changes["distribution"].keys():
844             must_be_newer_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
845             must_be_older_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
846             # Enforce "must be newer than target suite" even if conffile omits it
847             if target_suite not in must_be_newer_than:
848                 must_be_newer_than.append(target_suite)
849             for entry in query_result:
850                 existent_version = entry[0]
851                 suite = entry[1]
852                 if suite in must_be_newer_than and \
853                    apt_pkg.VersionCompare(new_version, existent_version) < 1:
854                     self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
855                 if suite in must_be_older_than and \
856                    apt_pkg.VersionCompare(new_version, existent_version) > -1:
857                     ch = self.pkg.changes
858                     cansave = 0
859                     if ch.get('distribution-version', {}).has_key(suite):
860                     # we really use the other suite, ignoring the conflicting one ...
861                         addsuite = ch["distribution-version"][suite]
862
863                         add_version = self.get_anyversion(query_result, addsuite)
864                         target_version = self.get_anyversion(query_result, target_suite)
865
866                         if not add_version:
867                             # not add_version can only happen if we map to a suite
868                             # that doesn't enhance the suite we're propup'ing from.
869                             # so "propup-ver x a b c; map a d" is a problem only if
870                             # d doesn't enhance a.
871                             #
872                             # i think we could always propagate in this case, rather
873                             # than complaining. either way, this isn't a REJECT issue
874                             #
875                             # And - we really should complain to the dorks who configured dak
876                             self.reject("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite), "Warning: ")
877                             self.pkg.changes.setdefault("propdistribution", {})
878                             self.pkg.changes["propdistribution"][addsuite] = 1
879                             cansave = 1
880                         elif not target_version:
881                             # not targets_version is true when the package is NEW
882                             # we could just stick with the "...old version..." REJECT
883                             # for this, I think.
884                             self.reject("Won't propogate NEW packages.")
885                         elif apt_pkg.VersionCompare(new_version, add_version) < 0:
886                             # propogation would be redundant. no need to reject though.
887                             self.reject("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite), "Warning: ")
888                             cansave = 1
889                         elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
890                              apt_pkg.VersionCompare(add_version, target_version) >= 0:
891                             # propogate!!
892                             self.reject("Propogating upload to %s" % (addsuite), "Warning: ")
893                             self.pkg.changes.setdefault("propdistribution", {})
894                             self.pkg.changes["propdistribution"][addsuite] = 1
895                             cansave = 1
896
897                     if not cansave:
898                         self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
899
900     ################################################################################
901
902     def check_binary_against_db(self, file):
903         self.reject_message = ""
904         files = self.pkg.files
905
906         # Ensure version is sane
907         q = self.projectB.query("""
908 SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
909                                      architecture a
910  WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
911    AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
912                                 % (files[file]["package"],
913                                    files[file]["architecture"]))
914         self.cross_suite_version_check(q.getresult(), file, files[file]["version"])
915
916         # Check for any existing copies of the file
917         q = self.projectB.query("""
918 SELECT b.id FROM binaries b, architecture a
919  WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
920    AND a.id = b.architecture"""
921                                 % (files[file]["package"],
922                                    files[file]["version"],
923                                    files[file]["architecture"]))
924         if q.getresult():
925             self.reject("%s: can not overwrite existing copy already in the archive." % (file))
926
927         return self.reject_message
928
929     ################################################################################
930
931     def check_source_against_db(self, file):
932         self.reject_message = ""
933         dsc = self.pkg.dsc
934
935         # Ensure version is sane
936         q = self.projectB.query("""
937 SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
938  WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")))
939         self.cross_suite_version_check(q.getresult(), file, dsc.get("version"))
940
941         return self.reject_message
942
943     ################################################################################
944
945     # **WARNING**
946     # NB: this function can remove entries from the 'files' index [if
947     # the .orig.tar.gz is a duplicate of the one in the archive]; if
948     # you're iterating over 'files' and call this function as part of
949     # the loop, be sure to add a check to the top of the loop to
950     # ensure you haven't just tried to dereference the deleted entry.
951     # **WARNING**
952
953     def check_dsc_against_db(self, file):
954         self.reject_message = ""
955         files = self.pkg.files
956         dsc_files = self.pkg.dsc_files
957         legacy_source_untouchable = self.pkg.legacy_source_untouchable
958         self.pkg.orig_tar_gz = None
959
960         # Try and find all files mentioned in the .dsc.  This has
961         # to work harder to cope with the multiple possible
962         # locations of an .orig.tar.gz.
963         # The ordering on the select is needed to pick the newest orig
964         # when it exists in multiple places.
965         for dsc_file in dsc_files.keys():
966             found = None
967             if files.has_key(dsc_file):
968                 actual_md5 = files[dsc_file]["md5sum"]
969                 actual_size = int(files[dsc_file]["size"])
970                 found = "%s in incoming" % (dsc_file)
971                 # Check the file does not already exist in the archive
972                 q = self.projectB.query("SELECT f.size, f.md5sum, l.path, f.filename FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location ORDER BY f.id DESC" % (dsc_file))
973                 ql = q.getresult()
974                 # Strip out anything that isn't '%s' or '/%s$'
975                 for i in ql:
976                     if i[3] != dsc_file and i[3][-(len(dsc_file)+1):] != '/'+dsc_file:
977                         ql.remove(i)
978
979                 # "[dak] has not broken them.  [dak] has fixed a
980                 # brokenness.  Your crappy hack exploited a bug in
981                 # the old dinstall.
982                 #
983                 # "(Come on!  I thought it was always obvious that
984                 # one just doesn't release different files with
985                 # the same name and version.)"
986                 #                        -- ajk@ on d-devel@l.d.o
987
988                 if ql:
989                     # Ignore exact matches for .orig.tar.gz
990                     match = 0
991                     if dsc_file.endswith(".orig.tar.gz"):
992                         for i in ql:
993                             if files.has_key(dsc_file) and \
994                                int(files[dsc_file]["size"]) == int(i[0]) and \
995                                files[dsc_file]["md5sum"] == i[1]:
996                                 self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ")
997                                 del files[dsc_file]
998                                 self.pkg.orig_tar_gz = i[2] + i[3]
999                                 match = 1
1000
1001                     if not match:
1002                         self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file))
1003             elif dsc_file.endswith(".orig.tar.gz"):
1004                 # Check in the pool
1005                 q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
1006                 ql = q.getresult()
1007                 # Strip out anything that isn't '%s' or '/%s$'
1008                 for i in ql:
1009                     if i[1] != dsc_file and i[1][-(len(dsc_file)+1):] != '/'+dsc_file:
1010                         ql.remove(i)
1011
1012                 if ql:
1013                     # Unfortunately, we may get more than one match here if,
1014                     # for example, the package was in potato but had an -sa
1015                     # upload in woody.  So we need to choose the right one.
1016
1017                     x = ql[0]; # default to something sane in case we don't match any or have only one
1018
1019                     if len(ql) > 1:
1020                         for i in ql:
1021                             old_file = i[0] + i[1]
1022                             old_file_fh = utils.open_file(old_file)
1023                             actual_md5 = apt_pkg.md5sum(old_file_fh)
1024                             old_file_fh.close()
1025                             actual_size = os.stat(old_file)[stat.ST_SIZE]
1026                             if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
1027                                 x = i
1028                             else:
1029                                 legacy_source_untouchable[i[3]] = ""
1030
1031                     old_file = x[0] + x[1]
1032                     old_file_fh = utils.open_file(old_file)
1033                     actual_md5 = apt_pkg.md5sum(old_file_fh)
1034                     old_file_fh.close()
1035                     actual_size = os.stat(old_file)[stat.ST_SIZE]
1036                     found = old_file
1037                     suite_type = x[2]
1038                     dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
1039                     # See install() in process-accepted...
1040                     self.pkg.orig_tar_id = x[3]
1041                     self.pkg.orig_tar_gz = old_file
1042                     if suite_type == "legacy" or suite_type == "legacy-mixed":
1043                         self.pkg.orig_tar_location = "legacy"
1044                     else:
1045                         self.pkg.orig_tar_location = x[4]
1046                 else:
1047                     # Not there? Check the queue directories...
1048
1049                     in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file)
1050                     # See process_it() in 'dak process-unchecked' for explanation of this
1051                     # in_unchecked check dropped by ajt 2007-08-28, how did that
1052                     # ever make sense?
1053                     if os.path.exists(in_unchecked) and False:
1054                         return (self.reject_message, in_unchecked)
1055                     else:
1056                         for directory in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates" ]:
1057                             in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (directory)],dsc_file)
1058                             if os.path.exists(in_otherdir):
1059                                 in_otherdir_fh = utils.open_file(in_otherdir)
1060                                 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
1061                                 in_otherdir_fh.close()
1062                                 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
1063                                 found = in_otherdir
1064                                 self.pkg.orig_tar_gz = in_otherdir
1065
1066                     if not found:
1067                         self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file))
1068                         self.pkg.orig_tar_gz = -1
1069                         continue
1070             else:
1071                 self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file))
1072                 continue
1073             if actual_md5 != dsc_files[dsc_file]["md5sum"]:
1074                 self.reject("md5sum for %s doesn't match %s." % (found, file))
1075             if actual_size != int(dsc_files[dsc_file]["size"]):
1076                 self.reject("size for %s doesn't match %s." % (found, file))
1077
1078         return (self.reject_message, None)
1079
1080     def do_query(self, q):
1081         sys.stderr.write("query: \"%s\" ... " % (q))
1082         before = time.time()
1083         r = self.projectB.query(q)
1084         time_diff = time.time()-before
1085         sys.stderr.write("took %.3f seconds.\n" % (time_diff))
1086         return r