]> git.decadent.org.uk Git - dak.git/blob - daklib/queue.py
1dd3a75e2d83c9ce70d527a955fb8d7740640cec
[dak.git] / daklib / queue.py
1 #!/usr/bin/env python
2
3 # Queue utility functions for dak
4 # Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006  James Troup <james@nocrew.org>
5
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 2 of the License, or
9 # (at your option) any later version.
10
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14 # GNU General Public License for more details.
15
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software
18 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
19
20 ###############################################################################
21
22 import cPickle, errno, os, pg, re, stat, sys, time
23 import apt_inst, apt_pkg
24 import utils, database
25 from dak_exceptions import *
26
27 from types import *
28
29 ###############################################################################
30
31 re_isanum = re.compile (r"^\d+$")
32 re_default_answer = re.compile(r"\[(.*)\]")
33 re_fdnic = re.compile(r"\n\n")
34 re_bin_only_nmu = re.compile(r"\+b\d+$")
35
36 ################################################################################
37
38 # Determine what parts in a .changes are NEW
39
def determine_new(changes, files, projectB, warn=1):
    """Return a dict (package name -> metadata) describing the parts of a
    .changes upload that have no override entry yet, i.e. are NEW.

    changes  -- parsed .changes mapping; must contain a "suite" dict
    files    -- per-file metadata mapping from the upload
    projectB -- open pg database connection used for override lookups
    warn     -- when true, print warnings for (old)stable uploads and for
                packages already present in another component

    Side effect: removes the "new" marker from files[] entries whose
    package turns out to have an override in some target suite.
    """
    new = {}

    # Build up a list of potentially new things
    for file_entry in files.keys():
        f = files[file_entry]
        # Skip byhand elements
        if f["type"] == "byhand":
            continue
        pkg = f["package"]
        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f)
        component = f["component"]

        # Source packages always carry the pseudo-priority "source".
        if file_type == "dsc":
            priority = "source"
        if not new.has_key(pkg):
            new[pkg] = {}
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        else:
            old_type = new[pkg]["type"]
            if old_type != file_type:
                # source gets trumped by deb or udeb
                if old_type == "dsc":
                    new[pkg]["priority"] = priority
                    new[pkg]["section"] = section
                    new[pkg]["type"] = file_type
                    new[pkg]["component"] = component
        new[pkg]["files"].append(file_entry)
        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]

    # Drop any package that already has an override in a target suite.
    for suite in changes["suite"].keys():
        suite_id = database.get_suite_id(suite)
        for pkg in new.keys():
            component_id = database.get_component_id(new[pkg]["component"])
            type_id = database.get_override_type_id(new[pkg]["type"])
            # NOTE(review): pkg is interpolated directly into the SQL --
            # safe only if package names are validated upstream; confirm.
            q = projectB.query("SELECT package FROM override WHERE package = '%s' AND suite = %s AND component = %s AND type = %s" % (pkg, suite_id, component_id, type_id))
            ql = q.getresult()
            if ql:
                for file_entry in new[pkg]["files"]:
                    if files[file_entry].has_key("new"):
                        del files[file_entry]["new"]
                del new[pkg]

    if warn:
        if changes["suite"].has_key("stable"):
            print "WARNING: overrides will be added for stable!"
            # NOTE(review): the OLDstable warning only fires when stable
            # is also targeted (nested if) -- looks unintentional, but
            # behaviour preserved here.
            if changes["suite"].has_key("oldstable"):
                print "WARNING: overrides will be added for OLDstable!"
        for pkg in new.keys():
            if new[pkg].has_key("othercomponents"):
                print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])

    return new
100
101 ################################################################################
102
def get_type(f):
    """Return the override type ("deb", "udeb" or "dsc") for file entry f.

    Prefers the explicit "dbtype" key; otherwise any source-related
    extension (dsc, tarballs, diffs) maps to "dsc".  Calls utils.fubar()
    (which aborts) on an unrecognised file type or an override type the
    database does not know about.
    """
    # Determine the type
    if f.has_key("dbtype"):
        file_type = f["dbtype"]
    elif f["type"] in [ "orig.tar.gz", "orig.tar.bz2", "tar.gz", "tar.bz2", "diff.gz", "diff.bz2", "dsc" ]:
        file_type = "dsc"
    else:
        # Bug fix: this branch previously referenced file_type before it
        # was ever assigned, raising NameError instead of reporting the
        # offending type; use the raw f["type"] value instead.
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (f["type"]))

    # Validate the override type against the database
    type_id = database.get_override_type_id(file_type)
    if type_id == -1:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))

    return file_type
118
119 ################################################################################
120
121 # check if section/priority values are valid
122
def check_valid(new):
    """Resolve section/priority names to database ids for each pending
    package, marking nonsensical combinations with an id of -1.

    Rules enforced:
      * only udeb/dsc entries may sit in a debian-installer section,
        and udebs may sit nowhere else;
      * the "source" priority is reserved for, and required by, dsc
        entries.
    """
    for name in new.keys():
        entry = new[name]
        sect = entry["section"]
        prio = entry["priority"]
        ftype = entry["type"]
        entry["section id"] = database.get_section_id(sect)
        entry["priority id"] = database.get_priority_id(entry["priority"])
        # Sanity checks
        in_di = "debian-installer" in sect
        if (in_di and ftype not in ("udeb", "dsc")) or (not in_di and ftype == "udeb"):
            entry["section id"] = -1
        # "source" priority and "dsc" type must come as a pair.
        if (prio == "source") != (ftype == "dsc"):
            entry["priority id"] = -1
137
138
139 ###############################################################################
140
141 # Convenience wrapper to carry around all the package information in
142
class Pkg:
    """Trivial attribute bag used to carry all per-upload package state.

    Any keyword arguments -- at construction time or via update() --
    simply become instance attributes.
    """

    def __init__(self, **kwargs):
        for key, value in kwargs.items():
            setattr(self, key, value)

    def update(self, **kwargs):
        for key, value in kwargs.items():
            setattr(self, key, value)
149
150 ###############################################################################
151
152 class Upload:
153
    def __init__(self, Cnf):
        """Set up an Upload helper: accept counters, empty package state,
        the shared template-substitution map, and a database connection.

        Cnf -- apt_pkg-style configuration object (Dinstall::*, DB::*).
        """
        self.Cnf = Cnf
        # Running totals over everything accepted in this run (see accept()).
        self.accept_count = 0
        self.accept_bytes = 0L
        self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
                       legacy_source_untouchable = {})

        # Initialize the substitution template mapping global
        Subst = self.Subst = {}
        Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"]
        Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"]
        Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"]
        Subst["__DAK_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]

        # Open the archive database and prime the database module's caches.
        self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
        database.init(Cnf, self.projectB)
170
171     ###########################################################################
172
173     def init_vars (self):
174         self.pkg.changes.clear()
175         self.pkg.dsc.clear()
176         self.pkg.files.clear()
177         self.pkg.dsc_files.clear()
178         self.pkg.legacy_source_untouchable.clear()
179         self.pkg.orig_tar_id = None
180         self.pkg.orig_tar_location = ""
181         self.pkg.orig_tar_gz = None
182
183     ###########################################################################
184
    def update_vars (self):
        """Restore pickled package state from the .dak file sitting next
        to the .changes file (written earlier by dump_vars()).

        The load order below must match the dump order in dump_vars()
        exactly.
        """
        # foo.changes -> foo.dak
        dump_filename = self.pkg.changes_file[:-8]+".dak"
        dump_file = utils.open_file(dump_filename)
        # NOTE(review): unpickling is only safe because .dak files are
        # produced by dak itself; never point this at untrusted input.
        p = cPickle.Unpickler(dump_file)

        self.pkg.changes.update(p.load())
        self.pkg.dsc.update(p.load())
        self.pkg.files.update(p.load())
        self.pkg.dsc_files.update(p.load())
        self.pkg.legacy_source_untouchable.update(p.load())

        self.pkg.orig_tar_id = p.load()
        self.pkg.orig_tar_location = p.load()

        dump_file.close()
200
201     ###########################################################################
202
203     # This could just dump the dictionaries as is, but I'd like to
204     # avoid this so there's some idea of what process-accepted &
205     # process-new use from process-unchecked
206
    def dump_vars(self, dest_dir):
        """Pickle the interesting parts of the package state into
        <dest_dir>/<changes basename>.dak for later consumption by
        update_vars() (used by process-accepted and process-new).

        Only a whitelisted subset of each dictionary is dumped, so it
        stays explicit which fields the downstream tools depend on.
        """

        changes = self.pkg.changes
        dsc = self.pkg.dsc
        files = self.pkg.files
        dsc_files = self.pkg.dsc_files
        legacy_source_untouchable = self.pkg.legacy_source_untouchable
        orig_tar_id = self.pkg.orig_tar_id
        orig_tar_location = self.pkg.orig_tar_location

        # foo.changes -> foo.dak
        dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".dak")
        dump_file = utils.open_file(dump_filename, 'w')
        # Group-writable so other dak tools can rewrite the dump later.
        os.chmod(dump_filename, 0664)

        p = cPickle.Pickler(dump_file, 1)
        d_changes = {}
        d_dsc = {}
        d_files = {}
        d_dsc_files = {}

        ## files
        for file_entry in files.keys():
            d_files[file_entry] = {}
            for i in [ "package", "version", "architecture", "type", "size",
                       "md5sum", "sha1sum", "sha256sum", "component",
                       "location id", "source package", "source version",
                       "maintainer", "dbtype", "files id", "new",
                       "section", "priority", "othercomponents",
                       "pool name", "original component" ]:
                if files[file_entry].has_key(i):
                    d_files[file_entry][i] = files[file_entry][i]
        ## changes
        # Mandatory changes fields
        for i in [ "distribution", "source", "architecture", "version",
                   "maintainer", "urgency", "fingerprint", "changedby822",
                   "changedby2047", "changedbyname", "maintainer822",
                   "maintainer2047", "maintainername", "maintaineremail",
                   "closes", "changes" ]:
            d_changes[i] = changes[i]
        # Optional changes fields
        for i in [ "changed-by", "filecontents", "format", "process-new note", "adv id", "distribution-version",
                   "sponsoremail" ]:
            if changes.has_key(i):
                d_changes[i] = changes[i]
        ## dsc
        for i in [ "source", "version", "maintainer", "fingerprint",
                   "uploaders", "bts changelog", "dm-upload-allowed" ]:
            if dsc.has_key(i):
                d_dsc[i] = dsc[i]
        ## dsc_files
        for file_entry in dsc_files.keys():
            d_dsc_files[file_entry] = {}
            # Mandatory dsc_files fields
            for i in [ "size", "md5sum" ]:
                d_dsc_files[file_entry][i] = dsc_files[file_entry][i]
            # Optional dsc_files fields
            for i in [ "files id" ]:
                if dsc_files[file_entry].has_key(i):
                    d_dsc_files[file_entry][i] = dsc_files[file_entry][i]

        # Dump order must stay in sync with the load order in update_vars().
        for i in [ d_changes, d_dsc, d_files, d_dsc_files,
                   legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
            p.dump(i)
        dump_file.close()
271
272     ###########################################################################
273
274     # Set up the per-package template substitution mappings
275
276     def update_subst (self, reject_message = ""):
277         Subst = self.Subst
278         changes = self.pkg.changes
279         # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
280         if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
281             changes["architecture"] = { "Unknown" : "" }
282         # and maintainer2047 may not exist.
283         if not changes.has_key("maintainer2047"):
284             changes["maintainer2047"] = self.Cnf["Dinstall::MyEmailAddress"]
285
286         Subst["__ARCHITECTURE__"] = " ".join(changes["architecture"].keys())
287         Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
288         Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "")
289
290         # For source uploads the Changed-By field wins; otherwise Maintainer wins.
291         if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
292             Subst["__MAINTAINER_FROM__"] = changes["changedby2047"]
293             Subst["__MAINTAINER_TO__"] = "%s, %s" % (changes["changedby2047"],
294                                                      changes["maintainer2047"])
295             Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown")
296         else:
297             Subst["__MAINTAINER_FROM__"] = changes["maintainer2047"]
298             Subst["__MAINTAINER_TO__"] = changes["maintainer2047"]
299             Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown")
300
301         if "sponsoremail" in changes:
302             Subst["__MAINTAINER_TO__"] += ", %s"%changes["sponsoremail"]
303
304         if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
305             Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])
306
307         # Apply any global override of the Maintainer field
308         if self.Cnf.get("Dinstall::OverrideMaintainer"):
309             Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"]
310             Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"]
311
312         Subst["__REJECT_MESSAGE__"] = reject_message
313         Subst["__SOURCE__"] = changes.get("source", "Unknown")
314         Subst["__VERSION__"] = changes.get("version", "Unknown")
315
316     ###########################################################################
317
    def build_summaries(self):
        """Build the long and short textual summaries of the upload used
        in mails and on the console.

        Returns a (summary, short_summary) tuple; short_summary is the
        per-file portion without the changelog/override/announce text.
        """
        changes = self.pkg.changes
        files = self.pkg.files

        byhand = summary = new = ""

        # changes["distribution"] may not exist in corner cases
        # (e.g. unreadable changes files)
        if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
            changes["distribution"] = {}

        override_summary ="";
        file_keys = files.keys()
        file_keys.sort()
        for file_entry in file_keys:
            if files[file_entry].has_key("byhand"):
                byhand = 1
                summary += file_entry + " byhand\n"
            elif files[file_entry].has_key("new"):
                new = 1
                summary += "(new) %s %s %s\n" % (file_entry, files[file_entry]["priority"], files[file_entry]["section"])
                if files[file_entry].has_key("othercomponents"):
                    summary += "WARNING: Already present in %s distribution.\n" % (files[file_entry]["othercomponents"])
                # Show the deb's Description so the NEW reviewer doesn't
                # have to open the package by hand.
                if files[file_entry]["type"] == "deb":
                    deb_fh = utils.open_file(file_entry)
                    summary += apt_pkg.ParseSection(apt_inst.debExtractControl(deb_fh))["Description"] + '\n'
                    deb_fh.close()
            else:
                # Neither byhand nor new: show its eventual pool location.
                files[file_entry]["pool name"] = utils.poolify (changes.get("source",""), files[file_entry]["component"])
                destination = self.Cnf["Dir::PoolRoot"] + files[file_entry]["pool name"] + file_entry
                summary += file_entry + "\n  to " + destination + "\n"
                if not files[file_entry].has_key("type"):
                    files[file_entry]["type"] = "unknown"
                if files[file_entry]["type"] in ["deb", "udeb", "dsc"]:
                    # (queue/unchecked), there we have override entries already, use them
                    # (process-new), there we dont have override entries, use the newly generated ones.
                    override_prio = files[file_entry].get("override priority", files[file_entry]["priority"])
                    override_sect = files[file_entry].get("override section", files[file_entry]["section"])
                    override_summary += "%s - %s %s\n" % (file_entry, override_prio, override_sect)

        short_summary = summary

        # This is for direport's benefit...
        f = re_fdnic.sub("\n .\n", changes.get("changes",""))

        if byhand or new:
            summary += "Changes: " + f

        summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"

        # Announce with action=0: only collect the announcement text.
        summary += self.announce(short_summary, 0)

        return (summary, short_summary)
371
372     ###########################################################################
373
374     def close_bugs (self, summary, action):
375         changes = self.pkg.changes
376         Subst = self.Subst
377         Cnf = self.Cnf
378
379         bugs = changes["closes"].keys()
380
381         if not bugs:
382             return summary
383
384         bugs.sort()
385         summary += "Closing bugs: "
386         for bug in bugs:
387             summary += "%s " % (bug)
388             if action:
389                 Subst["__BUG_NUMBER__"] = bug
390                 if changes["distribution"].has_key("stable"):
391                     Subst["__STABLE_WARNING__"] = """
392 Note that this package is not part of the released stable Debian
393 distribution.  It may have dependencies on other unreleased software,
394 or other instabilities.  Please take care if you wish to install it.
395 The update will eventually make its way into the next released Debian
396 distribution."""
397                 else:
398                     Subst["__STABLE_WARNING__"] = ""
399                     mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-close")
400                     utils.send_mail (mail_message)
401         if action:
402             self.Logger.log(["closing bugs"]+bugs)
403         summary += "\n"
404
405         return summary
406
407     ###########################################################################
408
    def announce (self, short_summary, action):
        """Build (and, when action is true, send) the announcement mails
        for the distributions this upload targets.

        Returns a human-readable summary of what was (or would be)
        announced; folds in close_bugs() output when bug closing is
        enabled in the configuration.
        """
        Subst = self.Subst
        Cnf = self.Cnf
        changes = self.pkg.changes

        # Only do announcements for source uploads with a recent dpkg-dev installed
        if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):
            return ""

        lists_done = {}
        summary = ""
        Subst["__SHORT_SUMMARY__"] = short_summary

        for dist in changes["distribution"].keys():
            announce_list = Cnf.Find("Suite::%s::Announce" % (dist))
            # Announce to each list at most once per upload.
            if announce_list == "" or lists_done.has_key(announce_list):
                continue
            lists_done[announce_list] = 1
            summary += "Announcing to %s\n" % (announce_list)

            if action:
                Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
                # Bcc the package tracking system, when one is configured.
                if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
                    Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"])
                mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.announce")
                utils.send_mail (mail_message)

        if Cnf.FindB("Dinstall::CloseBugs"):
            summary = self.close_bugs(summary, action)

        return summary
440
441     ###########################################################################
442
    def accept (self, summary, short_summary):
        """Accept an upload: dump state, move everything into the
        accepted queue, send the accepted mail and announcements, write
        the BTS version-tracking support files, and register the upload
        with the "accepted" build queue.

        summary / short_summary -- output of build_summaries().
        """
        Cnf = self.Cnf
        Subst = self.Subst
        files = self.pkg.files
        changes = self.pkg.changes
        changes_file = self.pkg.changes_file
        dsc = self.pkg.dsc

        print "Accepting."
        self.Logger.log(["Accepting changes",changes_file])

        # Preserve the package state alongside the accepted files.
        self.dump_vars(Cnf["Dir::Queue::Accepted"])

        # Move all the files into the accepted directory
        utils.move(changes_file, Cnf["Dir::Queue::Accepted"])
        file_keys = files.keys()
        for file_entry in file_keys:
            utils.move(file_entry, Cnf["Dir::Queue::Accepted"])
            self.accept_bytes += float(files[file_entry]["size"])
        self.accept_count += 1

        # Send accept mail, announce to lists, close bugs and check for
        # override disparities
        if not Cnf["Dinstall::Options::No-Mail"]:
            Subst["__SUITE__"] = ""
            Subst["__SUMMARY__"] = summary
            mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
            utils.send_mail(mail_message)
            self.announce(short_summary, 1)


        ## Helper stuff for DebBugs Version Tracking
        if Cnf.Find("Dir::Queue::BTSVersionTrack"):
            # ??? once queue/* is cleared on *.d.o and/or reprocessed
            # the conditionalization on dsc["bts changelog"] should be
            # dropped.

            # Write out the version history from the changelog
            if changes["architecture"].has_key("source") and \
               dsc.has_key("bts changelog"):

                # Write a dot-prefixed temp file first, then rename, so
                # consumers never see a half-written .versions file.
                temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                                    dotprefix=1, perms=0644)
                version_history = utils.open_file(temp_filename, 'w')
                version_history.write(dsc["bts changelog"])
                version_history.close()
                filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                      changes_file[:-8]+".versions")
                os.rename(temp_filename, filename)

            # Write out the binary -> source mapping.
            temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                                dotprefix=1, perms=0644)
            debinfo = utils.open_file(temp_filename, 'w')
            for file_entry in file_keys:
                f = files[file_entry]
                if f["type"] == "deb":
                    line = " ".join([f["package"], f["version"],
                                     f["architecture"], f["source package"],
                                     f["source version"]])
                    debinfo.write(line+"\n")
            debinfo.close()
            filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                  changes_file[:-8]+".debinfo")
            os.rename(temp_filename, filename)

        self.queue_build("accepted", Cnf["Dir::Queue::Accepted"])
510
511     ###########################################################################
512
    def queue_build (self, queue, path):
        """Register the upload's files with a build queue (e.g.
        "accepted") so queued packages can be auto-built.

        queue -- queue name, resolved to an id via the database
        path  -- directory currently holding the files

        For each QueueBuild-enabled suite, files are copied (security
        builds) or symlinked into the queue-build area and recorded in
        the queue_build table; the pool .orig.tar.gz, if any, is linked
        in too.  All DB work happens inside one transaction.
        """
        Cnf = self.Cnf
        Subst = self.Subst
        files = self.pkg.files
        changes = self.pkg.changes
        changes_file = self.pkg.changes_file
        dsc = self.pkg.dsc
        file_keys = files.keys()

        ## Special support to enable clean auto-building of queued packages
        queue_id = database.get_or_set_queue_id(queue)

        self.projectB.query("BEGIN WORK")
        for suite in changes["distribution"].keys():
            if suite not in Cnf.ValueList("Dinstall::QueueBuildSuites"):
                continue
            suite_id = database.get_suite_id(suite)
            dest_dir = Cnf["Dir::QueueBuild"]
            if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                dest_dir = os.path.join(dest_dir, suite)
            for file_entry in file_keys:
                src = os.path.join(path, file_entry)
                dest = os.path.join(dest_dir, file_entry)
                if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                    # Copy it since the original won't be readable by www-data
                    utils.copy(src, dest)
                else:
                    # Create a symlink to it
                    os.symlink(src, dest)
                # Add it to the list of packages for later processing by apt-ftparchive
                self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
            # If the .orig.tar.gz is in the pool, create a symlink to
            # it (if one doesn't already exist)
            if self.pkg.orig_tar_id:
                # Determine the .orig.tar.gz file name
                # NOTE(review): if no dsc_files key ends in .orig.tar.gz,
                # filename stays unbound and the join below raises
                # NameError -- presumably orig_tar_id implies a match;
                # confirm against the checks in process-unchecked.
                for dsc_file in self.pkg.dsc_files.keys():
                    if dsc_file.endswith(".orig.tar.gz"):
                        filename = dsc_file
                dest = os.path.join(dest_dir, filename)
                # If it doesn't exist, create a symlink
                if not os.path.exists(dest):
                    # Find the .orig.tar.gz in the pool
                    q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id))
                    ql = q.getresult()
                    if not ql:
                        utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id))
                    src = os.path.join(ql[0][0], ql[0][1])
                    os.symlink(src, dest)
                    # Add it to the list of packages for later processing by apt-ftparchive
                    self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
                # if it does, update things to ensure it's not removed prematurely
                else:
                    self.projectB.query("UPDATE queue_build SET in_queue = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id))

        self.projectB.query("COMMIT WORK")
568
569     ###########################################################################
570
571     def check_override (self):
572         Subst = self.Subst
573         changes = self.pkg.changes
574         files = self.pkg.files
575         Cnf = self.Cnf
576
577         # Abandon the check if:
578         #  a) it's a non-sourceful upload
579         #  b) override disparity checks have been disabled
580         #  c) we're not sending mail
581         if not changes["architecture"].has_key("source") or \
582            not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
583            Cnf["Dinstall::Options::No-Mail"]:
584             return
585
586         summary = ""
587         file_keys = files.keys()
588         file_keys.sort()
589         for file_entry in file_keys:
590             if not files[file_entry].has_key("new") and files[file_entry]["type"] == "deb":
591                 section = files[file_entry]["section"]
592                 override_section = files[file_entry]["override section"]
593                 if section.lower() != override_section.lower() and section != "-":
594                     summary += "%s: package says section is %s, override says %s.\n" % (file_entry, section, override_section)
595                 priority = files[file_entry]["priority"]
596                 override_priority = files[file_entry]["override priority"]
597                 if priority != override_priority and priority != "-":
598                     summary += "%s: package says priority is %s, override says %s.\n" % (file_entry, priority, override_priority)
599
600         if summary == "":
601             return
602
603         Subst["__SUMMARY__"] = summary
604         mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/process-unchecked.override-disparity")
605         utils.send_mail(mail_message)
606
607     ###########################################################################
608
    def force_reject (self, files):
        """Forcefully move files from the current directory to the
           reject directory.  If any file already exists in the reject
           directory it will be moved to the morgue to make way for
           the new file."""

        Cnf = self.Cnf

        for file_entry in files:
            # Skip any files which don't exist or which we don't have permission to copy.
            if os.access(file_entry,os.R_OK) == 0:
                continue
            dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file_entry)
            try:
                # O_EXCL claims the destination atomically, so a racing
                # process cannot slip a file in underneath us.
                dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
            except OSError, e:
                # File exists?  Let's try and move it to the morgue
                if errno.errorcode[e.errno] == 'EEXIST':
                    morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file_entry)
                    try:
                        # Pick a non-clashing name in the morgue.
                        morgue_file = utils.find_next_free(morgue_file)
                    except NoFreeFilenameError:
                        # Something's either gone badly Pete Tong, or
                        # someone is trying to exploit us.
                        utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file_entry))
                        return
                    utils.move(dest_file, morgue_file, perms=0660)
                    try:
                        # Retry the atomic claim now the slot is free.
                        dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
                    except OSError, e:
                        # Likewise
                        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
                        return
                else:
                    raise
            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file_entry, dest_file, 1, perms=0660)
            os.close(dest_fd)
648
649     ###########################################################################
650
    def do_reject (self, manual = 0, reject_message = ""):
        """Reject the current upload: move its files into the reject
        directory, write a <changes-basename>.reason file there and
        (unless mail is disabled) send a rejection mail.

        manual         -- non-zero for an interactive rejection; if no
                          reject_message was supplied, an editor is
                          spawned so the operator can write one.
        reject_message -- the rejection text.

        Returns 1 if the operator abandoned the rejection, 0 once the
        rejection completed; exits the process on a Quit answer.
        """
        # If we weren't given a manual rejection message, spawn an
        # editor so the user can add one in...
        if manual and not reject_message:
            temp_filename = utils.temp_filename()
            editor = os.environ.get("EDITOR","vi")
            answer = 'E'
            # Loop until the operator accepts the message; 'E' re-opens
            # the editor on the same temp file.
            while answer == 'E':
                os.system("%s %s" % (editor, temp_filename))
                temp_fh = utils.open_file(temp_filename)
                reject_message = "".join(temp_fh.readlines())
                temp_fh.close()
                print "Reject message:"
                print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
                prompt = "[R]eject, Edit, Abandon, Quit ?"
                answer = "XXX"
                while prompt.find(answer) == -1:
                    answer = utils.our_raw_input(prompt)
                    m = re_default_answer.search(prompt)
                    # An empty answer selects the default, i.e. the
                    # bracketed letter in the prompt ('R').
                    if answer == "":
                        answer = m.group(1)
                    answer = answer[:1].upper()
            os.unlink(temp_filename)
            if answer == 'A':
                return 1
            elif answer == 'Q':
                sys.exit(0)

        print "Rejecting.\n"

        Cnf = self.Cnf
        Subst = self.Subst
        pkg = self.pkg

        # <changes basename>.reason in the reject dir ([:-8] strips the
        # ".changes" suffix).
        reason_filename = pkg.changes_file[:-8] + ".reason"
        reason_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename

        # Move all the files into the reject directory
        reject_files = pkg.files.keys() + [pkg.changes_file]
        self.force_reject(reject_files)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reason_filename):
            os.unlink(reason_filename)
        # O_EXCL => raise OSError instead of following a file someone
        # recreated behind our back.
        reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)

        if not manual:
            # Automatic rejection: mail comes from the archive's own
            # address and the raw reject_message is the .reason file.
            Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
            Subst["__MANUAL_REJECT_MESSAGE__"] = ""
            Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
            os.write(reason_fd, reject_message)
            reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
        else:
            # Build up the rejection email
            user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"])

            Subst["__REJECTOR_ADDRESS__"] = user_email_address
            Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
            Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
            reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
            # Write the rejection email out as the <foo>.reason file
            os.write(reason_fd, reject_mail_message)

        os.close(reason_fd)

        # Send the rejection mail if appropriate
        if not Cnf["Dinstall::Options::No-Mail"]:
            utils.send_mail(reject_mail_message)

        self.Logger.log(["rejected", pkg.changes_file])
        return 0
723
724     ################################################################################
725
726     # Ensure that source exists somewhere in the archive for the binary
727     # upload being processed.
728     #
729     # (1) exact match                      => 1.0-3
730     # (2) Bin-only NMU                     => 1.0-3+b1 , 1.0-3.1+b1
731
732     def source_exists (self, package, source_version, suites = ["any"]):
733         okay = 1
734         for suite in suites:
735             if suite == "any":
736                 que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
737                     (package)
738             else:
739                 # source must exist in suite X, or in some other suite that's
740                 # mapped to X, recursively... silent-maps are counted too,
741                 # unreleased-maps aren't.
742                 maps = self.Cnf.ValueList("SuiteMappings")[:]
743                 maps.reverse()
744                 maps = [ m.split() for m in maps ]
745                 maps = [ (x[1], x[2]) for x in maps
746                                 if x[0] == "map" or x[0] == "silent-map" ]
747                 s = [suite]
748                 for x in maps:
749                     if x[1] in s and x[0] not in s:
750                         s.append(x[0])
751
752                 que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, " OR ".join(["su.suite_name = '%s'" % a for a in s]))
753             q = self.projectB.query(que)
754
755             # Reduce the query results to a list of version numbers
756             ql = [ i[0] for i in q.getresult() ]
757
758             # Try (1)
759             if source_version in ql:
760                 continue
761
762             # Try (2)
763             orig_source_version = re_bin_only_nmu.sub('', source_version)
764             if orig_source_version in ql:
765                 continue
766
767             # No source found...
768             okay = 0
769             break
770         return okay
771
772     ################################################################################
773
774     def in_override_p (self, package, component, suite, binary_type, file):
775         files = self.pkg.files
776
777         if binary_type == "": # must be source
778             file_type = "dsc"
779         else:
780             file_type = binary_type
781
782         # Override suite name; used for example with proposed-updates
783         if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
784             suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)]
785
786         # Avoid <undef> on unknown distributions
787         suite_id = database.get_suite_id(suite)
788         if suite_id == -1:
789             return None
790         component_id = database.get_component_id(component)
791         type_id = database.get_override_type_id(file_type)
792
793         q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
794                            % (package, suite_id, component_id, type_id))
795         result = q.getresult()
796         # If checking for a source package fall back on the binary override type
797         if file_type == "dsc" and not result:
798             deb_type_id = database.get_override_type_id("deb")
799             udeb_type_id = database.get_override_type_id("udeb")
800             q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
801                                % (package, suite_id, component_id, deb_type_id, udeb_type_id))
802             result = q.getresult()
803
804         # Remember the section and priority so we can check them later if appropriate
805         if result:
806             files[file]["override section"] = result[0][0]
807             files[file]["override priority"] = result[0][1]
808
809         return result
810
811     ################################################################################
812
813     def reject (self, str, prefix="Rejected: "):
814         if str:
815             # Unlike other rejects we add new lines first to avoid trailing
816             # new lines when this message is passed back up to a caller.
817             if self.reject_message:
818                 self.reject_message += "\n"
819             self.reject_message += prefix + str
820
821     ################################################################################
822
823     def get_anyversion(self, query_result, suite):
824         anyversion=None
825         anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
826         for (v, s) in query_result:
827             if s in [ x.lower() for x in anysuite ]:
828                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
829                     anyversion=v
830         return anyversion
831
832     ################################################################################
833
834     def cross_suite_version_check(self, query_result, file, new_version):
835         """Ensure versions are newer than existing packages in target
836         suites and that cross-suite version checking rules as
837         set out in the conf file are satisfied."""
838
839         # Check versions for each target suite
840         for target_suite in self.pkg.changes["distribution"].keys():
841             must_be_newer_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
842             must_be_older_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
843             # Enforce "must be newer than target suite" even if conffile omits it
844             if target_suite not in must_be_newer_than:
845                 must_be_newer_than.append(target_suite)
846             for entry in query_result:
847                 existent_version = entry[0]
848                 suite = entry[1]
849                 if suite in must_be_newer_than and \
850                    apt_pkg.VersionCompare(new_version, existent_version) < 1:
851                     self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
852                 if suite in must_be_older_than and \
853                    apt_pkg.VersionCompare(new_version, existent_version) > -1:
854                     ch = self.pkg.changes
855                     cansave = 0
856                     if ch.get('distribution-version', {}).has_key(suite):
857                     # we really use the other suite, ignoring the conflicting one ...
858                         addsuite = ch["distribution-version"][suite]
859
860                         add_version = self.get_anyversion(query_result, addsuite)
861                         target_version = self.get_anyversion(query_result, target_suite)
862
863                         if not add_version:
864                             # not add_version can only happen if we map to a suite
865                             # that doesn't enhance the suite we're propup'ing from.
866                             # so "propup-ver x a b c; map a d" is a problem only if
867                             # d doesn't enhance a.
868                             #
869                             # i think we could always propagate in this case, rather
870                             # than complaining. either way, this isn't a REJECT issue
871                             #
872                             # And - we really should complain to the dorks who configured dak
873                             self.reject("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite), "Warning: ")
874                             self.pkg.changes.setdefault("propdistribution", {})
875                             self.pkg.changes["propdistribution"][addsuite] = 1
876                             cansave = 1
877                         elif not target_version:
878                             # not targets_version is true when the package is NEW
879                             # we could just stick with the "...old version..." REJECT
880                             # for this, I think.
881                             self.reject("Won't propogate NEW packages.")
882                         elif apt_pkg.VersionCompare(new_version, add_version) < 0:
883                             # propogation would be redundant. no need to reject though.
884                             self.reject("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite), "Warning: ")
885                             cansave = 1
886                         elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
887                              apt_pkg.VersionCompare(add_version, target_version) >= 0:
888                             # propogate!!
889                             self.reject("Propogating upload to %s" % (addsuite), "Warning: ")
890                             self.pkg.changes.setdefault("propdistribution", {})
891                             self.pkg.changes["propdistribution"][addsuite] = 1
892                             cansave = 1
893
894                     if not cansave:
895                         self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
896
897     ################################################################################
898
899     def check_binary_against_db(self, file):
900         self.reject_message = ""
901         files = self.pkg.files
902
903         # Ensure version is sane
904         q = self.projectB.query("""
905 SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
906                                      architecture a
907  WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
908    AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
909                                 % (files[file]["package"],
910                                    files[file]["architecture"]))
911         self.cross_suite_version_check(q.getresult(), file, files[file]["version"])
912
913         # Check for any existing copies of the file
914         q = self.projectB.query("""
915 SELECT b.id FROM binaries b, architecture a
916  WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
917    AND a.id = b.architecture"""
918                                 % (files[file]["package"],
919                                    files[file]["version"],
920                                    files[file]["architecture"]))
921         if q.getresult():
922             self.reject("%s: can not overwrite existing copy already in the archive." % (file))
923
924         return self.reject_message
925
926     ################################################################################
927
928     def check_source_against_db(self, file):
929         self.reject_message = ""
930         dsc = self.pkg.dsc
931
932         # Ensure version is sane
933         q = self.projectB.query("""
934 SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
935  WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")))
936         self.cross_suite_version_check(q.getresult(), file, dsc.get("version"))
937
938         return self.reject_message
939
940     ################################################################################
941
942     # **WARNING**
943     # NB: this function can remove entries from the 'files' index [if
944     # the .orig.tar.gz is a duplicate of the one in the archive]; if
945     # you're iterating over 'files' and call this function as part of
946     # the loop, be sure to add a check to the top of the loop to
947     # ensure you haven't just tried to dereference the deleted entry.
948     # **WARNING**
949
950     def check_dsc_against_db(self, file):
951         self.reject_message = ""
952         files = self.pkg.files
953         dsc_files = self.pkg.dsc_files
954         legacy_source_untouchable = self.pkg.legacy_source_untouchable
955         self.pkg.orig_tar_gz = None
956
957         # Try and find all files mentioned in the .dsc.  This has
958         # to work harder to cope with the multiple possible
959         # locations of an .orig.tar.gz.
960         # The ordering on the select is needed to pick the newest orig
961         # when it exists in multiple places.
962         for dsc_file in dsc_files.keys():
963             found = None
964             if files.has_key(dsc_file):
965                 actual_md5 = files[dsc_file]["md5sum"]
966                 actual_size = int(files[dsc_file]["size"])
967                 found = "%s in incoming" % (dsc_file)
968                 # Check the file does not already exist in the archive
969                 q = self.projectB.query("SELECT f.size, f.md5sum, l.path, f.filename FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location ORDER BY f.id DESC" % (dsc_file))
970                 ql = q.getresult()
971                 # Strip out anything that isn't '%s' or '/%s$'
972                 for i in ql:
973                     if i[3] != dsc_file and i[3][-(len(dsc_file)+1):] != '/'+dsc_file:
974                         ql.remove(i)
975
976                 # "[dak] has not broken them.  [dak] has fixed a
977                 # brokenness.  Your crappy hack exploited a bug in
978                 # the old dinstall.
979                 #
980                 # "(Come on!  I thought it was always obvious that
981                 # one just doesn't release different files with
982                 # the same name and version.)"
983                 #                        -- ajk@ on d-devel@l.d.o
984
985                 if ql:
986                     # Ignore exact matches for .orig.tar.gz
987                     match = 0
988                     if dsc_file.endswith(".orig.tar.gz"):
989                         for i in ql:
990                             if files.has_key(dsc_file) and \
991                                int(files[dsc_file]["size"]) == int(i[0]) and \
992                                files[dsc_file]["md5sum"] == i[1]:
993                                 self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ")
994                                 del files[dsc_file]
995                                 self.pkg.orig_tar_gz = i[2] + i[3]
996                                 match = 1
997
998                     if not match:
999                         self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file))
1000             elif dsc_file.endswith(".orig.tar.gz"):
1001                 # Check in the pool
1002                 q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
1003                 ql = q.getresult()
1004                 # Strip out anything that isn't '%s' or '/%s$'
1005                 for i in ql:
1006                     if i[1] != dsc_file and i[1][-(len(dsc_file)+1):] != '/'+dsc_file:
1007                         ql.remove(i)
1008
1009                 if ql:
1010                     # Unfortunately, we may get more than one match here if,
1011                     # for example, the package was in potato but had an -sa
1012                     # upload in woody.  So we need to choose the right one.
1013
1014                     x = ql[0]; # default to something sane in case we don't match any or have only one
1015
1016                     if len(ql) > 1:
1017                         for i in ql:
1018                             old_file = i[0] + i[1]
1019                             old_file_fh = utils.open_file(old_file)
1020                             actual_md5 = apt_pkg.md5sum(old_file_fh)
1021                             old_file_fh.close()
1022                             actual_size = os.stat(old_file)[stat.ST_SIZE]
1023                             if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
1024                                 x = i
1025                             else:
1026                                 legacy_source_untouchable[i[3]] = ""
1027
1028                     old_file = x[0] + x[1]
1029                     old_file_fh = utils.open_file(old_file)
1030                     actual_md5 = apt_pkg.md5sum(old_file_fh)
1031                     old_file_fh.close()
1032                     actual_size = os.stat(old_file)[stat.ST_SIZE]
1033                     found = old_file
1034                     suite_type = x[2]
1035                     dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
1036                     # See install() in process-accepted...
1037                     self.pkg.orig_tar_id = x[3]
1038                     self.pkg.orig_tar_gz = old_file
1039                     if suite_type == "legacy" or suite_type == "legacy-mixed":
1040                         self.pkg.orig_tar_location = "legacy"
1041                     else:
1042                         self.pkg.orig_tar_location = x[4]
1043                 else:
1044                     # Not there? Check the queue directories...
1045
1046                     in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file)
1047                     # See process_it() in 'dak process-unchecked' for explanation of this
1048                     # in_unchecked check dropped by ajt 2007-08-28, how did that
1049                     # ever make sense?
1050                     if os.path.exists(in_unchecked) and False:
1051                         return (self.reject_message, in_unchecked)
1052                     else:
1053                         for directory in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates" ]:
1054                             in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (directory)],dsc_file)
1055                             if os.path.exists(in_otherdir):
1056                                 in_otherdir_fh = utils.open_file(in_otherdir)
1057                                 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
1058                                 in_otherdir_fh.close()
1059                                 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
1060                                 found = in_otherdir
1061                                 self.pkg.orig_tar_gz = in_otherdir
1062
1063                     if not found:
1064                         self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file))
1065                         self.pkg.orig_tar_gz = -1
1066                         continue
1067             else:
1068                 self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file))
1069                 continue
1070             if actual_md5 != dsc_files[dsc_file]["md5sum"]:
1071                 self.reject("md5sum for %s doesn't match %s." % (found, file))
1072             if actual_size != int(dsc_files[dsc_file]["size"]):
1073                 self.reject("size for %s doesn't match %s." % (found, file))
1074
1075         return (self.reject_message, None)
1076
1077     def do_query(self, q):
1078         sys.stderr.write("query: \"%s\" ... " % (q))
1079         before = time.time()
1080         r = self.projectB.query(q)
1081         time_diff = time.time()-before
1082         sys.stderr.write("took %.3f seconds.\n" % (time_diff))
1083         return r