#!/usr/bin/env python

# Queue utility functions for dak
# Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006  James Troup <james@nocrew.org>

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

###############################################################################

import cPickle, errno, os, pg, re, stat, sys, time
import apt_inst, apt_pkg
import utils, database

from types import *

###############################################################################

re_isanum = re.compile(r"^\d+$")
re_default_answer = re.compile(r"\[(.*)\]")
re_fdnic = re.compile(r"\n\n")
re_bin_only_nmu = re.compile(r"\+b\d+$")

################################################################################
# Determine what parts in a .changes are NEW

def determine_new(changes, files, projectB, warn=1):
    new = {}

    # Build up a list of potentially new things
    for file in files.keys():
        f = files[file]
        # Skip byhand elements
        if f["type"] == "byhand":
            continue
        pkg = f["package"]
        priority = f["priority"]
        section = f["section"]
        type = get_type(f)
        component = f["component"]

        if type == "dsc":
            priority = "source"
        if not new.has_key(pkg):
            new[pkg] = {}
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        else:
            old_type = new[pkg]["type"]
            if old_type != type:
                # source gets trumped by deb or udeb
                if old_type == "dsc":
                    new[pkg]["priority"] = priority
                    new[pkg]["section"] = section
                    new[pkg]["type"] = type
                    new[pkg]["component"] = component
        new[pkg]["files"].append(file)
        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]

    for suite in changes["suite"].keys():
        suite_id = database.get_suite_id(suite)
        for pkg in new.keys():
            component_id = database.get_component_id(new[pkg]["component"])
            type_id = database.get_override_type_id(new[pkg]["type"])
            q = projectB.query("SELECT package FROM override WHERE package = '%s' AND suite = %s AND component = %s AND type = %s" % (pkg, suite_id, component_id, type_id))
            ql = q.getresult()
            if ql:
                for file in new[pkg]["files"]:
                    if files[file].has_key("new"):
                        del files[file]["new"]
                del new[pkg]

    if warn:
        if changes["suite"].has_key("stable"):
            print "WARNING: overrides will be added for stable!"
            if changes["suite"].has_key("oldstable"):
                print "WARNING: overrides will be added for OLDstable!"
        for pkg in new.keys():
            if new[pkg].has_key("othercomponents"):
                print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])

    return new

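# Illustrative shape of the dict returned by determine_new() (package name
# and values below are hypothetical):
#
#   { "foo": { "priority": "optional", "section": "utils", "type": "deb",
#              "component": "main", "files": [ "foo_1.0-1_i386.deb" ],
#              "othercomponents": "non-free" } }
#
# Packages that already have an override entry in one of the target suites
# are dropped from the dict before it is returned.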
################################################################################

def get_type(f):
    # Determine the type
    if f.has_key("dbtype"):
        type = f["dbtype"]
    elif f["type"] in [ "orig.tar.gz", "orig.tar.bz2", "tar.gz", "tar.bz2", "diff.gz", "diff.bz2", "dsc" ]:
        type = "dsc"
    else:
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (f["type"]))

    # Validate the override type
    type_id = database.get_override_type_id(type)
    if type_id == -1:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (type))

    return type

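# In short: all source-related members (.dsc, diffs, upstream tarballs) map
# to the "dsc" override type, while binaries keep whatever "dbtype" (deb or
# udeb) was recorded for them earlier in processing.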
################################################################################

# check if section/priority values are valid

def check_valid(new):
    for pkg in new.keys():
        section = new[pkg]["section"]
        priority = new[pkg]["priority"]
        type = new[pkg]["type"]
        new[pkg]["section id"] = database.get_section_id(section)
        new[pkg]["priority id"] = database.get_priority_id(new[pkg]["priority"])
        # Sanity checks
        di = section.find("debian-installer") != -1
        if (di and type != "udeb") or (not di and type == "udeb"):
            new[pkg]["section id"] = -1
        if (priority == "source" and type != "dsc") or \
           (priority != "source" and type == "dsc"):
            new[pkg]["priority id"] = -1


###############################################################################

# Convenience wrapper to carry around all the package information in one place

class Pkg:
    def __init__(self, **kwds):
        self.__dict__.update(kwds)

    def update(self, **kwds):
        self.__dict__.update(kwds)

###############################################################################

class Upload:

    def __init__(self, Cnf):
        self.Cnf = Cnf
        self.accept_count = 0
        self.accept_bytes = 0L
        self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
                       legacy_source_untouchable = {})

        # Initialize the substitution template mapping global
        Subst = self.Subst = {}
        Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"]
        Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"]
        Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"]
        Subst["__DAK_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]

        self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
        database.init(Cnf, self.projectB)

    ###########################################################################

    def init_vars (self):
        for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
            exec "self.pkg.%s.clear();" % (i)
        self.pkg.orig_tar_id = None
        self.pkg.orig_tar_location = ""
        self.pkg.orig_tar_gz = None

    ###########################################################################

    def update_vars (self):
        dump_filename = self.pkg.changes_file[:-8]+".dak"
        dump_file = utils.open_file(dump_filename)
        p = cPickle.Unpickler(dump_file)
        for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
            exec "self.pkg.%s.update(p.load());" % (i)
        for i in [ "orig_tar_id", "orig_tar_location" ]:
            exec "self.pkg.%s = p.load();" % (i)
        dump_file.close()

    ###########################################################################

    # This could just dump the dictionaries as is, but I'd like to
    # avoid this so there's some idea of what process-accepted &
    # process-new use from process-unchecked

    def dump_vars(self, dest_dir):
        for i in [ "changes", "dsc", "files", "dsc_files",
                   "legacy_source_untouchable", "orig_tar_id", "orig_tar_location" ]:
            exec "%s = self.pkg.%s;" % (i,i)
        dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".dak")
        dump_file = utils.open_file(dump_filename, 'w')
        try:
            os.chmod(dump_filename, 0660)
        except OSError, e:
            if errno.errorcode[e.errno] == 'EPERM':
                perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE])
                if perms & stat.S_IROTH:
                    utils.fubar("%s is world readable and chmod failed." % (dump_filename))
            else:
                raise

        p = cPickle.Pickler(dump_file, 1)
        for i in [ "d_changes", "d_dsc", "d_files", "d_dsc_files" ]:
            exec "%s = {}" % i
        ## files
        for file in files.keys():
            d_files[file] = {}
            for i in [ "package", "version", "architecture", "type", "size",
                       "md5sum", "component", "location id", "source package",
                       "source version", "maintainer", "dbtype", "files id",
                       "new", "section", "priority", "othercomponents",
                       "pool name", "original component" ]:
                if files[file].has_key(i):
                    d_files[file][i] = files[file][i]
        ## changes
        # Mandatory changes fields
        for i in [ "distribution", "source", "architecture", "version",
                   "maintainer", "urgency", "fingerprint", "changedby822",
                   "changedby2047", "changedbyname", "maintainer822",
                   "maintainer2047", "maintainername", "maintaineremail",
                   "closes", "changes" ]:
            d_changes[i] = changes[i]
        # Optional changes fields
        for i in [ "changed-by", "filecontents", "format", "process-new note", "adv id", "distribution-version" ]:
            if changes.has_key(i):
                d_changes[i] = changes[i]
        ## dsc
        for i in [ "source", "version", "maintainer", "fingerprint",
                   "uploaders", "bts changelog", "dm-upload-allowed" ]:
            if dsc.has_key(i):
                d_dsc[i] = dsc[i]
        ## dsc_files
        for file in dsc_files.keys():
            d_dsc_files[file] = {}
            # Mandatory dsc_files fields
            for i in [ "size", "md5sum" ]:
                d_dsc_files[file][i] = dsc_files[file][i]
            # Optional dsc_files fields
            for i in [ "files id" ]:
                if dsc_files[file].has_key(i):
                    d_dsc_files[file][i] = dsc_files[file][i]

        for i in [ d_changes, d_dsc, d_files, d_dsc_files,
                   legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
            p.dump(i)
        dump_file.close()

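    # The resulting <changes>.dak file is a cPickle stream holding, in this
    # order: the pruned changes, dsc, files and dsc_files dicts, followed by
    # legacy_source_untouchable, orig_tar_id and orig_tar_location -- the
    # same order update_vars() reads them back in.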
    ###########################################################################

    # Set up the per-package template substitution mappings

    def update_subst (self, reject_message = ""):
        Subst = self.Subst
        changes = self.pkg.changes
        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
            changes["architecture"] = { "Unknown" : "" }
        # and maintainer2047 may not exist.
        if not changes.has_key("maintainer2047"):
            changes["maintainer2047"] = self.Cnf["Dinstall::MyEmailAddress"]

        Subst["__ARCHITECTURE__"] = " ".join(changes["architecture"].keys())
        Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
            Subst["__MAINTAINER_FROM__"] = changes["changedby2047"]
            Subst["__MAINTAINER_TO__"] = "%s, %s" % (changes["changedby2047"],
                                                     changes["maintainer2047"])
            if "sponsoremail" in changes:
                Subst["__MAINTAINER_TO__"] += ", %s" % changes["sponsoremail"]
            Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown")
        else:
            Subst["__MAINTAINER_FROM__"] = changes["maintainer2047"]
            Subst["__MAINTAINER_TO__"] = changes["maintainer2047"]
            if "sponsoremail" in changes:
                Subst["__MAINTAINER_TO__"] += ", %s" % changes["sponsoremail"]
            Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown")
        if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
            Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if self.Cnf.get("Dinstall::OverrideMaintainer"):
            Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"]
            Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"]

        Subst["__REJECT_MESSAGE__"] = reject_message
        Subst["__SOURCE__"] = changes.get("source", "Unknown")
        Subst["__VERSION__"] = changes.get("version", "Unknown")

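    # A rough example of the per-package substitutions set up above (all
    # values hypothetical):
    #
    #   Subst["__ARCHITECTURE__"] = "source i386"
    #   Subst["__MAINTAINER_FROM__"] = "Jane Uploader <jane@example.org>"
    #   Subst["__MAINTAINER_TO__"] = "Jane Uploader <jane@example.org>, Joe Maintainer <joe@example.org>"
    #   Subst["__SOURCE__"] = "foo"
    #   Subst["__VERSION__"] = "1.0-1"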
    ###########################################################################

    def build_summaries(self):
        changes = self.pkg.changes
        files = self.pkg.files

        byhand = summary = new = ""

        # changes["distribution"] may not exist in corner cases
        # (e.g. unreadable changes files)
        if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
            changes["distribution"] = {}

        override_summary = ""
        file_keys = files.keys()
        file_keys.sort()
        for file in file_keys:
            if files[file].has_key("byhand"):
                byhand = 1
                summary += file + " byhand\n"
            elif files[file].has_key("new"):
                new = 1
                summary += "(new) %s %s %s\n" % (file, files[file]["priority"], files[file]["section"])
                if files[file].has_key("othercomponents"):
                    summary += "WARNING: Already present in %s distribution.\n" % (files[file]["othercomponents"])
                if files[file]["type"] == "deb":
                    deb_fh = utils.open_file(file)
                    summary += apt_pkg.ParseSection(apt_inst.debExtractControl(deb_fh))["Description"] + '\n'
                    deb_fh.close()
            else:
                files[file]["pool name"] = utils.poolify (changes.get("source",""), files[file]["component"])
                destination = self.Cnf["Dir::PoolRoot"] + files[file]["pool name"] + file
                summary += file + "\n  to " + destination + "\n"
                if not files[file].has_key("type"):
                    files[file]["type"] = "unknown"
                if files[file]["type"] in ["deb", "udeb", "dsc"]:
                    # In queue/unchecked we already have override entries, so use them;
                    # in process-new we don't, so use the newly generated ones.
                    override_prio = files[file].get("override priority", files[file]["priority"])
                    override_sect = files[file].get("override section", files[file]["section"])
                    override_summary += "%s - %s %s\n" % (file, override_prio, override_sect)

        short_summary = summary

        # This is for direport's benefit...
        f = re_fdnic.sub("\n .\n", changes.get("changes",""))

        if byhand or new:
            summary += "Changes: " + f

        summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"

        summary += self.announce(short_summary, 0)

        return (summary, short_summary)

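    # build_summaries() returns (summary, short_summary): short_summary is
    # just the per-file overview, while summary additionally carries the
    # changelog text (for byhand/NEW uploads), the override entries and any
    # announcement lines produced by announce(short_summary, 0).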
    ###########################################################################

    def close_bugs (self, summary, action):
        changes = self.pkg.changes
        Subst = self.Subst
        Cnf = self.Cnf

        bugs = changes["closes"].keys()

        if not bugs:
            return summary

        bugs.sort()
        summary += "Closing bugs: "
        for bug in bugs:
            summary += "%s " % (bug)
            if action:
                Subst["__BUG_NUMBER__"] = bug
                if changes["distribution"].has_key("stable"):
                    Subst["__STABLE_WARNING__"] = """
Note that this package is not part of the released stable Debian
distribution.  It may have dependencies on other unreleased software,
or other instabilities.  Please take care if you wish to install it.
The update will eventually make its way into the next released Debian
distribution."""
                else:
                    Subst["__STABLE_WARNING__"] = ""
                mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-close")
                utils.send_mail (mail_message)
        if action:
            self.Logger.log(["closing bugs"]+bugs)
        summary += "\n"

        return summary

    ###########################################################################

    def announce (self, short_summary, action):
        Subst = self.Subst
        Cnf = self.Cnf
        changes = self.pkg.changes

        # Only do announcements for source uploads with a recent dpkg-dev installed
        if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):
            return ""

        lists_done = {}
        summary = ""
        Subst["__SHORT_SUMMARY__"] = short_summary

        for dist in changes["distribution"].keys():
            list = Cnf.Find("Suite::%s::Announce" % (dist))
            if list == "" or lists_done.has_key(list):
                continue
            lists_done[list] = 1
            summary += "Announcing to %s\n" % (list)

            if action:
                Subst["__ANNOUNCE_LIST_ADDRESS__"] = list
                if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
                    Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"])
                mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.announce")
                utils.send_mail (mail_message)

        if Cnf.FindB("Dinstall::CloseBugs"):
            summary = self.close_bugs(summary, action)

        return summary

    ###########################################################################

    def accept (self, summary, short_summary):
        Cnf = self.Cnf
        Subst = self.Subst
        files = self.pkg.files
        changes = self.pkg.changes
        changes_file = self.pkg.changes_file
        dsc = self.pkg.dsc

        print "Accepting."
        self.Logger.log(["Accepting changes",changes_file])

        self.dump_vars(Cnf["Dir::Queue::Accepted"])

        # Move all the files into the accepted directory
        utils.move(changes_file, Cnf["Dir::Queue::Accepted"])
        file_keys = files.keys()
        for file in file_keys:
            utils.move(file, Cnf["Dir::Queue::Accepted"])
            self.accept_bytes += float(files[file]["size"])
        self.accept_count += 1

        # Send accept mail, announce to lists, close bugs and check for
        # override disparities
        if not Cnf["Dinstall::Options::No-Mail"]:
            Subst["__SUITE__"] = ""
            Subst["__SUMMARY__"] = summary
            mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
            utils.send_mail(mail_message)
            self.announce(short_summary, 1)


        ## Helper stuff for DebBugs Version Tracking
        if Cnf.Find("Dir::Queue::BTSVersionTrack"):
            # ??? once queue/* is cleared on *.d.o and/or reprocessed
            # the conditionalization on dsc["bts changelog"] should be
            # dropped.

            # Write out the version history from the changelog
            if changes["architecture"].has_key("source") and \
               dsc.has_key("bts changelog"):

                temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                                    dotprefix=1, perms=0644)
                version_history = utils.open_file(temp_filename, 'w')
                version_history.write(dsc["bts changelog"])
                version_history.close()
                filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                      changes_file[:-8]+".versions")
                os.rename(temp_filename, filename)

            # Write out the binary -> source mapping.
            temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                                dotprefix=1, perms=0644)
            debinfo = utils.open_file(temp_filename, 'w')
            for file in file_keys:
                f = files[file]
                if f["type"] == "deb":
                    line = " ".join([f["package"], f["version"],
                                     f["architecture"], f["source package"],
                                     f["source version"]])
                    debinfo.write(line+"\n")
            debinfo.close()
            filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                  changes_file[:-8]+".debinfo")
            os.rename(temp_filename, filename)

        self.queue_build("accepted", Cnf["Dir::Queue::Accepted"])

    ###########################################################################

    def queue_build (self, queue, path):
        Cnf = self.Cnf
        Subst = self.Subst
        files = self.pkg.files
        changes = self.pkg.changes
        changes_file = self.pkg.changes_file
        dsc = self.pkg.dsc
        file_keys = files.keys()

        ## Special support to enable clean auto-building of queued packages
        queue_id = database.get_or_set_queue_id(queue)

        self.projectB.query("BEGIN WORK")
        for suite in changes["distribution"].keys():
            if suite not in Cnf.ValueList("Dinstall::QueueBuildSuites"):
                continue
            suite_id = database.get_suite_id(suite)
            dest_dir = Cnf["Dir::QueueBuild"]
            if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                dest_dir = os.path.join(dest_dir, suite)
            for file in file_keys:
                src = os.path.join(path, file)
                dest = os.path.join(dest_dir, file)
                if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                    # Copy it since the original won't be readable by www-data
                    utils.copy(src, dest)
                else:
                    # Create a symlink to it
                    os.symlink(src, dest)
                # Add it to the list of packages for later processing by apt-ftparchive
                self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
            # If the .orig.tar.gz is in the pool, create a symlink to
            # it (if one doesn't already exist)
            if self.pkg.orig_tar_id:
                # Determine the .orig.tar.gz file name
                for dsc_file in self.pkg.dsc_files.keys():
                    if dsc_file.endswith(".orig.tar.gz"):
                        filename = dsc_file
                dest = os.path.join(dest_dir, filename)
                # If it doesn't exist, create a symlink
                if not os.path.exists(dest):
                    # Find the .orig.tar.gz in the pool
                    q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id))
                    ql = q.getresult()
                    if not ql:
                        utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id))
                    src = os.path.join(ql[0][0], ql[0][1])
                    os.symlink(src, dest)
                    # Add it to the list of packages for later processing by apt-ftparchive
                    self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
                # if it does, update things to ensure it's not removed prematurely
                else:
                    self.projectB.query("UPDATE queue_build SET in_queue = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id))

        self.projectB.query("COMMIT WORK")

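    # Net effect: every file of the upload (plus the pool .orig.tar.gz where
    # one is referenced) ends up under Dir::QueueBuild -- copied for security
    # queue builds, symlinked otherwise -- and is recorded in the queue_build
    # table so apt-ftparchive and later clean-up runs know which suite and
    # queue it belongs to.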
    ###########################################################################

    def check_override (self):
        Subst = self.Subst
        changes = self.pkg.changes
        files = self.pkg.files
        Cnf = self.Cnf

        # Abandon the check if:
        #  a) it's a non-sourceful upload
        #  b) override disparity checks have been disabled
        #  c) we're not sending mail
        if not changes["architecture"].has_key("source") or \
           not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
           Cnf["Dinstall::Options::No-Mail"]:
            return

        summary = ""
        file_keys = files.keys()
        file_keys.sort()
        for file in file_keys:
            if not files[file].has_key("new") and files[file]["type"] == "deb":
                section = files[file]["section"]
                override_section = files[file]["override section"]
                if section.lower() != override_section.lower() and section != "-":
                    summary += "%s: package says section is %s, override says %s.\n" % (file, section, override_section)
                priority = files[file]["priority"]
                override_priority = files[file]["override priority"]
                if priority != override_priority and priority != "-":
                    summary += "%s: package says priority is %s, override says %s.\n" % (file, priority, override_priority)

        if summary == "":
            return

        Subst["__SUMMARY__"] = summary
        mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/process-unchecked.override-disparity")
        utils.send_mail(mail_message)

    ###########################################################################

    def force_reject (self, files):
        """Forcefully move files from the current directory to the
           reject directory.  If any file already exists in the reject
           directory it will be moved to the morgue to make way for
           the new file."""

        Cnf = self.Cnf

        for file in files:
            # Skip any files which don't exist or which we don't have permission to copy.
            if os.access(file,os.R_OK) == 0:
                continue
            dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file)
            try:
                dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
            except OSError, e:
                # File exists?  Let's try and move it to the morgue
                if errno.errorcode[e.errno] == 'EEXIST':
                    morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file)
                    try:
                        morgue_file = utils.find_next_free(morgue_file)
                    except utils.tried_too_hard_exc:
                        # Something's either gone badly Pete Tong, or
                        # someone is trying to exploit us.
                        utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file))
                        return
                    utils.move(dest_file, morgue_file, perms=0660)
                    try:
                        dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
                    except OSError, e:
                        # Likewise
                        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file))
                        return
                else:
                    raise
            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file, dest_file, 1, perms=0660)
            os.close(dest_fd)

    ###########################################################################

    def do_reject (self, manual = 0, reject_message = ""):
        # If we weren't given a manual rejection message, spawn an
        # editor so the user can add one in...
        if manual and not reject_message:
            temp_filename = utils.temp_filename()
            editor = os.environ.get("EDITOR","vi")
            answer = 'E'
            while answer == 'E':
                os.system("%s %s" % (editor, temp_filename))
                temp_fh = utils.open_file(temp_filename)
                reject_message = "".join(temp_fh.readlines())
                temp_fh.close()
                print "Reject message:"
                print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
                prompt = "[R]eject, Edit, Abandon, Quit ?"
                answer = "XXX"
                while prompt.find(answer) == -1:
                    answer = utils.our_raw_input(prompt)
                    m = re_default_answer.search(prompt)
                    if answer == "":
                        answer = m.group(1)
                    answer = answer[:1].upper()
            os.unlink(temp_filename)
            if answer == 'A':
                return 1
            elif answer == 'Q':
                sys.exit(0)

        print "Rejecting.\n"

        Cnf = self.Cnf
        Subst = self.Subst
        pkg = self.pkg

        reason_filename = pkg.changes_file[:-8] + ".reason"
        reason_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename

        # Move all the files into the reject directory
        reject_files = pkg.files.keys() + [pkg.changes_file]
        self.force_reject(reject_files)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reason_filename):
            os.unlink(reason_filename)
        reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)

        if not manual:
            Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
            Subst["__MANUAL_REJECT_MESSAGE__"] = ""
            Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
            os.write(reason_fd, reject_message)
            reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
        else:
            # Build up the rejection email
            user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"])

            Subst["__REJECTOR_ADDRESS__"] = user_email_address
            Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
            Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
            reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
            # Write the rejection email out as the <foo>.reason file
            os.write(reason_fd, reject_mail_message)

        os.close(reason_fd)

        # Send the rejection mail if appropriate
        if not Cnf["Dinstall::Options::No-Mail"]:
            utils.send_mail(reject_mail_message)

        self.Logger.log(["rejected", pkg.changes_file])
        return 0

    ################################################################################

    # Ensure that source exists somewhere in the archive for the binary
    # upload being processed.
    #
    # (1) exact match                      => 1.0-3
    # (2) Bin-only NMU                     => 1.0-3+b1 , 1.0-3.1+b1

    def source_exists (self, package, source_version, suites = ["any"]):
        okay = 1
        for suite in suites:
            if suite == "any":
                que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
                    (package)
            else:
                # source must exist in suite X, or in some other suite that's
                # mapped to X, recursively... silent-maps are counted too,
                # unreleased-maps aren't.
                maps = self.Cnf.ValueList("SuiteMappings")[:]
                maps.reverse()
                maps = [ m.split() for m in maps ]
                maps = [ (x[1], x[2]) for x in maps
                         if x[0] == "map" or x[0] == "silent-map" ]
                s = [suite]
                for x in maps:
                    if x[1] in s and x[0] not in s:
                        s.append(x[0])

                que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, " OR ".join(["su.suite_name = '%s'" % a for a in s]))
            q = self.projectB.query(que)

            # Reduce the query results to a list of version numbers
            ql = [ i[0] for i in q.getresult() ]

            # Try (1)
            if source_version in ql:
                continue

            # Try (2)
            orig_source_version = re_bin_only_nmu.sub('', source_version)
            if orig_source_version in ql:
                continue

            # No source found...
            okay = 0
            break
        return okay

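    # Example of the suite-mapping expansion above, assuming a hypothetical
    # configuration entry:
    #
    #   SuiteMappings { "map stable proposed-updates"; };
    #
    # A binary upload targeted at proposed-updates would then also accept
    # source found in stable, because stable is mapped to proposed-updates
    # and is therefore added to the list of suites queried.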
    ################################################################################

    def in_override_p (self, package, component, suite, binary_type, file):
        files = self.pkg.files

        if binary_type == "": # must be source
            type = "dsc"
        else:
            type = binary_type

        # Override suite name; used for example with proposed-updates
        if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
            suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)]

        # Avoid <undef> on unknown distributions
        suite_id = database.get_suite_id(suite)
        if suite_id == -1:
            return None
        component_id = database.get_component_id(component)
        type_id = database.get_override_type_id(type)

        q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
                           % (package, suite_id, component_id, type_id))
        result = q.getresult()
        # If checking for a source package fall back on the binary override type
        if type == "dsc" and not result:
            deb_type_id = database.get_override_type_id("deb")
            udeb_type_id = database.get_override_type_id("udeb")
            q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
                               % (package, suite_id, component_id, deb_type_id, udeb_type_id))
            result = q.getresult()

        # Remember the section and priority so we can check them later if appropriate
        if result:
            files[file]["override section"] = result[0][0]
            files[file]["override priority"] = result[0][1]

        return result

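    # in_override_p() hands back the raw query result -- a (possibly empty)
    # list of (section, priority) rows -- so callers typically only test its
    # truth value; the matching override's section and priority are also
    # stashed in files[file] for the later check_override() disparity check.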
    ################################################################################

    def reject (self, str, prefix="Rejected: "):
        if str:
            # Unlike other rejects we add new lines first to avoid trailing
            # new lines when this message is passed back up to a caller.
            if self.reject_message:
                self.reject_message += "\n"
            self.reject_message += prefix + str

    ################################################################################

    def get_anyversion(self, query_result, suite):
        anyversion = None
        anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
        for (v, s) in query_result:
            if s in [ x.lower() for x in anysuite ]:
                if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
                    anyversion = v
        return anyversion

    ################################################################################

    def cross_suite_version_check(self, query_result, file, new_version):
        """Ensure versions are newer than existing packages in target
        suites and that cross-suite version checking rules as
        set out in the conf file are satisfied."""

        # Check versions for each target suite
        for target_suite in self.pkg.changes["distribution"].keys():
            must_be_newer_than = [ i.lower() for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
            must_be_older_than = [ i.lower() for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
            # Enforce "must be newer than target suite" even if conffile omits it
            if target_suite not in must_be_newer_than:
                must_be_newer_than.append(target_suite)
            for entry in query_result:
                existent_version = entry[0]
                suite = entry[1]
                if suite in must_be_newer_than and \
                   apt_pkg.VersionCompare(new_version, existent_version) < 1:
                    self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
                if suite in must_be_older_than and \
                   apt_pkg.VersionCompare(new_version, existent_version) > -1:
                    ch = self.pkg.changes
                    cansave = 0
                    if ch.get('distribution-version', {}).has_key(suite):
                        # we really use the other suite, ignoring the conflicting one ...
                        addsuite = ch["distribution-version"][suite]

                        add_version = self.get_anyversion(query_result, addsuite)
                        target_version = self.get_anyversion(query_result, target_suite)

                        if not add_version:
                            # not add_version can only happen if we map to a suite
                            # that doesn't enhance the suite we're propup'ing from.
                            # so "propup-ver x a b c; map a d" is a problem only if
                            # d doesn't enhance a.
                            #
                            # i think we could always propagate in this case, rather
                            # than complaining. either way, this isn't a REJECT issue
                            #
                            # And - we really should complain to the dorks who configured dak
                            self.reject("%s is mapped to, but not enhanced by %s - adding anyway" % (suite, addsuite), "Warning: ")
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            cansave = 1
                        elif not target_version:
                            # not target_version is true when the package is NEW
                            # we could just stick with the "...old version..." REJECT
                            # for this, I think.
                            self.reject("Won't propagate NEW packages.")
                        elif apt_pkg.VersionCompare(new_version, add_version) < 0:
                            # propagation would be redundant. no need to reject though.
                            self.reject("ignoring version conflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite), "Warning: ")
                            cansave = 1
                        elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
                             apt_pkg.VersionCompare(add_version, target_version) >= 0:
                            # propagate!
                            self.reject("Propagating upload to %s" % (addsuite), "Warning: ")
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            cansave = 1

                    if not cansave:
                        self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))

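    # Worked example with hypothetical versions: an upload of foo 1.0-3
    # targeted at unstable is rejected if unstable already carries 1.0-3 or
    # newer, since "must be newer than" always includes the target suite
    # itself.  If a MustBeOlderThan suite (say testing) only carries 1.0-2,
    # the new 1.0-3 violates the "must be older than" rule and is rejected,
    # unless the Distribution-Version mapping in the .changes allows the
    # upload to be propagated to that suite instead.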
    ################################################################################

    def check_binary_against_db(self, file):
        self.reject_message = ""
        files = self.pkg.files

        # Ensure version is sane
        q = self.projectB.query("""
SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
                                     architecture a
 WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
   AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
                                % (files[file]["package"],
                                   files[file]["architecture"]))
        self.cross_suite_version_check(q.getresult(), file, files[file]["version"])

        # Check for any existing copies of the file
        q = self.projectB.query("""
SELECT b.id FROM binaries b, architecture a
 WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
   AND a.id = b.architecture"""
                                % (files[file]["package"],
                                   files[file]["version"],
                                   files[file]["architecture"]))
        if q.getresult():
            self.reject("%s: can not overwrite existing copy already in the archive." % (file))

        return self.reject_message

    ################################################################################

    def check_source_against_db(self, file):
        self.reject_message = ""
        dsc = self.pkg.dsc

        # Ensure version is sane
        q = self.projectB.query("""
SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
 WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")))
        self.cross_suite_version_check(q.getresult(), file, dsc.get("version"))

        return self.reject_message

    ################################################################################

    # **WARNING**
    # NB: this function can remove entries from the 'files' index [if
    # the .orig.tar.gz is a duplicate of the one in the archive]; if
    # you're iterating over 'files' and call this function as part of
    # the loop, be sure to add a check to the top of the loop to
    # ensure you haven't just tried to dereference the deleted entry.
    # **WARNING**

    def check_dsc_against_db(self, file):
        self.reject_message = ""
        files = self.pkg.files
        dsc_files = self.pkg.dsc_files
        legacy_source_untouchable = self.pkg.legacy_source_untouchable
        self.pkg.orig_tar_gz = None

        # Try and find all files mentioned in the .dsc.  This has
        # to work harder to cope with the multiple possible
        # locations of an .orig.tar.gz.
        # The ordering on the select is needed to pick the newest orig
        # when it exists in multiple places.
        for dsc_file in dsc_files.keys():
            found = None
            if files.has_key(dsc_file):
                actual_md5 = files[dsc_file]["md5sum"]
                actual_size = int(files[dsc_file]["size"])
                found = "%s in incoming" % (dsc_file)
                # Check the file does not already exist in the archive
                q = self.projectB.query("SELECT f.size, f.md5sum, l.path, f.filename FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location ORDER BY f.id DESC" % (dsc_file))
                ql = q.getresult()
                # Strip out anything that isn't '%s' or '/%s$'
                ql = [ i for i in ql
                       if i[3] == dsc_file or i[3].endswith('/' + dsc_file) ]

                # "[dak] has not broken them.  [dak] has fixed a
                # brokenness.  Your crappy hack exploited a bug in
                # the old dinstall."
                #
                # "(Come on!  I thought it was always obvious that
                # one just doesn't release different files with
                # the same name and version.)"
                #                        -- ajk@ on d-devel@l.d.o

                if ql:
                    # Ignore exact matches for .orig.tar.gz
                    match = 0
                    if dsc_file.endswith(".orig.tar.gz"):
                        for i in ql:
                            if files.has_key(dsc_file) and \
                               int(files[dsc_file]["size"]) == int(i[0]) and \
                               files[dsc_file]["md5sum"] == i[1]:
                                self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ")
                                del files[dsc_file]
                                self.pkg.orig_tar_gz = i[2] + i[3]
                                match = 1

                    if not match:
                        self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file))
            elif dsc_file.endswith(".orig.tar.gz"):
                # Check in the pool
                q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
                ql = q.getresult()
                # Strip out anything that isn't '%s' or '/%s$'
                ql = [ i for i in ql
                       if i[1] == dsc_file or i[1].endswith('/' + dsc_file) ]

                if ql:
                    # Unfortunately, we may get more than one match here if,
                    # for example, the package was in potato but had an -sa
                    # upload in woody.  So we need to choose the right one.

                    x = ql[0] # default to something sane in case we don't match any or have only one

                    if len(ql) > 1:
                        for i in ql:
                            old_file = i[0] + i[1]
                            old_file_fh = utils.open_file(old_file)
                            actual_md5 = apt_pkg.md5sum(old_file_fh)
                            old_file_fh.close()
                            actual_size = os.stat(old_file)[stat.ST_SIZE]
                            if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
                                x = i
                            else:
                                legacy_source_untouchable[i[3]] = ""

                    old_file = x[0] + x[1]
                    old_file_fh = utils.open_file(old_file)
                    actual_md5 = apt_pkg.md5sum(old_file_fh)
                    old_file_fh.close()
                    actual_size = os.stat(old_file)[stat.ST_SIZE]
                    found = old_file
                    suite_type = x[2]
                    dsc_files[dsc_file]["files id"] = x[3] # need this for updating dsc_files in install()
                    # See install() in process-accepted...
                    self.pkg.orig_tar_id = x[3]
                    self.pkg.orig_tar_gz = old_file
                    if suite_type == "legacy" or suite_type == "legacy-mixed":
                        self.pkg.orig_tar_location = "legacy"
                    else:
                        self.pkg.orig_tar_location = x[4]
                else:
                    # Not there? Check the queue directories...

                    in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file)
                    # See process_it() in 'dak process-unchecked' for explanation of this
                    # in_unchecked check dropped by ajt 2007-08-28, how did that
                    # ever make sense?
                    if os.path.exists(in_unchecked) and False:
                        return (self.reject_message, in_unchecked)
                    else:
                        for dir in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates" ]:
                            in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (dir)],dsc_file)
                            if os.path.exists(in_otherdir):
                                in_otherdir_fh = utils.open_file(in_otherdir)
                                actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
                                in_otherdir_fh.close()
                                actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
                                found = in_otherdir
                                self.pkg.orig_tar_gz = in_otherdir

                    if not found:
                        self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file))
                        self.pkg.orig_tar_gz = -1
                        continue
            else:
                self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file))
                continue
            if actual_md5 != dsc_files[dsc_file]["md5sum"]:
                self.reject("md5sum for %s doesn't match %s." % (found, file))
            if actual_size != int(dsc_files[dsc_file]["size"]):
                self.reject("size for %s doesn't match %s." % (found, file))

        return (self.reject_message, None)

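    # check_dsc_against_db() returns a (reject_message, None) pair; the
    # second slot used to point at an .orig.tar.gz found in queue/unchecked
    # (see the "dropped by ajt 2007-08-28" note above) and is presumably only
    # kept for callers that still unpack two values.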
    def do_query(self, q):
        sys.stderr.write("query: \"%s\" ... " % (q))
        before = time.time()
        r = self.projectB.query(q)
        time_diff = time.time()-before
        sys.stderr.write("took %.3f seconds.\n" % (time_diff))
        return r