1 #!/usr/bin/env python
2
3 # Queue utility functions for dak
4 # Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006  James Troup <james@nocrew.org>
5
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 2 of the License, or
9 # (at your option) any later version.
10
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14 # GNU General Public License for more details.
15
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software
18 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
19
20 ###############################################################################
21
22 import cPickle, errno, os, pg, re, stat, sys, time
23 import apt_inst, apt_pkg
24 import utils, database
25
26 from types import *
27
28 ###############################################################################
29
# Precompiled patterns shared by the queue-handling code.
re_isanum = re.compile (r"^\d+$")            # string consisting only of digits
re_default_answer = re.compile(r"\[(.*)\]")  # default choice in a "[R]eject, ..." prompt
re_fdnic = re.compile(r"\n\n")               # paragraph separator in changelog text
re_bin_only_nmu = re.compile(r"\+b\d+$")     # binary-only NMU version suffix, e.g. "+b1"
34
35 ################################################################################
36
37 # Determine what parts in a .changes are NEW
38
def determine_new(changes, files, projectB, warn=1):
    """Determine which parts of a .changes upload are NEW.

    changes  -- parsed .changes dictionary; "suite" maps target suites
    files    -- per-file dictionaries from the .changes
    projectB -- open pg database connection
    warn     -- if true, print warnings for (old)stable targets and for
                packages already present in another component

    A package is NEW when no override entry exists for it in one of the
    target suites.  Returns a dict: package name -> proposed override
    data ("priority", "section", "type", "component", "files" and,
    when set, "othercomponents").  Also clears the per-file "new" flag
    for files whose package turns out not to be NEW.
    """
    new = {}

    # Build up a list of potentially new things
    for file in files.keys():
        f = files[file]
        # Skip byhand elements
        if f["type"] == "byhand":
            continue
        pkg = f["package"]
        priority = f["priority"]
        section = f["section"]
        type = get_type(f)
        component = f["component"]

        # Source packages always carry the pseudo-priority "source".
        if type == "dsc":
            priority = "source"
        if not new.has_key(pkg):
            new[pkg] = {}
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        else:
            old_type = new[pkg]["type"]
            if old_type != type:
                # source gets trumped by deb or udeb
                if old_type == "dsc":
                    new[pkg]["priority"] = priority
                    new[pkg]["section"] = section
                    new[pkg]["type"] = type
                    new[pkg]["component"] = component
        new[pkg]["files"].append(file)
        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]

    # Drop entries which already have an override in a target suite.
    for suite in changes["suite"].keys():
        suite_id = database.get_suite_id(suite)
        for pkg in new.keys():
            component_id = database.get_component_id(new[pkg]["component"])
            type_id = database.get_override_type_id(new[pkg]["type"])
            # NOTE(review): pkg is interpolated directly into SQL --
            # presumably package names are validated upstream; confirm.
            q = projectB.query("SELECT package FROM override WHERE package = '%s' AND suite = %s AND component = %s AND type = %s" % (pkg, suite_id, component_id, type_id))
            ql = q.getresult()
            if ql:
                # Override exists: the files are not NEW after all.
                for file in new[pkg]["files"]:
                    if files[file].has_key("new"):
                        del files[file]["new"]
                # Deleting while looping is safe here: Python 2's
                # new.keys() above returned a list copy.
                del new[pkg]

    if warn:
        if changes["suite"].has_key("stable"):
            print "WARNING: overrides will be added for stable!"
            # NOTE(review): the OLDstable warning only fires when
            # "stable" is also a target suite -- looks suspicious,
            # confirm whether the nesting is intentional.
            if changes["suite"].has_key("oldstable"):
                print "WARNING: overrides will be added for OLDstable!"
        for pkg in new.keys():
            if new[pkg].has_key("othercomponents"):
                print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])

    return new
99
100 ################################################################################
101
def get_type(f):
    """Return the override type ("deb", "udeb", "dsc", ...) for file entry f.

    Uses f["dbtype"] when the entry came from the database; otherwise
    maps the source-related filename types onto "dsc".  Aborts via
    utils.fubar() for an unknown type or one missing from the override
    type table.
    """
    # Determine the type
    if f.has_key("dbtype"):
        type = f["dbtype"]
    elif f["type"] in [ "orig.tar.gz", "orig.tar.bz2", "tar.gz", "tar.bz2", "diff.gz", "diff.bz2", "dsc" ]:
        type = "dsc"
    else:
        # Bug fix: on this branch the local 'type' was never assigned,
        # so the old "% (type)" formatted the *builtin* type() instead
        # of the offending file type.  Report f["type"] instead.
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (f["type"]))

    # Validate the override type
    type_id = database.get_override_type_id(type)
    if type_id == -1:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (type))

    return type
117
118 ################################################################################
119
120 # check if section/priority values are valid
121
def check_valid(new):
    """Resolve and sanity-check section/priority ids for NEW packages.

    Fills in "section id" and "priority id" for every entry of new,
    forcing them to -1 when the section/priority is inconsistent with
    the package type (udeb vs debian-installer section, dsc vs the
    "source" priority).
    """
    for pkg in new.keys():
        entry = new[pkg]
        sect = entry["section"]
        prio = entry["priority"]
        pkg_type = entry["type"]
        entry["section id"] = database.get_section_id(sect)
        entry["priority id"] = database.get_priority_id(entry["priority"])
        # debian-installer sections must hold udebs, and only udebs.
        in_di = "debian-installer" in sect
        if in_di != (pkg_type == "udeb"):
            entry["section id"] = -1
        # The "source" priority is reserved for, and required of, .dsc files.
        if (prio == "source") != (pkg_type == "dsc"):
            entry["priority id"] = -1
136
137
138 ###############################################################################
139
140 # Convenience wrapper to carry around all the package information in
141
class Pkg:
    """Attribute bag carrying all per-upload package state.

    Arbitrary keyword arguments become instance attributes, both at
    construction time and via update().
    """

    def __init__(self, **kwds):
        for key, value in kwds.items():
            setattr(self, key, value)

    def update(self, **kwds):
        for key, value in kwds.items():
            setattr(self, key, value)
148
149 ###############################################################################
150
151 class Upload:
152
    def __init__(self, Cnf):
        """Create an Upload handler bound to configuration Cnf.

        Seeds the template substitution map with the static,
        configuration-derived values and opens the projectB database
        connection used by the other methods.
        """
        self.Cnf = Cnf
        # Running totals reported by callers of accept().
        self.accept_count = 0
        self.accept_bytes = 0L
        # Per-upload state; cleared between uploads via init_vars().
        self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
                       legacy_source_untouchable = {})

        # Initialize the substitution template mapping global
        Subst = self.Subst = {}
        Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"]
        Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"]
        Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"]
        Subst["__DAK_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]

        self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
        database.init(Cnf, self.projectB)
169
170     ###########################################################################
171
172     def init_vars (self):
173         for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
174             exec "self.pkg.%s.clear();" % (i)
175         self.pkg.orig_tar_id = None
176         self.pkg.orig_tar_location = ""
177         self.pkg.orig_tar_gz = None
178
179     ###########################################################################
180
181     def update_vars (self):
182         dump_filename = self.pkg.changes_file[:-8]+".dak"
183         dump_file = utils.open_file(dump_filename)
184         p = cPickle.Unpickler(dump_file)
185         for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
186             exec "self.pkg.%s.update(p.load());" % (i)
187         for i in [ "orig_tar_id", "orig_tar_location" ]:
188             exec "self.pkg.%s = p.load();" % (i)
189         dump_file.close()
190
191     ###########################################################################
192
193     # This could just dump the dictionaries as is, but I'd like to
194     # avoid this so there's some idea of what process-accepted &
195     # process-new use from process-unchecked
196
197     def dump_vars(self, dest_dir):
198         for i in [ "changes", "dsc", "files", "dsc_files",
199                    "legacy_source_untouchable", "orig_tar_id", "orig_tar_location" ]:
200             exec "%s = self.pkg.%s;" % (i,i)
201         dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".dak")
202         dump_file = utils.open_file(dump_filename, 'w')
203         try:
204             os.chmod(dump_filename, 0660)
205         except OSError, e:
206             if errno.errorcode[e.errno] == 'EPERM':
207                 perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE])
208                 if perms & stat.S_IROTH:
209                     utils.fubar("%s is world readable and chmod failed." % (dump_filename))
210             else:
211                 raise
212
213         p = cPickle.Pickler(dump_file, 1)
214         for i in [ "d_changes", "d_dsc", "d_files", "d_dsc_files" ]:
215             exec "%s = {}" % i
216         ## files
217         for file in files.keys():
218             d_files[file] = {}
219             for i in [ "package", "version", "architecture", "type", "size",
220                        "md5sum", "component", "location id", "source package",
221                        "source version", "maintainer", "dbtype", "files id",
222                        "new", "section", "priority", "othercomponents",
223                        "pool name", "original component" ]:
224                 if files[file].has_key(i):
225                     d_files[file][i] = files[file][i]
226         ## changes
227         # Mandatory changes fields
228         for i in [ "distribution", "source", "architecture", "version",
229                    "maintainer", "urgency", "fingerprint", "changedby822",
230                    "changedby2047", "changedbyname", "maintainer822",
231                    "maintainer2047", "maintainername", "maintaineremail",
232                    "closes", "changes" ]:
233             d_changes[i] = changes[i]
234         # Optional changes fields
235         for i in [ "changed-by", "filecontents", "format", "process-new note", "adv id", "distribution-version" ]:
236             if changes.has_key(i):
237                 d_changes[i] = changes[i]
238         ## dsc
239         for i in [ "source", "version", "maintainer", "fingerprint",
240                    "uploaders", "bts changelog", "dm-upload-allowed" ]:
241             if dsc.has_key(i):
242                 d_dsc[i] = dsc[i]
243         ## dsc_files
244         for file in dsc_files.keys():
245             d_dsc_files[file] = {}
246             # Mandatory dsc_files fields
247             for i in [ "size", "md5sum" ]:
248                 d_dsc_files[file][i] = dsc_files[file][i]
249             # Optional dsc_files fields
250             for i in [ "files id" ]:
251                 if dsc_files[file].has_key(i):
252                     d_dsc_files[file][i] = dsc_files[file][i]
253
254         for i in [ d_changes, d_dsc, d_files, d_dsc_files,
255                    legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
256             p.dump(i)
257         dump_file.close()
258
259     ###########################################################################
260
261     # Set up the per-package template substitution mappings
262
263     def update_subst (self, reject_message = ""):
264         Subst = self.Subst
265         changes = self.pkg.changes
266         # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
267         if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
268             changes["architecture"] = { "Unknown" : "" }
269         # and maintainer2047 may not exist.
270         if not changes.has_key("maintainer2047"):
271             changes["maintainer2047"] = self.Cnf["Dinstall::MyEmailAddress"]
272
273         Subst["__ARCHITECTURE__"] = " ".join(changes["architecture"].keys())
274         Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
275         Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "")
276
277         # For source uploads the Changed-By field wins; otherwise Maintainer wins.
278         if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
279             Subst["__MAINTAINER_FROM__"] = changes["changedby2047"]
280             Subst["__MAINTAINER_TO__"] = "%s, %s" % (changes["changedby2047"],
281                                                      changes["maintainer2047"])
282             if "sponsoremail" in changes:
283                 Subst["__MAINTAINER_TO__"] += ", %s"%changes["sponsoremail"]
284             Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown")
285         else:
286             Subst["__MAINTAINER_FROM__"] = changes["maintainer2047"]
287             Subst["__MAINTAINER_TO__"] = changes["maintainer2047"]
288             Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown")
289         if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
290             Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])
291
292         # Apply any global override of the Maintainer field
293         if self.Cnf.get("Dinstall::OverrideMaintainer"):
294             Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"]
295             Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"]
296
297         Subst["__REJECT_MESSAGE__"] = reject_message
298         Subst["__SOURCE__"] = changes.get("source", "Unknown")
299         Subst["__VERSION__"] = changes.get("version", "Unknown")
300
301     ###########################################################################
302
    def build_summaries(self):
        """Build the (summary, short_summary) texts used in mails.

        short_summary lists the files and their destinations; summary
        additionally carries the changelog text (for byhand/new
        uploads), the override entries and the announcement text from
        announce(short_summary, 0).
        """
        changes = self.pkg.changes
        files = self.pkg.files

        byhand = summary = new = ""

        # changes["distribution"] may not exist in corner cases
        # (e.g. unreadable changes files)
        if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
            changes["distribution"] = {}

        override_summary ="";
        file_keys = files.keys()
        file_keys.sort()
        for file in file_keys:
            if files[file].has_key("byhand"):
                byhand = 1
                summary += file + " byhand\n"
            elif files[file].has_key("new"):
                new = 1
                summary += "(new) %s %s %s\n" % (file, files[file]["priority"], files[file]["section"])
                if files[file].has_key("othercomponents"):
                    summary += "WARNING: Already present in %s distribution.\n" % (files[file]["othercomponents"])
                # Include the control-file Description for NEW debs.
                if files[file]["type"] == "deb":
                    deb_fh = utils.open_file(file)
                    summary += apt_pkg.ParseSection(apt_inst.debExtractControl(deb_fh))["Description"] + '\n'
                    deb_fh.close()
            else:
                # Known file: show where it will land in the pool.
                files[file]["pool name"] = utils.poolify (changes.get("source",""), files[file]["component"])
                destination = self.Cnf["Dir::PoolRoot"] + files[file]["pool name"] + file
                summary += file + "\n  to " + destination + "\n"
                if not files[file].has_key("type"):
                    files[file]["type"] = "unknown"
                if files[file]["type"] in ["deb", "udeb", "dsc"]:
                    # (queue/unchecked), there we have override entries already, use them
                    # (process-new), there we dont have override entries, use the newly generated ones.
                    override_prio = files[file].get("override priority", files[file]["priority"])
                    override_sect = files[file].get("override section", files[file]["section"])
                    override_summary += "%s - %s %s\n" % (file, override_prio, override_sect)

        short_summary = summary

        # This is for direport's benefit...
        f = re_fdnic.sub("\n .\n", changes.get("changes",""))

        if byhand or new:
            summary += "Changes: " + f

        summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"

        summary += self.announce(short_summary, 0)

        return (summary, short_summary)
356
357     ###########################################################################
358
359     def close_bugs (self, summary, action):
360         changes = self.pkg.changes
361         Subst = self.Subst
362         Cnf = self.Cnf
363
364         bugs = changes["closes"].keys()
365
366         if not bugs:
367             return summary
368
369         bugs.sort()
370         summary += "Closing bugs: "
371         for bug in bugs:
372             summary += "%s " % (bug)
373             if action:
374                 Subst["__BUG_NUMBER__"] = bug
375                 if changes["distribution"].has_key("stable"):
376                     Subst["__STABLE_WARNING__"] = """
377 Note that this package is not part of the released stable Debian
378 distribution.  It may have dependencies on other unreleased software,
379 or other instabilities.  Please take care if you wish to install it.
380 The update will eventually make its way into the next released Debian
381 distribution."""
382                 else:
383                     Subst["__STABLE_WARNING__"] = ""
384                     mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-close")
385                     utils.send_mail (mail_message)
386         if action:
387             self.Logger.log(["closing bugs"]+bugs)
388         summary += "\n"
389
390         return summary
391
392     ###########################################################################
393
394     def announce (self, short_summary, action):
395         Subst = self.Subst
396         Cnf = self.Cnf
397         changes = self.pkg.changes
398
399         # Only do announcements for source uploads with a recent dpkg-dev installed
400         if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):
401             return ""
402
403         lists_done = {}
404         summary = ""
405         Subst["__SHORT_SUMMARY__"] = short_summary
406
407         for dist in changes["distribution"].keys():
408             list = Cnf.Find("Suite::%s::Announce" % (dist))
409             if list == "" or lists_done.has_key(list):
410                 continue
411             lists_done[list] = 1
412             summary += "Announcing to %s\n" % (list)
413
414             if action:
415                 Subst["__ANNOUNCE_LIST_ADDRESS__"] = list
416                 if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
417                     Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"])
418                 mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.announce")
419                 utils.send_mail (mail_message)
420
421         if Cnf.FindB("Dinstall::CloseBugs"):
422             summary = self.close_bugs(summary, action)
423
424         return summary
425
426     ###########################################################################
427
    def accept (self, summary, short_summary):
        """Accept an upload: move it to the accepted queue and notify.

        Moves the .changes and all its files into
        Cnf["Dir::Queue::Accepted"], dumps the .dak state there, sends
        the accepted mail and announcements (unless No-Mail), writes
        the DebBugs version-tracking helper files and finally hands the
        upload to queue_build().
        """
        Cnf = self.Cnf
        Subst = self.Subst
        files = self.pkg.files
        changes = self.pkg.changes
        changes_file = self.pkg.changes_file
        dsc = self.pkg.dsc

        print "Accepting."
        self.Logger.log(["Accepting changes",changes_file])

        # Save state first so the accepted queue is self-describing.
        self.dump_vars(Cnf["Dir::Queue::Accepted"])

        # Move all the files into the accepted directory
        utils.move(changes_file, Cnf["Dir::Queue::Accepted"])
        file_keys = files.keys()
        for file in file_keys:
            utils.move(file, Cnf["Dir::Queue::Accepted"])
            self.accept_bytes += float(files[file]["size"])
        self.accept_count += 1

        # Send accept mail, announce to lists, close bugs and check for
        # override disparities
        if not Cnf["Dinstall::Options::No-Mail"]:
            Subst["__SUITE__"] = ""
            Subst["__SUMMARY__"] = summary
            mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
            utils.send_mail(mail_message)
            self.announce(short_summary, 1)


        ## Helper stuff for DebBugs Version Tracking
        if Cnf.Find("Dir::Queue::BTSVersionTrack"):
            # ??? once queue/* is cleared on *.d.o and/or reprocessed
            # the conditionalization on dsc["bts changelog"] should be
            # dropped.

            # Write out the version history from the changelog
            if changes["architecture"].has_key("source") and \
               dsc.has_key("bts changelog"):

                # Write to a dot-prefixed temp file and rename so
                # consumers never see a partial .versions file.
                temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                                    dotprefix=1, perms=0644)
                version_history = utils.open_file(temp_filename, 'w')
                version_history.write(dsc["bts changelog"])
                version_history.close()
                filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                      changes_file[:-8]+".versions")
                os.rename(temp_filename, filename)

            # Write out the binary -> source mapping.
            temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                                dotprefix=1, perms=0644)
            debinfo = utils.open_file(temp_filename, 'w')
            for file in file_keys:
                f = files[file]
                if f["type"] == "deb":
                    line = " ".join([f["package"], f["version"],
                                     f["architecture"], f["source package"],
                                     f["source version"]])
                    debinfo.write(line+"\n")
            debinfo.close()
            filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                  changes_file[:-8]+".debinfo")
            os.rename(temp_filename, filename)

        self.queue_build("accepted", Cnf["Dir::Queue::Accepted"])
495
496     ###########################################################################
497
    def queue_build (self, queue, path):
        """Register the upload's files for clean auto-building.

        For each target suite listed in Dinstall::QueueBuildSuites,
        copies (security setup) or symlinks the files from path into
        the queue-build area and records them in the queue_build table.
        Also ensures a pool .orig.tar.gz is linked in and kept alive.
        """
        Cnf = self.Cnf
        Subst = self.Subst
        files = self.pkg.files
        changes = self.pkg.changes
        changes_file = self.pkg.changes_file
        dsc = self.pkg.dsc
        file_keys = files.keys()

        ## Special support to enable clean auto-building of queued packages
        queue_id = database.get_or_set_queue_id(queue)

        self.projectB.query("BEGIN WORK")
        for suite in changes["distribution"].keys():
            if suite not in Cnf.ValueList("Dinstall::QueueBuildSuites"):
                continue
            suite_id = database.get_suite_id(suite)
            dest_dir = Cnf["Dir::QueueBuild"]
            if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                dest_dir = os.path.join(dest_dir, suite)
            for file in file_keys:
                src = os.path.join(path, file)
                dest = os.path.join(dest_dir, file)
                if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                    # Copy it since the original won't be readable by www-data
                    utils.copy(src, dest)
                else:
                    # Create a symlink to it
                    os.symlink(src, dest)
                # Add it to the list of packages for later processing by apt-ftparchive
                # NOTE(review): dest is interpolated directly into SQL;
                # presumably filenames are archive-controlled -- confirm.
                self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
            # If the .orig.tar.gz is in the pool, create a symlink to
            # it (if one doesn't already exist)
            if self.pkg.orig_tar_id:
                # Determine the .orig.tar.gz file name
                # NOTE(review): if no dsc file ends in .orig.tar.gz,
                # 'filename' is unbound (or stale from a previous suite
                # iteration) -- presumably orig_tar_id being set
                # guarantees a match; confirm.
                for dsc_file in self.pkg.dsc_files.keys():
                    if dsc_file.endswith(".orig.tar.gz"):
                        filename = dsc_file
                dest = os.path.join(dest_dir, filename)
                # If it doesn't exist, create a symlink
                if not os.path.exists(dest):
                    # Find the .orig.tar.gz in the pool
                    q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id))
                    ql = q.getresult()
                    if not ql:
                        utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id))
                    src = os.path.join(ql[0][0], ql[0][1])
                    os.symlink(src, dest)
                    # Add it to the list of packages for later processing by apt-ftparchive
                    self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
                # if it does, update things to ensure it's not removed prematurely
                else:
                    self.projectB.query("UPDATE queue_build SET in_queue = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id))

        self.projectB.query("COMMIT WORK")
553
554     ###########################################################################
555
556     def check_override (self):
557         Subst = self.Subst
558         changes = self.pkg.changes
559         files = self.pkg.files
560         Cnf = self.Cnf
561
562         # Abandon the check if:
563         #  a) it's a non-sourceful upload
564         #  b) override disparity checks have been disabled
565         #  c) we're not sending mail
566         if not changes["architecture"].has_key("source") or \
567            not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
568            Cnf["Dinstall::Options::No-Mail"]:
569             return
570
571         summary = ""
572         file_keys = files.keys()
573         file_keys.sort()
574         for file in file_keys:
575             if not files[file].has_key("new") and files[file]["type"] == "deb":
576                 section = files[file]["section"]
577                 override_section = files[file]["override section"]
578                 if section.lower() != override_section.lower() and section != "-":
579                     summary += "%s: package says section is %s, override says %s.\n" % (file, section, override_section)
580                 priority = files[file]["priority"]
581                 override_priority = files[file]["override priority"]
582                 if priority != override_priority and priority != "-":
583                     summary += "%s: package says priority is %s, override says %s.\n" % (file, priority, override_priority)
584
585         if summary == "":
586             return
587
588         Subst["__SUMMARY__"] = summary
589         mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/process-unchecked.override-disparity")
590         utils.send_mail(mail_message)
591
592     ###########################################################################
593
    def force_reject (self, files):
        """Forcefully move files from the current directory to the
           reject directory.  If any file already exists in the reject
           directory it will be moved to the morgue to make way for
           the new file."""

        Cnf = self.Cnf

        for file in files:
            # Skip any files which don't exist or which we don't have permission to copy.
            if os.access(file,os.R_OK) == 0:
                continue
            dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file)
            # O_EXCL claims the destination atomically, so two dak
            # processes can't both think they own it.
            try:
                dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
            except OSError, e:
                # File exists?  Let's try and move it to the morgue
                if errno.errorcode[e.errno] == 'EEXIST':
                    morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file)
                    try:
                        morgue_file = utils.find_next_free(morgue_file)
                    except utils.tried_too_hard_exc:
                        # Something's either gone badly Pete Tong, or
                        # someone is trying to exploit us.
                        utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file))
                        return
                    utils.move(dest_file, morgue_file, perms=0660)
                    # Retry the exclusive create now that the old
                    # reject file is out of the way.
                    try:
                        dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
                    except OSError, e:
                        # Likewise
                        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file))
                        return
                else:
                    raise
            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file, dest_file, 1, perms=0660)
            os.close(dest_fd)
633
634     ###########################################################################
635
    def do_reject (self, manual = 0, reject_message = ""):
        """Reject the current upload.

        When manual is true and no reject_message is supplied, an
        editor is spawned so the operator can write one (returning 1 if
        they abandon, exiting on quit).  Moves the upload's files to
        the reject directory via force_reject(), writes a <name>.reason
        file and, unless No-Mail is set, sends the rejection mail.
        Returns 0 on a completed rejection.
        """
        # If we weren't given a manual rejection message, spawn an
        # editor so the user can add one in...
        if manual and not reject_message:
            temp_filename = utils.temp_filename()
            editor = os.environ.get("EDITOR","vi")
            answer = 'E'
            # Loop until the operator accepts, abandons or quits.
            while answer == 'E':
                os.system("%s %s" % (editor, temp_filename))
                temp_fh = utils.open_file(temp_filename)
                reject_message = "".join(temp_fh.readlines())
                temp_fh.close()
                print "Reject message:"
                print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
                prompt = "[R]eject, Edit, Abandon, Quit ?"
                answer = "XXX"
                while prompt.find(answer) == -1:
                    answer = utils.our_raw_input(prompt)
                    m = re_default_answer.search(prompt)
                    # Empty input picks the bracketed default ("R").
                    if answer == "":
                        answer = m.group(1)
                    answer = answer[:1].upper()
            os.unlink(temp_filename)
            if answer == 'A':
                return 1
            elif answer == 'Q':
                sys.exit(0)

        print "Rejecting.\n"

        Cnf = self.Cnf
        Subst = self.Subst
        pkg = self.pkg

        reason_filename = pkg.changes_file[:-8] + ".reason"
        reason_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename

        # Move all the files into the reject directory
        reject_files = pkg.files.keys() + [pkg.changes_file]
        self.force_reject(reject_files)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reason_filename):
            os.unlink(reason_filename)
        reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)

        if not manual:
            Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
            Subst["__MANUAL_REJECT_MESSAGE__"] = ""
            Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
            os.write(reason_fd, reject_message)
            reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
        else:
            # Build up the rejection email
            user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"])

            Subst["__REJECTOR_ADDRESS__"] = user_email_address
            Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
            Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
            reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
            # Write the rejection email out as the <foo>.reason file
            os.write(reason_fd, reject_mail_message)

        os.close(reason_fd)

        # Send the rejection mail if appropriate
        if not Cnf["Dinstall::Options::No-Mail"]:
            utils.send_mail(reject_mail_message)

        self.Logger.log(["rejected", pkg.changes_file])
        return 0
708
709     ################################################################################
710
711     # Ensure that source exists somewhere in the archive for the binary
712     # upload being processed.
713     #
714     # (1) exact match                      => 1.0-3
715     # (2) Bin-only NMU                     => 1.0-3+b1 , 1.0-3.1+b1
716
717     def source_exists (self, package, source_version, suites = ["any"]):
718         okay = 1
719         for suite in suites:
720             if suite == "any":
721                 que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
722                     (package)
723             else:
724                 # source must exist in suite X, or in some other suite that's
725                 # mapped to X, recursively... silent-maps are counted too,
726                 # unreleased-maps aren't.
727                 maps = self.Cnf.ValueList("SuiteMappings")[:]
728                 maps.reverse()
729                 maps = [ m.split() for m in maps ]
730                 maps = [ (x[1], x[2]) for x in maps
731                                 if x[0] == "map" or x[0] == "silent-map" ]
732                 s = [suite]
733                 for x in maps:
734                         if x[1] in s and x[0] not in s:
735                                 s.append(x[0])
736
737                 que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, " OR ".join(["su.suite_name = '%s'" % a for a in s]))
738             q = self.projectB.query(que)
739
740             # Reduce the query results to a list of version numbers
741             ql = [ i[0] for i in q.getresult() ]
742
743             # Try (1)
744             if source_version in ql:
745                 continue
746
747             # Try (2)
748             orig_source_version = re_bin_only_nmu.sub('', source_version)
749             if orig_source_version in ql:
750                 continue
751
752             # No source found...
753             okay = 0
754             break
755         return okay
756
757     ################################################################################
758     
759     def in_override_p (self, package, component, suite, binary_type, file):
760         files = self.pkg.files
761
762         if binary_type == "": # must be source
763             type = "dsc"
764         else:
765             type = binary_type
766
767         # Override suite name; used for example with proposed-updates
768         if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
769             suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)]
770
771         # Avoid <undef> on unknown distributions
772         suite_id = database.get_suite_id(suite)
773         if suite_id == -1:
774             return None
775         component_id = database.get_component_id(component)
776         type_id = database.get_override_type_id(type)
777
778         q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
779                            % (package, suite_id, component_id, type_id))
780         result = q.getresult()
781         # If checking for a source package fall back on the binary override type
782         if type == "dsc" and not result:
783             deb_type_id = database.get_override_type_id("deb")
784             udeb_type_id = database.get_override_type_id("udeb")
785             q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
786                                % (package, suite_id, component_id, deb_type_id, udeb_type_id))
787             result = q.getresult()
788
789         # Remember the section and priority so we can check them later if appropriate
790         if result:
791             files[file]["override section"] = result[0][0]
792             files[file]["override priority"] = result[0][1]
793
794         return result
795
796     ################################################################################
797
798     def reject (self, str, prefix="Rejected: "):
799         if str:
800             # Unlike other rejects we add new lines first to avoid trailing
801             # new lines when this message is passed back up to a caller.
802             if self.reject_message:
803                 self.reject_message += "\n"
804             self.reject_message += prefix + str
805
806     ################################################################################
807
808     def get_anyversion(self, query_result, suite):
809         anyversion=None
810         anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
811         for (v, s) in query_result:
812             if s in [ x.lower() for x in anysuite ]:
813                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
814                     anyversion=v
815         return anyversion
816
817     ################################################################################
818
819     def cross_suite_version_check(self, query_result, file, new_version):
820         """Ensure versions are newer than existing packages in target
821         suites and that cross-suite version checking rules as
822         set out in the conf file are satisfied."""
823
824         # Check versions for each target suite
825         for target_suite in self.pkg.changes["distribution"].keys():
826             must_be_newer_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
827             must_be_older_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
828             # Enforce "must be newer than target suite" even if conffile omits it
829             if target_suite not in must_be_newer_than:
830                 must_be_newer_than.append(target_suite)
831             for entry in query_result:
832                 existent_version = entry[0]
833                 suite = entry[1]
834                 if suite in must_be_newer_than and \
835                    apt_pkg.VersionCompare(new_version, existent_version) < 1:
836                     self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
837                 if suite in must_be_older_than and \
838                    apt_pkg.VersionCompare(new_version, existent_version) > -1:
839                     ch = self.pkg.changes
840                     cansave = 0
841                     if ch.get('distribution-version', {}).has_key(suite):
842                         # we really use the other suite, ignoring the conflicting one ...
843                         addsuite = ch["distribution-version"][suite]
844                     
845                         add_version = self.get_anyversion(query_result, addsuite)
846                         target_version = self.get_anyversion(query_result, target_suite)
847                     
848                         if not add_version:
849                             # not add_version can only happen if we map to a suite
850                             # that doesn't enhance the suite we're propup'ing from.
851                             # so "propup-ver x a b c; map a d" is a problem only if
852                             # d doesn't enhance a.
853                             #
854                             # i think we could always propagate in this case, rather
855                             # than complaining. either way, this isn't a REJECT issue
856                             #
857                             # And - we really should complain to the dorks who configured dak
858                             self.reject("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite), "Warning: ")
859                             self.pkg.changes.setdefault("propdistribution", {})
860                             self.pkg.changes["propdistribution"][addsuite] = 1
861                             cansave = 1
862                         elif not target_version:
863                             # not targets_version is true when the package is NEW
864                             # we could just stick with the "...old version..." REJECT
865                             # for this, I think.
866                             self.reject("Won't propogate NEW packages.")
867                         elif apt_pkg.VersionCompare(new_version, add_version) < 0:
868                             # propogation would be redundant. no need to reject though.
869                             self.reject("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite), "Warning: ")
870                             cansave = 1
871                         elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
872                              apt_pkg.VersionCompare(add_version, target_version) >= 0:
873                             # propogate!!
874                             self.reject("Propogating upload to %s" % (addsuite), "Warning: ")
875                             self.pkg.changes.setdefault("propdistribution", {})
876                             self.pkg.changes["propdistribution"][addsuite] = 1
877                             cansave = 1
878                 
879                     if not cansave:
880                         self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
881
882     ################################################################################
883
884     def check_binary_against_db(self, file):
885         self.reject_message = ""
886         files = self.pkg.files
887
888         # Ensure version is sane
889         q = self.projectB.query("""
890 SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
891                                      architecture a
892  WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
893    AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
894                                 % (files[file]["package"],
895                                    files[file]["architecture"]))
896         self.cross_suite_version_check(q.getresult(), file, files[file]["version"])
897
898         # Check for any existing copies of the file
899         q = self.projectB.query("""
900 SELECT b.id FROM binaries b, architecture a
901  WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
902    AND a.id = b.architecture"""
903                                 % (files[file]["package"],
904                                    files[file]["version"],
905                                    files[file]["architecture"]))
906         if q.getresult():
907             self.reject("%s: can not overwrite existing copy already in the archive." % (file))
908
909         return self.reject_message
910
911     ################################################################################
912
913     def check_source_against_db(self, file):
914         self.reject_message = ""
915         dsc = self.pkg.dsc
916
917         # Ensure version is sane
918         q = self.projectB.query("""
919 SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
920  WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")))
921         self.cross_suite_version_check(q.getresult(), file, dsc.get("version"))
922
923         return self.reject_message
924
925     ################################################################################
926
927     # **WARNING**
928     # NB: this function can remove entries from the 'files' index [if
929     # the .orig.tar.gz is a duplicate of the one in the archive]; if
930     # you're iterating over 'files' and call this function as part of
931     # the loop, be sure to add a check to the top of the loop to
932     # ensure you haven't just tried to dereference the deleted entry.
933     # **WARNING**
934
935     def check_dsc_against_db(self, file):
936         self.reject_message = ""
937         files = self.pkg.files
938         dsc_files = self.pkg.dsc_files
939         legacy_source_untouchable = self.pkg.legacy_source_untouchable
940         self.pkg.orig_tar_gz = None
941
942         # Try and find all files mentioned in the .dsc.  This has
943         # to work harder to cope with the multiple possible
944         # locations of an .orig.tar.gz.
945         # The ordering on the select is needed to pick the newest orig
946         # when it exists in multiple places.
947         for dsc_file in dsc_files.keys():
948             found = None
949             if files.has_key(dsc_file):
950                 actual_md5 = files[dsc_file]["md5sum"]
951                 actual_size = int(files[dsc_file]["size"])
952                 found = "%s in incoming" % (dsc_file)
953                 # Check the file does not already exist in the archive
954                 q = self.projectB.query("SELECT f.size, f.md5sum, l.path, f.filename FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location ORDER BY f.id DESC" % (dsc_file))
955                 ql = q.getresult()
956                 # Strip out anything that isn't '%s' or '/%s$'
957                 for i in ql:
958                     if i[3] != dsc_file and i[3][-(len(dsc_file)+1):] != '/'+dsc_file:
959                         ql.remove(i)
960
961                 # "[dak] has not broken them.  [dak] has fixed a
962                 # brokenness.  Your crappy hack exploited a bug in
963                 # the old dinstall.
964                 #
965                 # "(Come on!  I thought it was always obvious that
966                 # one just doesn't release different files with
967                 # the same name and version.)"
968                 #                        -- ajk@ on d-devel@l.d.o
969
970                 if ql:
971                     # Ignore exact matches for .orig.tar.gz
972                     match = 0
973                     if dsc_file.endswith(".orig.tar.gz"):
974                         for i in ql:
975                             if files.has_key(dsc_file) and \
976                                int(files[dsc_file]["size"]) == int(i[0]) and \
977                                files[dsc_file]["md5sum"] == i[1]:
978                                 self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ")
979                                 del files[dsc_file]
980                                 self.pkg.orig_tar_gz = i[2] + i[3]
981                                 match = 1
982
983                     if not match:
984                         self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file))
985             elif dsc_file.endswith(".orig.tar.gz"):
986                 # Check in the pool
987                 q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
988                 ql = q.getresult()
989                 # Strip out anything that isn't '%s' or '/%s$'
990                 for i in ql:
991                     if i[1] != dsc_file and i[1][-(len(dsc_file)+1):] != '/'+dsc_file:
992                         ql.remove(i)
993
994                 if ql:
995                     # Unfortunately, we may get more than one match here if,
996                     # for example, the package was in potato but had an -sa
997                     # upload in woody.  So we need to choose the right one.
998
999                     x = ql[0]; # default to something sane in case we don't match any or have only one
1000
1001                     if len(ql) > 1:
1002                         for i in ql:
1003                             old_file = i[0] + i[1]
1004                             old_file_fh = utils.open_file(old_file)
1005                             actual_md5 = apt_pkg.md5sum(old_file_fh)
1006                             old_file_fh.close()
1007                             actual_size = os.stat(old_file)[stat.ST_SIZE]
1008                             if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
1009                                 x = i
1010                             else:
1011                                 legacy_source_untouchable[i[3]] = ""
1012
1013                     old_file = x[0] + x[1]
1014                     old_file_fh = utils.open_file(old_file)
1015                     actual_md5 = apt_pkg.md5sum(old_file_fh)
1016                     old_file_fh.close()
1017                     actual_size = os.stat(old_file)[stat.ST_SIZE]
1018                     found = old_file
1019                     suite_type = x[2]
1020                     dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
1021                     # See install() in process-accepted...
1022                     self.pkg.orig_tar_id = x[3]
1023                     self.pkg.orig_tar_gz = old_file
1024                     if suite_type == "legacy" or suite_type == "legacy-mixed":
1025                         self.pkg.orig_tar_location = "legacy"
1026                     else:
1027                         self.pkg.orig_tar_location = x[4]
1028                 else:
1029                     # Not there? Check the queue directories...
1030
1031                     in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file)
1032                     # See process_it() in 'dak process-unchecked' for explanation of this
1033                     # in_unchecked check dropped by ajt 2007-08-28, how did that
1034                     # ever make sense?
1035                     if os.path.exists(in_unchecked) and False:
1036                         return (self.reject_message, in_unchecked)
1037                     else:
1038                         for dir in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates" ]:
1039                             in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (dir)],dsc_file)
1040                             if os.path.exists(in_otherdir):
1041                                 in_otherdir_fh = utils.open_file(in_otherdir)
1042                                 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
1043                                 in_otherdir_fh.close()
1044                                 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
1045                                 found = in_otherdir
1046                                 self.pkg.orig_tar_gz = in_otherdir
1047
1048                     if not found:
1049                         self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file))
1050                         self.pkg.orig_tar_gz = -1
1051                         continue
1052             else:
1053                 self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file))
1054                 continue
1055             if actual_md5 != dsc_files[dsc_file]["md5sum"]:
1056                 self.reject("md5sum for %s doesn't match %s." % (found, file))
1057             if actual_size != int(dsc_files[dsc_file]["size"]):
1058                 self.reject("size for %s doesn't match %s." % (found, file))
1059
1060         return (self.reject_message, None)
1061
1062     def do_query(self, q):
1063         sys.stderr.write("query: \"%s\" ... " % (q))
1064         before = time.time()
1065         r = self.projectB.query(q)
1066         time_diff = time.time()-before
1067         sys.stderr.write("took %.3f seconds.\n" % (time_diff))
1068         return r