1 #!/usr/bin/env python
2
3 # Queue utility functions for dak
4 # Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006  James Troup <james@nocrew.org>
5
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 2 of the License, or
9 # (at your option) any later version.
10
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14 # GNU General Public License for more details.
15
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software
18 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
19
20 ###############################################################################
21
22 import cPickle, errno, os, pg, re, stat, sys, time
23 import apt_inst, apt_pkg
24 import utils, database
25
26 from types import *
27
28 ###############################################################################
29
30 re_isanum = re.compile (r"^\d+$")
31 re_default_answer = re.compile(r"\[(.*)\]")
32 re_fdnic = re.compile(r"\n\n")
33 re_bin_only_nmu = re.compile(r"\+b\d+$")
34
35 ################################################################################
36
37 # Determine which parts of a .changes file are NEW
38
39 def determine_new(changes, files, projectB, warn=1):
40     new = {}
41
42     # Build up a list of potentially new things
43     for file in files.keys():
44         f = files[file]
45         # Skip byhand elements
46         if f["type"] == "byhand":
47             continue
48         pkg = f["package"]
49         priority = f["priority"]
50         section = f["section"]
51         type = get_type(f)
52         component = f["component"]
53
54         if type == "dsc":
55             priority = "source"
56         if not new.has_key(pkg):
57             new[pkg] = {}
58             new[pkg]["priority"] = priority
59             new[pkg]["section"] = section
60             new[pkg]["type"] = type
61             new[pkg]["component"] = component
62             new[pkg]["files"] = []
63         else:
64             old_type = new[pkg]["type"]
65             if old_type != type:
66                 # source gets trumped by deb or udeb
67                 if old_type == "dsc":
68                     new[pkg]["priority"] = priority
69                     new[pkg]["section"] = section
70                     new[pkg]["type"] = type
71                     new[pkg]["component"] = component
72         new[pkg]["files"].append(file)
73         if f.has_key("othercomponents"):
74             new[pkg]["othercomponents"] = f["othercomponents"]
75
76     for suite in changes["suite"].keys():
77         suite_id = database.get_suite_id(suite)
78         for pkg in new.keys():
79             component_id = database.get_component_id(new[pkg]["component"])
80             type_id = database.get_override_type_id(new[pkg]["type"])
81             q = projectB.query("SELECT package FROM override WHERE package = '%s' AND suite = %s AND component = %s AND type = %s" % (pkg, suite_id, component_id, type_id))
82             ql = q.getresult()
83             if ql:
84                 for file in new[pkg]["files"]:
85                     if files[file].has_key("new"):
86                         del files[file]["new"]
87                 del new[pkg]
88
89     if warn:
90         if changes["suite"].has_key("stable"):
91             print "WARNING: overrides will be added for stable!"
92         if changes["suite"].has_key("oldstable"):
93             print "WARNING: overrides will be added for OLDstable!"
94         for pkg in new.keys():
95             if new[pkg].has_key("othercomponents"):
96                 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
97
98     return new
99
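# Illustrative sketch (not part of the original code): determine_new() returns
# a dictionary keyed by package name, roughly of the form
#
#   { "foo": { "priority": "optional", "section": "utils", "type": "deb",
#              "component": "main", "files": [ "foo_1.0-1_i386.deb" ] } }
#
# with an additional "othercomponents" entry when the package already exists
# in another component.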
100 ################################################################################
101
102 def get_type(f):
103     # Determine the type
104     if f.has_key("dbtype"):
105         type = f["dbtype"]
106     elif f["type"] in [ "orig.tar.gz", "orig.tar.bz2", "tar.gz", "tar.bz2", "diff.gz", "diff.bz2", "dsc" ]:
107         type = "dsc"
108     else:
109         utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (f["type"]))
110
111     # Validate the override type
112     type_id = database.get_override_type_id(type)
113     if type_id == -1:
114         utils.fubar("invalid type (%s) for new.  Say wha?" % (type))
115
116     return type
117
118 ################################################################################
119
120 # check if section/priority values are valid
121
122 def check_valid(new):
123     for pkg in new.keys():
124         section = new[pkg]["section"]
125         priority = new[pkg]["priority"]
126         type = new[pkg]["type"]
127         new[pkg]["section id"] = database.get_section_id(section)
128         new[pkg]["priority id"] = database.get_priority_id(new[pkg]["priority"])
129         # Sanity checks
130         di = section.find("debian-installer") != -1
131         if (di and type != "udeb") or (not di and type == "udeb"):
132             new[pkg]["section id"] = -1
133         if (priority == "source" and type != "dsc") or \
134            (priority != "source" and type == "dsc"):
135             new[pkg]["priority id"] = -1
136
137
138 ###############################################################################
139
140 # Convenience wrapper to carry around all the package information
141
142 class Pkg:
143     def __init__(self, **kwds):
144         self.__dict__.update(kwds)
145
146     def update(self, **kwds):
147         self.__dict__.update(kwds)
148
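# Illustrative usage (assumption, not in the original code): every keyword
# argument becomes an attribute, e.g.
#
#   pkg = Pkg(changes = {}, files = {})
#   pkg.update(orig_tar_id = None)
#   pkg.changes        # -> {}
#   pkg.orig_tar_id    # -> None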
149 ###############################################################################
150
151 class Upload:
152
153     def __init__(self, Cnf):
154         self.Cnf = Cnf
155         self.accept_count = 0
156         self.accept_bytes = 0L
157         self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
158                        legacy_source_untouchable = {})
159
160         # Initialize the substitution template mapping global
161         Subst = self.Subst = {}
162         Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"]
163         Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"]
164         Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"]
165         Subst["__DAK_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
166
167         self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
168         database.init(Cnf, self.projectB)
169
170     ###########################################################################
171
172     def init_vars (self):
173         for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
174             exec "self.pkg.%s.clear();" % (i)
175         self.pkg.orig_tar_id = None
176         self.pkg.orig_tar_location = ""
177         self.pkg.orig_tar_gz = None
178
179     ###########################################################################
180
181     def update_vars (self):
182         dump_filename = self.pkg.changes_file[:-8]+".dak"
183         dump_file = utils.open_file(dump_filename)
184         p = cPickle.Unpickler(dump_file)
185         for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
186             exec "self.pkg.%s.update(p.load());" % (i)
187         for i in [ "orig_tar_id", "orig_tar_location" ]:
188             exec "self.pkg.%s = p.load();" % (i)
189         dump_file.close()
190
191     ###########################################################################
192
193     # This could just dump the dictionaries as is, but I'd like to
194     # avoid this so there's some idea of what process-accepted &
195     # process-new use from process-unchecked
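    # NB: the order in which the objects are pickled below must stay in sync
    # with the load order in update_vars() above.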
196
197     def dump_vars(self, dest_dir):
198         for i in [ "changes", "dsc", "files", "dsc_files",
199                    "legacy_source_untouchable", "orig_tar_id", "orig_tar_location" ]:
200             exec "%s = self.pkg.%s;" % (i,i)
201         dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".dak")
202         dump_file = utils.open_file(dump_filename, 'w')
203         try:
204             os.chmod(dump_filename, 0660)
205         except OSError, e:
206             if errno.errorcode[e.errno] == 'EPERM':
207                 perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE])
208                 if perms & stat.S_IROTH:
209                     utils.fubar("%s is world readable and chmod failed." % (dump_filename))
210             else:
211                 raise
212
213         p = cPickle.Pickler(dump_file, 1)
214         for i in [ "d_changes", "d_dsc", "d_files", "d_dsc_files" ]:
215             exec "%s = {}" % i
216         ## files
217         for file in files.keys():
218             d_files[file] = {}
219             for i in [ "package", "version", "architecture", "type", "size",
220                        "md5sum", "component", "location id", "source package",
221                        "source version", "maintainer", "dbtype", "files id",
222                        "new", "section", "priority", "othercomponents",
223                        "pool name", "original component" ]:
224                 if files[file].has_key(i):
225                     d_files[file][i] = files[file][i]
226         ## changes
227         # Mandatory changes fields
228         for i in [ "distribution", "source", "architecture", "version",
229                    "maintainer", "urgency", "fingerprint", "changedby822",
230                    "changedby2047", "changedbyname", "maintainer822",
231                    "maintainer2047", "maintainername", "maintaineremail",
232                    "closes", "changes" ]:
233             d_changes[i] = changes[i]
234         # Optional changes fields
235         for i in [ "changed-by", "filecontents", "format", "process-new note", "adv id", "distribution-version" ]:
236             if changes.has_key(i):
237                 d_changes[i] = changes[i]
238         ## dsc
239         for i in [ "source", "version", "maintainer", "fingerprint",
240                    "uploaders", "bts changelog", "dm-upload-allowed" ]:
241             if dsc.has_key(i):
242                 d_dsc[i] = dsc[i]
243         ## dsc_files
244         for file in dsc_files.keys():
245             d_dsc_files[file] = {}
246             # Mandatory dsc_files fields
247             for i in [ "size", "md5sum" ]:
248                 d_dsc_files[file][i] = dsc_files[file][i]
249             # Optional dsc_files fields
250             for i in [ "files id" ]:
251                 if dsc_files[file].has_key(i):
252                     d_dsc_files[file][i] = dsc_files[file][i]
253
254         for i in [ d_changes, d_dsc, d_files, d_dsc_files,
255                    legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
256             p.dump(i)
257         dump_file.close()
258
259     ###########################################################################
260
261     # Set up the per-package template substitution mappings
262
263     def update_subst (self, reject_message = ""):
264         Subst = self.Subst
265         changes = self.pkg.changes
266         # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
267         if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
268             changes["architecture"] = { "Unknown" : "" }
269         # and maintainer2047 may not exist.
270         if not changes.has_key("maintainer2047"):
271             changes["maintainer2047"] = self.Cnf["Dinstall::MyEmailAddress"]
272
273         Subst["__ARCHITECTURE__"] = " ".join(changes["architecture"].keys())
274         Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
275         Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "")
276
277         # For source uploads the Changed-By field wins; otherwise Maintainer wins.
278         if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
279             Subst["__MAINTAINER_FROM__"] = changes["changedby2047"]
280             Subst["__MAINTAINER_TO__"] = "%s, %s" % (changes["changedby2047"],
281                                                      changes["maintainer2047"])
282             Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown")
283         else:
284             Subst["__MAINTAINER_FROM__"] = changes["maintainer2047"]
285             Subst["__MAINTAINER_TO__"] = changes["maintainer2047"]
286             Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown")
287         if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
288             Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])
289
290         # Apply any global override of the Maintainer field
291         if self.Cnf.get("Dinstall::OverrideMaintainer"):
292             Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"]
293             Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"]
294
295         Subst["__REJECT_MESSAGE__"] = reject_message
296         Subst["__SOURCE__"] = changes.get("source", "Unknown")
297         Subst["__VERSION__"] = changes.get("version", "Unknown")
298
299     ###########################################################################
300
301     def build_summaries(self):
302         changes = self.pkg.changes
303         files = self.pkg.files
304
305         byhand = summary = new = ""
306
307         # changes["distribution"] may not exist in corner cases
308         # (e.g. unreadable changes files)
309         if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
310             changes["distribution"] = {}
311
312         override_summary = ""
313         file_keys = files.keys()
314         file_keys.sort()
315         for file in file_keys:
316             if files[file].has_key("byhand"):
317                 byhand = 1
318                 summary += file + " byhand\n"
319             elif files[file].has_key("new"):
320                 new = 1
321                 summary += "(new) %s %s %s\n" % (file, files[file]["priority"], files[file]["section"])
322                 if files[file].has_key("othercomponents"):
323                     summary += "WARNING: Already present in %s distribution.\n" % (files[file]["othercomponents"])
324                 if files[file]["type"] == "deb":
325                     deb_fh = utils.open_file(file)
326                     summary += apt_pkg.ParseSection(apt_inst.debExtractControl(deb_fh))["Description"] + '\n'
327                     deb_fh.close()
328             else:
329                 files[file]["pool name"] = utils.poolify (changes.get("source",""), files[file]["component"])
330                 destination = self.Cnf["Dir::PoolRoot"] + files[file]["pool name"] + file
331                 summary += file + "\n  to " + destination + "\n"
332                 if not files[file].has_key("type"):
333                     files[file]["type"] = "unknown"
334                 if files[file]["type"] in ["deb", "udeb", "dsc"]:
335                     # In queue/unchecked we already have override entries, so use them;
336                     # in process-new we don't yet, so use the newly generated ones.
337                     override_prio = files[file].get("override priority", files[file]["priority"])
338                     override_sect = files[file].get("override section", files[file]["section"])
339                     override_summary += "%s - %s %s\n" % (file, override_prio, override_sect)
340
341         short_summary = summary
342
343         # This is for direport's benefit...
344         f = re_fdnic.sub("\n .\n", changes.get("changes",""))
345
346         if byhand or new:
347             summary += "Changes: " + f
348
349         summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
350
351         summary += self.announce(short_summary, 0)
352
353         return (summary, short_summary)
354
355     ###########################################################################
356
357     def close_bugs (self, summary, action):
358         changes = self.pkg.changes
359         Subst = self.Subst
360         Cnf = self.Cnf
361
362         bugs = changes["closes"].keys()
363
364         if not bugs:
365             return summary
366
367         bugs.sort()
368         summary += "Closing bugs: "
369         for bug in bugs:
370             summary += "%s " % (bug)
371             if action:
372                 Subst["__BUG_NUMBER__"] = bug
373                 if changes["distribution"].has_key("stable"):
374                     Subst["__STABLE_WARNING__"] = """
375 Note that this package is not part of the released stable Debian
376 distribution.  It may have dependencies on other unreleased software,
377 or other instabilities.  Please take care if you wish to install it.
378 The update will eventually make its way into the next released Debian
379 distribution."""
380                 else:
381                     Subst["__STABLE_WARNING__"] = ""
382                 mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-close")
383                 utils.send_mail (mail_message)
384         if action:
385             self.Logger.log(["closing bugs"]+bugs)
386         summary += "\n"
387
388         return summary
389
390     ###########################################################################
391
392     def announce (self, short_summary, action):
393         Subst = self.Subst
394         Cnf = self.Cnf
395         changes = self.pkg.changes
396
397         # Only do announcements for source uploads with a recent dpkg-dev installed
398         if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):
399             return ""
400
401         lists_done = {}
402         summary = ""
403         Subst["__SHORT_SUMMARY__"] = short_summary
404
405         for dist in changes["distribution"].keys():
406             list = Cnf.Find("Suite::%s::Announce" % (dist))
407             if list == "" or lists_done.has_key(list):
408                 continue
409             lists_done[list] = 1
410             summary += "Announcing to %s\n" % (list)
411
412             if action:
413                 Subst["__ANNOUNCE_LIST_ADDRESS__"] = list
414                 if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
415                     Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"])
416                 mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.announce")
417                 utils.send_mail (mail_message)
418
419         if Cnf.FindB("Dinstall::CloseBugs"):
420             summary = self.close_bugs(summary, action)
421
422         return summary
423
424     ###########################################################################
425
426     def accept (self, summary, short_summary):
427         Cnf = self.Cnf
428         Subst = self.Subst
429         files = self.pkg.files
430         changes = self.pkg.changes
431         changes_file = self.pkg.changes_file
432         dsc = self.pkg.dsc
433
434         print "Accepting."
435         self.Logger.log(["Accepting changes",changes_file])
436
437         self.dump_vars(Cnf["Dir::Queue::Accepted"])
438
439         # Move all the files into the accepted directory
440         utils.move(changes_file, Cnf["Dir::Queue::Accepted"])
441         file_keys = files.keys()
442         for file in file_keys:
443             utils.move(file, Cnf["Dir::Queue::Accepted"])
444             self.accept_bytes += float(files[file]["size"])
445         self.accept_count += 1
446
447         # Send accept mail, announce to lists, close bugs and check for
448         # override disparities
449         if not Cnf["Dinstall::Options::No-Mail"]:
450             Subst["__SUITE__"] = ""
451             Subst["__SUMMARY__"] = summary
452             mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
453             utils.send_mail(mail_message)
454             self.announce(short_summary, 1)
455
456
457         ## Helper stuff for DebBugs Version Tracking
458         if Cnf.Find("Dir::Queue::BTSVersionTrack"):
459             # ??? once queue/* is cleared on *.d.o and/or reprocessed
460             # the conditionalization on dsc["bts changelog"] should be
461             # dropped.
462
463             # Write out the version history from the changelog
464             if changes["architecture"].has_key("source") and \
465                dsc.has_key("bts changelog"):
466
467                 temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
468                                                     dotprefix=1, perms=0644)
469                 version_history = utils.open_file(temp_filename, 'w')
470                 version_history.write(dsc["bts changelog"])
471                 version_history.close()
472                 filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
473                                       changes_file[:-8]+".versions")
474                 os.rename(temp_filename, filename)
475
476             # Write out the binary -> source mapping.
477             temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
478                                                 dotprefix=1, perms=0644)
479             debinfo = utils.open_file(temp_filename, 'w')
480             for file in file_keys:
481                 f = files[file]
482                 if f["type"] == "deb":
483                     line = " ".join([f["package"], f["version"],
484                                      f["architecture"], f["source package"],
485                                      f["source version"]])
486                     debinfo.write(line+"\n")
487             debinfo.close()
488             filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
489                                   changes_file[:-8]+".debinfo")
490             os.rename(temp_filename, filename)
491
492         self.queue_build("accepted", Cnf["Dir::Queue::Accepted"])
493
494     ###########################################################################
495
496     def queue_build (self, queue, path):
497         Cnf = self.Cnf
498         Subst = self.Subst
499         files = self.pkg.files
500         changes = self.pkg.changes
501         changes_file = self.pkg.changes_file
502         dsc = self.pkg.dsc
503         file_keys = files.keys()
504
505         ## Special support to enable clean auto-building of queued packages
506         queue_id = database.get_or_set_queue_id(queue)
507
508         self.projectB.query("BEGIN WORK")
509         for suite in changes["distribution"].keys():
510             if suite not in Cnf.ValueList("Dinstall::QueueBuildSuites"):
511                 continue
512             suite_id = database.get_suite_id(suite)
513             dest_dir = Cnf["Dir::QueueBuild"]
514             if Cnf.FindB("Dinstall::SecurityQueueBuild"):
515                 dest_dir = os.path.join(dest_dir, suite)
516             for file in file_keys:
517                 src = os.path.join(path, file)
518                 dest = os.path.join(dest_dir, file)
519                 if Cnf.FindB("Dinstall::SecurityQueueBuild"):
520                     # Copy it since the original won't be readable by www-data
521                     utils.copy(src, dest)
522                 else:
523                     # Create a symlink to it
524                     os.symlink(src, dest)
525                 # Add it to the list of packages for later processing by apt-ftparchive
526                 self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
527             # If the .orig.tar.gz is in the pool, create a symlink to
528             # it (if one doesn't already exist)
529             if self.pkg.orig_tar_id:
530                 # Determine the .orig.tar.gz file name
531                 for dsc_file in self.pkg.dsc_files.keys():
532                     if dsc_file.endswith(".orig.tar.gz"):
533                         filename = dsc_file
534                 dest = os.path.join(dest_dir, filename)
535                 # If it doesn't exist, create a symlink
536                 if not os.path.exists(dest):
537                     # Find the .orig.tar.gz in the pool
538                     q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id))
539                     ql = q.getresult()
540                     if not ql:
541                         utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id))
542                     src = os.path.join(ql[0][0], ql[0][1])
543                     os.symlink(src, dest)
544                     # Add it to the list of packages for later processing by apt-ftparchive
545                     self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
546                 # if it does, update things to ensure it's not removed prematurely
547                 else:
548                     self.projectB.query("UPDATE queue_build SET in_queue = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id))
549
550         self.projectB.query("COMMIT WORK")
551
552     ###########################################################################
553
554     def check_override (self):
555         Subst = self.Subst
556         changes = self.pkg.changes
557         files = self.pkg.files
558         Cnf = self.Cnf
559
560         # Abandon the check if:
561         #  a) it's a non-sourceful upload
562         #  b) override disparity checks have been disabled
563         #  c) we're not sending mail
564         if not changes["architecture"].has_key("source") or \
565            not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
566            Cnf["Dinstall::Options::No-Mail"]:
567             return
568
569         summary = ""
570         file_keys = files.keys()
571         file_keys.sort()
572         for file in file_keys:
573             if not files[file].has_key("new") and files[file]["type"] == "deb":
574                 section = files[file]["section"]
575                 override_section = files[file]["override section"]
576                 if section.lower() != override_section.lower() and section != "-":
577                     summary += "%s: package says section is %s, override says %s.\n" % (file, section, override_section)
578                 priority = files[file]["priority"]
579                 override_priority = files[file]["override priority"]
580                 if priority != override_priority and priority != "-":
581                     summary += "%s: package says priority is %s, override says %s.\n" % (file, priority, override_priority)
582
583         if summary == "":
584             return
585
586         Subst["__SUMMARY__"] = summary
587         mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/process-unchecked.override-disparity")
588         utils.send_mail(mail_message)
589
590     ###########################################################################
591
592     def force_reject (self, files):
593         """Forcefully move files from the current directory to the
594            reject directory.  If any file already exists in the reject
595            directory it will be moved to the morgue to make way for
596            the new file."""
597
598         Cnf = self.Cnf
599
600         for file in files:
601             # Skip any files which don't exist or which we don't have permission to copy.
602             if os.access(file,os.R_OK) == 0:
603                 continue
604             dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file)
605             try:
606                 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
607             except OSError, e:
608                 # File exists?  Let's try and move it to the morgue
609                 if errno.errorcode[e.errno] == 'EEXIST':
610                     morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file)
611                     try:
612                         morgue_file = utils.find_next_free(morgue_file)
613                     except utils.tried_too_hard_exc:
614                         # Something's either gone badly Pete Tong, or
615                         # someone is trying to exploit us.
616                         utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file))
617                         return
618                     utils.move(dest_file, morgue_file, perms=0660)
619                     try:
620                         dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
621                     except OSError, e:
622                         # Likewise
623                         utils.warn("**WARNING** failed to claim %s in the reject directory." % (file))
624                         return
625                 else:
626                     raise
627             # If we got here, we own the destination file, so we can
628             # safely overwrite it.
629             utils.move(file, dest_file, 1, perms=0660)
630             os.close(dest_fd)
631
632     ###########################################################################
633
634     def do_reject (self, manual = 0, reject_message = ""):
635         # If we weren't given a manual rejection message, spawn an
636         # editor so the user can add one in...
637         if manual and not reject_message:
638             temp_filename = utils.temp_filename()
639             editor = os.environ.get("EDITOR","vi")
640             answer = 'E'
641             while answer == 'E':
642                 os.system("%s %s" % (editor, temp_filename))
643                 temp_fh = utils.open_file(temp_filename)
644                 reject_message = "".join(temp_fh.readlines())
645                 temp_fh.close()
646                 print "Reject message:"
647                 print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
648                 prompt = "[R]eject, Edit, Abandon, Quit ?"
649                 answer = "XXX"
650                 while prompt.find(answer) == -1:
651                     answer = utils.our_raw_input(prompt)
652                     m = re_default_answer.search(prompt)
653                     if answer == "":
654                         answer = m.group(1)
655                     answer = answer[:1].upper()
656             os.unlink(temp_filename)
657             if answer == 'A':
658                 return 1
659             elif answer == 'Q':
660                 sys.exit(0)
661
662         print "Rejecting.\n"
663
664         Cnf = self.Cnf
665         Subst = self.Subst
666         pkg = self.pkg
667
668         reason_filename = pkg.changes_file[:-8] + ".reason"
669         reason_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename
670
671         # Move all the files into the reject directory
672         reject_files = pkg.files.keys() + [pkg.changes_file]
673         self.force_reject(reject_files)
674
675         # If we fail here someone is probably trying to exploit the race
676         # so let's just raise an exception ...
677         if os.path.exists(reason_filename):
678             os.unlink(reason_filename)
679         reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
680
681         if not manual:
682             Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
683             Subst["__MANUAL_REJECT_MESSAGE__"] = ""
684             Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
685             os.write(reason_fd, reject_message)
686             reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
687         else:
688             # Build up the rejection email
689             user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"])
690
691             Subst["__REJECTOR_ADDRESS__"] = user_email_address
692             Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
693             Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
694             reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
695             # Write the rejection email out as the <foo>.reason file
696             os.write(reason_fd, reject_mail_message)
697
698         os.close(reason_fd)
699
700         # Send the rejection mail if appropriate
701         if not Cnf["Dinstall::Options::No-Mail"]:
702             utils.send_mail(reject_mail_message)
703
704         self.Logger.log(["rejected", pkg.changes_file])
705         return 0
706
707     ################################################################################
708
709     # Ensure that source exists somewhere in the archive for the binary
710     # upload being processed.
711     #
712     # (1) exact match                      => 1.0-3
713     # (2) Bin-only NMU                     => 1.0-3+b1 , 1.0-3.1+b1
714
715     def source_exists (self, package, source_version, suites = ["any"]):
716         okay = 1
717         for suite in suites:
718             if suite == "any":
719                 que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
720                     (package)
721             else:
722                 # source must exist in suite X, or in some other suite that's
723                 # mapped to X, recursively... silent-maps are counted too,
724                 # unreleased-maps aren't.
725                 maps = self.Cnf.ValueList("SuiteMappings")[:]
726                 maps.reverse()
727                 maps = [ m.split() for m in maps ]
728                 maps = [ (x[1], x[2]) for x in maps
729                                 if x[0] == "map" or x[0] == "silent-map" ]
730                 s = [suite]
731                 for x in maps:
732                     if x[1] in s and x[0] not in s:
733                         s.append(x[0])
734
735                 que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, " OR ".join(["su.suite_name = '%s'" % a for a in s]))
736             q = self.projectB.query(que)
737
738             # Reduce the query results to a list of version numbers
739             ql = [ i[0] for i in q.getresult() ]
740
741             # Try (1)
742             if source_version in ql:
743                 continue
744
745             # Try (2)
746             orig_source_version = re_bin_only_nmu.sub('', source_version)
747             if orig_source_version in ql:
748                 continue
749
750             # No source found...
751             okay = 0
752             break
753         return okay
754
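    # Illustrative example (not in the original code): a binary-only NMU built
    # from source 1.0-3 carries a version such as 1.0-3+b1;
    # re_bin_only_nmu.sub('', "1.0-3+b1") yields "1.0-3", so case (2) above
    # still finds the corresponding source package.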
755     ################################################################################
756     
757     def in_override_p (self, package, component, suite, binary_type, file):
758         files = self.pkg.files
759
760         if binary_type == "": # must be source
761             type = "dsc"
762         else:
763             type = binary_type
764
765         # Override suite name; used for example with proposed-updates
766         if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
767             suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)]
768
769         # Avoid <undef> on unknown distributions
770         suite_id = database.get_suite_id(suite)
771         if suite_id == -1:
772             return None
773         component_id = database.get_component_id(component)
774         type_id = database.get_override_type_id(type)
775
776         q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
777                            % (package, suite_id, component_id, type_id))
778         result = q.getresult()
779         # If checking for a source package fall back on the binary override type
780         if type == "dsc" and not result:
781             deb_type_id = database.get_override_type_id("deb")
782             udeb_type_id = database.get_override_type_id("udeb")
783             q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
784                                % (package, suite_id, component_id, deb_type_id, udeb_type_id))
785             result = q.getresult()
786
787         # Remember the section and priority so we can check them later if appropriate
788         if result:
789             files[file]["override section"] = result[0][0]
790             files[file]["override priority"] = result[0][1]
791
792         return result
793
794     ################################################################################
795
796     def reject (self, str, prefix="Rejected: "):
797         if str:
798             # Unlike other rejects we add new lines first to avoid trailing
799             # new lines when this message is passed back up to a caller.
800             if self.reject_message:
801                 self.reject_message += "\n"
802             self.reject_message += prefix + str
803
804     ################################################################################
805
806     def get_anyversion(self, query_result, suite):
807         anyversion=None
808         anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
809         for (v, s) in query_result:
810             if s in [ x.lower() for x in anysuite ]:
811                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
812                     anyversion=v
813         return anyversion
814
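    # Illustrative example (not in the original code): given query_result rows
    # of (version, suite_name) pairs, get_anyversion(rows, "unstable") returns
    # the highest version found in unstable or in any suite listed under
    # Suite::unstable::VersionChecks::Enhances.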
815     ################################################################################
816
817     def cross_suite_version_check(self, query_result, file, new_version):
818         """Ensure versions are newer than existing packages in target
819         suites and that cross-suite version checking rules as
820         set out in the conf file are satisfied."""
821
822         # Check versions for each target suite
823         for target_suite in self.pkg.changes["distribution"].keys():
824             must_be_newer_than = [ i.lower() for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
825             must_be_older_than = [ i.lower() for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
826             # Enforce "must be newer than target suite" even if conffile omits it
827             if target_suite not in must_be_newer_than:
828                 must_be_newer_than.append(target_suite)
829             for entry in query_result:
830                 existent_version = entry[0]
831                 suite = entry[1]
832                 if suite in must_be_newer_than and \
833                    apt_pkg.VersionCompare(new_version, existent_version) < 1:
834                     self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
835                 if suite in must_be_older_than and \
836                    apt_pkg.VersionCompare(new_version, existent_version) > -1:
837                     ch = self.pkg.changes
838                     cansave = 0
839                     if ch.get('distribution-version', {}).has_key(suite):
840                         # we really use the other suite, ignoring the conflicting one ...
841                         addsuite = ch["distribution-version"][suite]
842                     
843                         add_version = self.get_anyversion(query_result, addsuite)
844                         target_version = self.get_anyversion(query_result, target_suite)
845                     
846                         if not add_version:
847                             # not add_version can only happen if we map to a suite
848                             # that doesn't enhance the suite we're propup'ing from.
849                             # so "propup-ver x a b c; map a d" is a problem only if
850                             # d doesn't enhance a.
851                             #
852                             # i think we could always propagate in this case, rather
853                             # than complaining. either way, this isn't a REJECT issue
854                             #
855                             # And - we really should complain to the dorks who configured dak
856                             self.reject("%s is mapped to, but not enhanced by %s - adding anyway" % (suite, addsuite), "Warning: ")
857                             self.pkg.changes.setdefault("propdistribution", {})
858                             self.pkg.changes["propdistribution"][addsuite] = 1
859                             cansave = 1
860                         elif not target_version:
861                             # not target_version is true when the package is NEW
862                             # we could just stick with the "...old version..." REJECT
863                             # for this, I think.
864                             self.reject("Won't propagate NEW packages.")
865                         elif apt_pkg.VersionCompare(new_version, add_version) < 0:
866                             # propagation would be redundant; no need to reject though.
867                             self.reject("ignoring version conflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite), "Warning: ")
868                             cansave = 1
869                         elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
870                              apt_pkg.VersionCompare(add_version, target_version) >= 0:
871                             # propagate!!
872                             self.reject("Propagating upload to %s" % (addsuite), "Warning: ")
873                             self.pkg.changes.setdefault("propdistribution", {})
874                             self.pkg.changes["propdistribution"][addsuite] = 1
875                             cansave = 1
876                 
877                     if not cansave:
878                         self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
879
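    # Worked example (illustrative; the suite names are assumptions): if the
    # configuration lists stable under
    # Suite::unstable::VersionChecks::MustBeNewerThan, an upload of version
    # 1.0-2 targeted at unstable is rejected whenever stable already carries
    # 1.0-2 or newer, since VersionCompare(new, existing) must be >= 1 for
    # every existing version in such a suite.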
880     ################################################################################
881
882     def check_binary_against_db(self, file):
883         self.reject_message = ""
884         files = self.pkg.files
885
886         # Ensure version is sane
887         q = self.projectB.query("""
888 SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
889                                      architecture a
890  WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
891    AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
892                                 % (files[file]["package"],
893                                    files[file]["architecture"]))
894         self.cross_suite_version_check(q.getresult(), file, files[file]["version"])
895
896         # Check for any existing copies of the file
897         q = self.projectB.query("""
898 SELECT b.id FROM binaries b, architecture a
899  WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
900    AND a.id = b.architecture"""
901                                 % (files[file]["package"],
902                                    files[file]["version"],
903                                    files[file]["architecture"]))
904         if q.getresult():
905             self.reject("%s: cannot overwrite existing copy already in the archive." % (file))
906
907         return self.reject_message
908
909     ################################################################################
910
911     def check_source_against_db(self, file):
912         self.reject_message = ""
913         dsc = self.pkg.dsc
914
915         # Ensure version is sane
916         q = self.projectB.query("""
917 SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
918  WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")))
919         self.cross_suite_version_check(q.getresult(), file, dsc.get("version"))
920
921         return self.reject_message
922
923     ################################################################################
924
925     # **WARNING**
926     # NB: this function can remove entries from the 'files' index [if
927     # the .orig.tar.gz is a duplicate of the one in the archive]; if
928     # you're iterating over 'files' and call this function as part of
929     # the loop, be sure to add a check to the top of the loop to
930     # ensure you haven't just tried to dereference the deleted entry.
931     # **WARNING**
932
933     def check_dsc_against_db(self, file):
934         self.reject_message = ""
935         files = self.pkg.files
936         dsc_files = self.pkg.dsc_files
937         legacy_source_untouchable = self.pkg.legacy_source_untouchable
938         self.pkg.orig_tar_gz = None
939
940         # Try and find all files mentioned in the .dsc.  This has
941         # to work harder to cope with the multiple possible
942         # locations of an .orig.tar.gz.
943         # The ordering on the select is needed to pick the newest orig
944         # when it exists in multiple places.
945         for dsc_file in dsc_files.keys():
946             found = None
947             if files.has_key(dsc_file):
948                 actual_md5 = files[dsc_file]["md5sum"]
949                 actual_size = int(files[dsc_file]["size"])
950                 found = "%s in incoming" % (dsc_file)
951                 # Check the file does not already exist in the archive
952                 q = self.projectB.query("SELECT f.size, f.md5sum, l.path, f.filename FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location ORDER BY f.id DESC" % (dsc_file))
953                 ql = q.getresult()
954                 # Strip out anything that isn't '%s' or '/%s$'
955                 for i in ql[:]:   # iterate over a copy since entries are removed below
956                     if i[3] != dsc_file and i[3][-(len(dsc_file)+1):] != '/'+dsc_file:
957                         ql.remove(i)
958
959                 # "[dak] has not broken them.  [dak] has fixed a
960                 # brokenness.  Your crappy hack exploited a bug in
961                 # the old dinstall."
962                 #
963                 # "(Come on!  I thought it was always obvious that
964                 # one just doesn't release different files with
965                 # the same name and version.)"
966                 #                        -- ajk@ on d-devel@l.d.o
967
968                 if ql:
969                     # Ignore exact matches for .orig.tar.gz
970                     match = 0
971                     if dsc_file.endswith(".orig.tar.gz"):
972                         for i in ql:
973                             if files.has_key(dsc_file) and \
974                                int(files[dsc_file]["size"]) == int(i[0]) and \
975                                files[dsc_file]["md5sum"] == i[1]:
976                                 self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ")
977                                 del files[dsc_file]
978                                 self.pkg.orig_tar_gz = i[2] + i[3]
979                                 match = 1
980
981                     if not match:
982                         self.reject("cannot overwrite existing copy of '%s' already in the archive." % (dsc_file))
983             elif dsc_file.endswith(".orig.tar.gz"):
984                 # Check in the pool
985                 q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
986                 ql = q.getresult()
987                 # Strip out anything that isn't '%s' or '/%s$'
988                 for i in ql[:]:   # iterate over a copy since entries are removed below
989                     if i[1] != dsc_file and i[1][-(len(dsc_file)+1):] != '/'+dsc_file:
990                         ql.remove(i)
991
992                 if ql:
993                     # Unfortunately, we may get more than one match here if,
994                     # for example, the package was in potato but had an -sa
995                     # upload in woody.  So we need to choose the right one.
996
997                     x = ql[0]; # default to something sane in case we don't match any or have only one
998
999                     if len(ql) > 1:
1000                         for i in ql:
1001                             old_file = i[0] + i[1]
1002                             old_file_fh = utils.open_file(old_file)
1003                             actual_md5 = apt_pkg.md5sum(old_file_fh)
1004                             old_file_fh.close()
1005                             actual_size = os.stat(old_file)[stat.ST_SIZE]
1006                             if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
1007                                 x = i
1008                             else:
1009                                 legacy_source_untouchable[i[3]] = ""
1010
1011                     old_file = x[0] + x[1]
1012                     old_file_fh = utils.open_file(old_file)
1013                     actual_md5 = apt_pkg.md5sum(old_file_fh)
1014                     old_file_fh.close()
1015                     actual_size = os.stat(old_file)[stat.ST_SIZE]
1016                     found = old_file
1017                     suite_type = x[2]
1018                     dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
1019                     # See install() in process-accepted...
1020                     self.pkg.orig_tar_id = x[3]
1021                     self.pkg.orig_tar_gz = old_file
1022                     if suite_type == "legacy" or suite_type == "legacy-mixed":
1023                         self.pkg.orig_tar_location = "legacy"
1024                     else:
1025                         self.pkg.orig_tar_location = x[4]
1026                 else:
1027                     # Not there? Check the queue directories...
1028
1029                     in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file)
1030                     # See process_it() in 'dak process-unchecked' for explanation of this
1031                     # in_unchecked check dropped by ajt 2007-08-28, how did that
1032                     # ever make sense?
1033                     if os.path.exists(in_unchecked) and False:
1034                         return (self.reject_message, in_unchecked)
1035                     else:
1036                         for dir in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates" ]:
1037                             in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (dir)],dsc_file)
1038                             if os.path.exists(in_otherdir):
1039                                 in_otherdir_fh = utils.open_file(in_otherdir)
1040                                 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
1041                                 in_otherdir_fh.close()
1042                                 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
1043                                 found = in_otherdir
1044                                 self.pkg.orig_tar_gz = in_otherdir
1045
1046                     if not found:
1047                         self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file))
1048                         self.pkg.orig_tar_gz = -1
1049                         continue
1050             else:
1051                 self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file))
1052                 continue
1053             if actual_md5 != dsc_files[dsc_file]["md5sum"]:
1054                 self.reject("md5sum for %s doesn't match %s." % (found, file))
1055             if actual_size != int(dsc_files[dsc_file]["size"]):
1056                 self.reject("size for %s doesn't match %s." % (found, file))
1057
1058         return (self.reject_message, None)
1059
1060     def do_query(self, q):
1061         sys.stderr.write("query: \"%s\" ... " % (q))
1062         before = time.time()
1063         r = self.projectB.query(q)
1064         time_diff = time.time()-before
1065         sys.stderr.write("took %.3f seconds.\n" % (time_diff))
1066         return r
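    # Illustrative usage (assumption, not in the original code): do_query() is
    # a drop-in replacement for self.projectB.query() that also reports the
    # elapsed time on stderr, e.g.
    #
    #   q = upload.do_query("SELECT id FROM suite WHERE suite_name = 'unstable'")
    #   rows = q.getresult()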