1 #!/usr/bin/env python
2
3 # Queue utility functions for dak
4 # Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006  James Troup <james@nocrew.org>
5
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 2 of the License, or
9 # (at your option) any later version.
10
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14 # GNU General Public License for more details.
15
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software
18 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
19
20 ###############################################################################
21
22 import cPickle, errno, os, pg, re, stat, sys, time
23 import apt_inst, apt_pkg
24 import utils, database
25
26 from types import *
27
28 ###############################################################################
29
30 re_isanum = re.compile (r"^\d+$")
31 re_default_answer = re.compile(r"\[(.*)\]")
32 re_fdnic = re.compile(r"\n\n")
33 re_bin_only_nmu = re.compile(r"\+b\d+$")
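# (re_bin_only_nmu matches the "+bN" suffix of a binary-only NMU version,
#  e.g. the "+b1" in "1.0-3+b1"; used by source_exists() below.)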
34
35 ###############################################################################
36
37 # Convenience wrapper to carry around all the package information in one place
38
39 class Pkg:
40     def __init__(self, **kwds):
41         self.__dict__.update(kwds)
42
43     def update(self, **kwds):
44         self.__dict__.update(kwds)
45
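
# Illustrative sketch only (values are made up): Pkg simply stores whatever
# keyword arguments it is given as attributes, e.g.
#
#   pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {})
#   pkg.update(orig_tar_id = None, orig_tar_location = "")
#   pkg.changes["source"] = "hello"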
46 ###############################################################################
47
48 class Upload:
49
50     def __init__(self, Cnf):
51         self.Cnf = Cnf
52         self.accept_count = 0
53         self.accept_bytes = 0L
54         self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
55                        legacy_source_untouchable = {})
56
57         # Initialize the substitution template mapping global
58         Subst = self.Subst = {}
59         Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"]
60         Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"]
61         Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"]
62         Subst["__DAK_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
63
64         self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
65         database.init(Cnf, self.projectB)
66
67     ###########################################################################
68
69     def init_vars (self):
70         for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
71             exec "self.pkg.%s.clear();" % (i)
72         self.pkg.orig_tar_id = None
73         self.pkg.orig_tar_location = ""
74         self.pkg.orig_tar_gz = None
75
76     ###########################################################################
77
78     def update_vars (self):
79         dump_filename = self.pkg.changes_file[:-8]+".dak"
80         dump_file = utils.open_file(dump_filename)
81         p = cPickle.Unpickler(dump_file)
82         for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
83             exec "self.pkg.%s.update(p.load());" % (i)
84         for i in [ "orig_tar_id", "orig_tar_location" ]:
85             exec "self.pkg.%s = p.load();" % (i)
86         dump_file.close()
87
88     ###########################################################################
89
90     # This could just dump the dictionaries as is, but I'd like to
91     # avoid this so there's some idea of what process-accepted &
92     # process-new use from process-unchecked
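    #
    # The resulting <changes>.dak file is just a sequence of cPickle dumps in a
    # fixed order -- changes, dsc, files, dsc_files, legacy_source_untouchable,
    # orig_tar_id, orig_tar_location -- the same order update_vars() reads them
    # back in.  A hand-rolled reader would look roughly like this (sketch only,
    # the file name is an example):
    #
    #   dump_fh = utils.open_file("hello_2.10-1_amd64.dak")
    #   p = cPickle.Unpickler(dump_fh)
    #   changes, dsc, files, dsc_files = [ p.load() for i in range(4) ]
    #   dump_fh.close()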
93
94     def dump_vars(self, dest_dir):
95         for i in [ "changes", "dsc", "files", "dsc_files",
96                    "legacy_source_untouchable", "orig_tar_id", "orig_tar_location" ]:
97             exec "%s = self.pkg.%s;" % (i,i)
98         dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".dak")
99         dump_file = utils.open_file(dump_filename, 'w')
100         try:
101             os.chmod(dump_filename, 0660)
102         except OSError, e:
103             if errno.errorcode[e.errno] == 'EPERM':
104                 perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE])
105                 if perms & stat.S_IROTH:
106                     utils.fubar("%s is world readable and chmod failed." % (dump_filename))
107             else:
108                 raise
109
110         p = cPickle.Pickler(dump_file, 1)
111         for i in [ "d_changes", "d_dsc", "d_files", "d_dsc_files" ]:
112             exec "%s = {}" % i
113         ## files
114         for file in files.keys():
115             d_files[file] = {}
116             for i in [ "package", "version", "architecture", "type", "size",
117                        "md5sum", "component", "location id", "source package",
118                        "source version", "maintainer", "dbtype", "files id",
119                        "new", "section", "priority", "othercomponents",
120                        "pool name", "original component" ]:
121                 if files[file].has_key(i):
122                     d_files[file][i] = files[file][i]
123         ## changes
124         # Mandatory changes fields
125         for i in [ "distribution", "source", "architecture", "version",
126                    "maintainer", "urgency", "fingerprint", "changedby822",
127                    "changedby2047", "changedbyname", "maintainer822",
128                    "maintainer2047", "maintainername", "maintaineremail",
129                    "closes", "changes" ]:
130             d_changes[i] = changes[i]
131         # Optional changes fields
132         for i in [ "changed-by", "filecontents", "format", "process-new note", "adv id", "distribution-version" ]:
133             if changes.has_key(i):
134                 d_changes[i] = changes[i]
135         ## dsc
136         for i in [ "source", "version", "maintainer", "fingerprint",
137                    "uploaders", "bts changelog" ]:
138             if dsc.has_key(i):
139                 d_dsc[i] = dsc[i]
140         ## dsc_files
141         for file in dsc_files.keys():
142             d_dsc_files[file] = {}
143             # Mandatory dsc_files fields
144             for i in [ "size", "md5sum" ]:
145                 d_dsc_files[file][i] = dsc_files[file][i]
146             # Optional dsc_files fields
147             for i in [ "files id" ]:
148                 if dsc_files[file].has_key(i):
149                     d_dsc_files[file][i] = dsc_files[file][i]
150
151         for i in [ d_changes, d_dsc, d_files, d_dsc_files,
152                    legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
153             p.dump(i)
154         dump_file.close()
155
156     ###########################################################################
157
158     # Set up the per-package template substitution mappings
159
160     def update_subst (self, reject_message = ""):
161         Subst = self.Subst
162         changes = self.pkg.changes
163         # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
164         if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
165             changes["architecture"] = { "Unknown" : "" }
166         # and maintainer2047 may not exist.
167         if not changes.has_key("maintainer2047"):
168             changes["maintainer2047"] = self.Cnf["Dinstall::MyEmailAddress"]
169
170         Subst["__ARCHITECTURE__"] = " ".join(changes["architecture"].keys())
171         Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
172         Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "")
173
174         # For source uploads the Changed-By field wins; otherwise Maintainer wins.
175         if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
176             Subst["__MAINTAINER_FROM__"] = changes["changedby2047"]
177             Subst["__MAINTAINER_TO__"] = "%s, %s" % (changes["changedby2047"],
178                                                      changes["maintainer2047"])
179             Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown")
180         else:
181             Subst["__MAINTAINER_FROM__"] = changes["maintainer2047"]
182             Subst["__MAINTAINER_TO__"] = changes["maintainer2047"]
183             Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown")
184         if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
185             Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])
186
187         # Apply any global override of the Maintainer field
188         if self.Cnf.get("Dinstall::OverrideMaintainer"):
189             Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"]
190             Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"]
191
192         Subst["__REJECT_MESSAGE__"] = reject_message
193         Subst["__SOURCE__"] = changes.get("source", "Unknown")
194         Subst["__VERSION__"] = changes.get("version", "Unknown")
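
        # A sketch of how these mappings are consumed elsewhere in this file
        # (this is the existing pattern, not new behaviour):
        #
        #   self.update_subst()
        #   mail_message = utils.TemplateSubst(Subst, Cnf["Dir::Templates"]+"/process-unchecked.accepted")
        #   utils.send_mail(mail_message)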
195
196     ###########################################################################
197
198     def build_summaries(self):
199         changes = self.pkg.changes
200         files = self.pkg.files
201
202         byhand = summary = new = ""
203
204         # changes["distribution"] may not exist in corner cases
205         # (e.g. unreadable changes files)
206         if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
207             changes["distribution"] = {}
208
209         override_summary = ""
210         file_keys = files.keys()
211         file_keys.sort()
212         for file in file_keys:
213             if files[file].has_key("byhand"):
214                 byhand = 1
215                 summary += file + " byhand\n"
216             elif files[file].has_key("new"):
217                 new = 1
218                 summary += "(new) %s %s %s\n" % (file, files[file]["priority"], files[file]["section"])
219                 if files[file].has_key("othercomponents"):
220                     summary += "WARNING: Already present in %s distribution.\n" % (files[file]["othercomponents"])
221                 if files[file]["type"] == "deb":
222                     deb_fh = utils.open_file(file)
223                     summary += apt_pkg.ParseSection(apt_inst.debExtractControl(deb_fh))["Description"] + '\n'
224                     deb_fh.close()
225             else:
226                 files[file]["pool name"] = utils.poolify (changes.get("source",""), files[file]["component"])
227                 destination = self.Cnf["Dir::PoolRoot"] + files[file]["pool name"] + file
228                 summary += file + "\n  to " + destination + "\n"
229                 if not files[file].has_key("type"):
230                     files[file]["type"] = "unknown"
231                 if files[file]["type"] in ["deb", "udeb", "dsc"]:
232                     # In queue/unchecked we already have override entries, so use them;
233                     # in process-new we don't, so use the newly generated ones.
234                     override_prio = files[file].get("override priority", files[file]["priority"])
235                     override_sect = files[file].get("override section", files[file]["section"])
236                     override_summary += "%s - %s %s\n" % (file, override_prio, override_sect)
237
238         short_summary = summary
239
240         # This is for direport's benefit...
241         f = re_fdnic.sub("\n .\n", changes.get("changes",""))
242
243         if byhand or new:
244             summary += "Changes: " + f
245
246         summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
247
248         summary += self.announce(short_summary, 0)
249
250         return (summary, short_summary)
251
252     ###########################################################################
253
254     def close_bugs (self, summary, action):
255         changes = self.pkg.changes
256         Subst = self.Subst
257         Cnf = self.Cnf
258
259         bugs = changes["closes"].keys()
260
261         if not bugs:
262             return summary
263
264         bugs.sort()
265         summary += "Closing bugs: "
266         for bug in bugs:
267             summary += "%s " % (bug)
268             if action:
269                 Subst["__BUG_NUMBER__"] = bug
270                 if changes["distribution"].has_key("stable"):
271                     Subst["__STABLE_WARNING__"] = """
272 Note that this package is not part of the released stable Debian
273 distribution.  It may have dependencies on other unreleased software,
274 or other instabilities.  Please take care if you wish to install it.
275 The update will eventually make its way into the next released Debian
276 distribution."""
277                 else:
278                     Subst["__STABLE_WARNING__"] = ""
279                 mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-close")
280                 utils.send_mail (mail_message)
281         if action:
282             self.Logger.log(["closing bugs"]+bugs)
283         summary += "\n"
284
285         return summary
286
287     ###########################################################################
288
289     def announce (self, short_summary, action):
290         Subst = self.Subst
291         Cnf = self.Cnf
292         changes = self.pkg.changes
293
294         # Only do announcements for source uploads with a recent dpkg-dev installed
295         if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):
296             return ""
297
298         lists_done = {}
299         summary = ""
300         Subst["__SHORT_SUMMARY__"] = short_summary
301
302         for dist in changes["distribution"].keys():
303             list = Cnf.Find("Suite::%s::Announce" % (dist))
304             if list == "" or lists_done.has_key(list):
305                 continue
306             lists_done[list] = 1
307             summary += "Announcing to %s\n" % (list)
308
309             if action:
310                 Subst["__ANNOUNCE_LIST_ADDRESS__"] = list
311                 if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
312                     Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"])
313                 mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.announce")
314                 utils.send_mail (mail_message)
315
316         if Cnf.FindB("Dinstall::CloseBugs"):
317             summary = self.close_bugs(summary, action)
318
319         return summary
320
321     ###########################################################################
322
323     def accept (self, summary, short_summary):
324         Cnf = self.Cnf
325         Subst = self.Subst
326         files = self.pkg.files
327         changes = self.pkg.changes
328         changes_file = self.pkg.changes_file
329         dsc = self.pkg.dsc
330
331         print "Accepting."
332         self.Logger.log(["Accepting changes",changes_file])
333
334         self.dump_vars(Cnf["Dir::Queue::Accepted"])
335
336         # Move all the files into the accepted directory
337         utils.move(changes_file, Cnf["Dir::Queue::Accepted"])
338         file_keys = files.keys()
339         for file in file_keys:
340             utils.move(file, Cnf["Dir::Queue::Accepted"])
341             self.accept_bytes += float(files[file]["size"])
342         self.accept_count += 1
343
344         # Send accept mail, announce to lists, close bugs and check for
345         # override disparities
346         if not Cnf["Dinstall::Options::No-Mail"]:
347             Subst["__SUITE__"] = ""
348             Subst["__SUMMARY__"] = summary
349             mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
350             utils.send_mail(mail_message)
351             self.announce(short_summary, 1)
352
353
354         ## Helper stuff for DebBugs Version Tracking
355         if Cnf.Find("Dir::Queue::BTSVersionTrack"):
356             # ??? once queue/* is cleared on *.d.o and/or reprocessed
357             # the conditionalization on dsc["bts changelog"] should be
358             # dropped.
359
360             # Write out the version history from the changelog
361             if changes["architecture"].has_key("source") and \
362                dsc.has_key("bts changelog"):
363
364                 temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
365                                                     dotprefix=1, perms=0644)
366                 version_history = utils.open_file(temp_filename, 'w')
367                 version_history.write(dsc["bts changelog"])
368                 version_history.close()
369                 filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
370                                       changes_file[:-8]+".versions")
371                 os.rename(temp_filename, filename)
372
373             # Write out the binary -> source mapping.
374             temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
375                                                 dotprefix=1, perms=0644)
376             debinfo = utils.open_file(temp_filename, 'w')
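            # Each line of the .debinfo file maps one binary package to its
            # source, space-separated, e.g. (made-up values):
            #   hello 2.10-1 amd64 hello 2.10-1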
377             for file in file_keys:
378                 f = files[file]
379                 if f["type"] == "deb":
380                     line = " ".join([f["package"], f["version"],
381                                      f["architecture"], f["source package"],
382                                      f["source version"]])
383                     debinfo.write(line+"\n")
384             debinfo.close()
385             filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
386                                   changes_file[:-8]+".debinfo")
387             os.rename(temp_filename, filename)
388
389         self.queue_build("accepted", Cnf["Dir::Queue::Accepted"])
390
391     ###########################################################################
392
393     def queue_build (self, queue, path):
394         Cnf = self.Cnf
395         Subst = self.Subst
396         files = self.pkg.files
397         changes = self.pkg.changes
398         changes_file = self.pkg.changes_file
399         dsc = self.pkg.dsc
400         file_keys = files.keys()
401
402         ## Special support to enable clean auto-building of queued packages
403         queue_id = database.get_or_set_queue_id(queue)
404
405         self.projectB.query("BEGIN WORK")
406         for suite in changes["distribution"].keys():
407             if suite not in Cnf.ValueList("Dinstall::QueueBuildSuites"):
408                 continue
409             suite_id = database.get_suite_id(suite)
410             dest_dir = Cnf["Dir::QueueBuild"]
411             if Cnf.FindB("Dinstall::SecurityQueueBuild"):
412                 dest_dir = os.path.join(dest_dir, suite)
413             for file in file_keys:
414                 src = os.path.join(path, file)
415                 dest = os.path.join(dest_dir, file)
416                 if Cnf.FindB("Dinstall::SecurityQueueBuild"):
417                     # Copy it since the original won't be readable by www-data
418                     utils.copy(src, dest)
419                 else:
420                     # Create a symlink to it
421                     os.symlink(src, dest)
422                 # Add it to the list of packages for later processing by apt-ftparchive
423                 self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
424             # If the .orig.tar.gz is in the pool, create a symlink to
425             # it (if one doesn't already exist)
426             if self.pkg.orig_tar_id:
427                 # Determine the .orig.tar.gz file name
428                 for dsc_file in self.pkg.dsc_files.keys():
429                     if dsc_file.endswith(".orig.tar.gz"):
430                         filename = dsc_file
431                 dest = os.path.join(dest_dir, filename)
432                 # If it doesn't exist, create a symlink
433                 if not os.path.exists(dest):
434                     # Find the .orig.tar.gz in the pool
435                     q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id))
436                     ql = q.getresult()
437                     if not ql:
438                         utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id))
439                     src = os.path.join(ql[0][0], ql[0][1])
440                     os.symlink(src, dest)
441                     # Add it to the list of packages for later processing by apt-ftparchive
442                     self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
443                 # if it does, update things to ensure it's not removed prematurely
444                 else:
445                     self.projectB.query("UPDATE queue_build SET in_queue = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id))
446
447         self.projectB.query("COMMIT WORK")
448
449     ###########################################################################
450
451     def check_override (self):
452         Subst = self.Subst
453         changes = self.pkg.changes
454         files = self.pkg.files
455         Cnf = self.Cnf
456
457         # Abandon the check if:
458         #  a) it's a non-sourceful upload
459         #  b) override disparity checks have been disabled
460         #  c) we're not sending mail
461         if not changes["architecture"].has_key("source") or \
462            not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
463            Cnf["Dinstall::Options::No-Mail"]:
464             return
465
466         summary = ""
467         file_keys = files.keys()
468         file_keys.sort()
469         for file in file_keys:
470             if not files[file].has_key("new") and files[file]["type"] == "deb":
471                 section = files[file]["section"]
472                 override_section = files[file]["override section"]
473                 if section.lower() != override_section.lower() and section != "-":
474                     # Ignore this; it's a common mistake and not worth whining about
475                     if section.lower() == "non-us/main" and override_section.lower() == "non-us":
476                         continue
477                     summary += "%s: package says section is %s, override says %s.\n" % (file, section, override_section)
478                 priority = files[file]["priority"]
479                 override_priority = files[file]["override priority"]
480                 if priority != override_priority and priority != "-":
481                     summary += "%s: package says priority is %s, override says %s.\n" % (file, priority, override_priority)
482
483         if summary == "":
484             return
485
486         Subst["__SUMMARY__"] = summary
487         mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/process-unchecked.override-disparity")
488         utils.send_mail(mail_message)
489
490     ###########################################################################
491
492     def force_reject (self, files):
493         """Forcefully move files from the current directory to the
494            reject directory.  If any file already exists in the reject
495            directory it will be moved to the morgue to make way for
496            the new file."""
497
498         Cnf = self.Cnf
499
500         for file in files:
501             # Skip any files which don't exist or which we don't have permission to copy.
502             if os.access(file,os.R_OK) == 0:
503                 continue
504             dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file)
505             try:
506                 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
507             except OSError, e:
508                 # File exists?  Let's try and move it to the morgue
509                 if errno.errorcode[e.errno] == 'EEXIST':
510                     morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file)
511                     try:
512                         morgue_file = utils.find_next_free(morgue_file)
513                     except utils.tried_too_hard_exc:
514                         # Something's either gone badly Pete Tong, or
515                         # someone is trying to exploit us.
516                         utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file))
517                         return
518                     utils.move(dest_file, morgue_file, perms=0660)
519                     try:
520                         dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
521                     except OSError, e:
522                         # Likewise
523                         utils.warn("**WARNING** failed to claim %s in the reject directory." % (file))
524                         return
525                 else:
526                     raise
527             # If we got here, we own the destination file, so we can
528             # safely overwrite it.
529             utils.move(file, dest_file, 1, perms=0660)
530             os.close(dest_fd)
531
532     ###########################################################################
533
534     def do_reject (self, manual = 0, reject_message = ""):
535         # If we weren't given a manual rejection message, spawn an
536         # editor so the user can add one in...
537         if manual and not reject_message:
538             temp_filename = utils.temp_filename()
539             editor = os.environ.get("EDITOR","vi")
540             answer = 'E'
541             while answer == 'E':
542                 os.system("%s %s" % (editor, temp_filename))
543                 temp_fh = utils.open_file(temp_filename)
544                 reject_message = "".join(temp_fh.readlines())
545                 temp_fh.close()
546                 print "Reject message:"
547                 print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
548                 prompt = "[R]eject, Edit, Abandon, Quit ?"
549                 answer = "XXX"
550                 while prompt.find(answer) == -1:
551                     answer = utils.our_raw_input(prompt)
552                     m = re_default_answer.search(prompt)
553                     if answer == "":
554                         answer = m.group(1)
555                     answer = answer[:1].upper()
556             os.unlink(temp_filename)
557             if answer == 'A':
558                 return 1
559             elif answer == 'Q':
560                 sys.exit(0)
561
562         print "Rejecting.\n"
563
564         Cnf = self.Cnf
565         Subst = self.Subst
566         pkg = self.pkg
567
568         reason_filename = pkg.changes_file[:-8] + ".reason"
569         reason_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename
570
571         # Move all the files into the reject directory
572         reject_files = pkg.files.keys() + [pkg.changes_file]
573         self.force_reject(reject_files)
574
575         # If we fail here someone is probably trying to exploit the race
576         # so let's just raise an exception ...
577         if os.path.exists(reason_filename):
578             os.unlink(reason_filename)
579         reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
580
581         if not manual:
582             Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
583             Subst["__MANUAL_REJECT_MESSAGE__"] = ""
584             Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
585             os.write(reason_fd, reject_message)
586             reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
587         else:
588             # Build up the rejection email
589             user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"])
590
591             Subst["__REJECTOR_ADDRESS__"] = user_email_address
592             Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
593             Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
594             reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
595             # Write the rejection email out as the <foo>.reason file
596             os.write(reason_fd, reject_mail_message)
597
598         os.close(reason_fd)
599
600         # Send the rejection mail if appropriate
601         if not Cnf["Dinstall::Options::No-Mail"]:
602             utils.send_mail(reject_mail_message)
603
604         self.Logger.log(["rejected", pkg.changes_file])
605         return 0
606
607     ################################################################################
608
609     # Ensure that source exists somewhere in the archive for the binary
610     # upload being processed.
611     #
612     # (1) exact match                      => 1.0-3
613     # (2) Bin-only NMU                     => 1.0-3+b1 , 1.0-3.1+b1
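    #
    # e.g. a binary-only rebuild of foo 1.2-1+b1 (a made-up package) passes if
    #      source foo 1.2-1 exists in one of the target suites, or in a suite
    #      mapped to them; re_bin_only_nmu strips the "+b1" before lookup (2).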
614
615     def source_exists (self, package, source_version, suites = ["any"]):
616         okay = 1
617         for suite in suites:
618             if suite == "any":
619                 que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
620                     (package)
621             else:
622                 # source must exist in suite X, or in some other suite that's
623                 # mapped to X, recursively... silent-maps are counted too,
624                 # unreleased-maps aren't.
625                 maps = self.Cnf.ValueList("SuiteMappings")[:]
626                 maps.reverse()
627                 maps = [ m.split() for m in maps ]
628                 maps = [ (x[1], x[2]) for x in maps
629                                 if x[0] == "map" or x[0] == "silent-map" ]
630                 s = [suite]
631                 for x in maps:
632                         if x[1] in s and x[0] not in s:
633                                 s.append(x[0])
634
635                 que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, " OR ".join(["su.suite_name = '%s'" % a for a in s]))
636             q = self.projectB.query(que)
637
638             # Reduce the query results to a list of version numbers
639             ql = [ i[0] for i in q.getresult() ]
640
641             # Try (1)
642             if source_version in ql:
643                 continue
644
645             # Try (2)
646             orig_source_version = re_bin_only_nmu.sub('', source_version)
647             if orig_source_version in ql:
648                 continue
649
650             # No source found...
651             okay = 0
652             break
653         return okay
654
655     ################################################################################
656     
657     def in_override_p (self, package, component, suite, binary_type, file):
658         files = self.pkg.files
659
660         if binary_type == "": # must be source
661             type = "dsc"
662         else:
663             type = binary_type
664
665         # Override suite name; used for example with proposed-updates
666         if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
667             suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)]
668
669         # Avoid <undef> on unknown distributions
670         suite_id = database.get_suite_id(suite)
671         if suite_id == -1:
672             return None
673         component_id = database.get_component_id(component)
674         type_id = database.get_override_type_id(type)
675
676         # FIXME: nasty non-US specific hack
677         if component.lower().startswith("non-us/"):
678             component = component[7:]
679
680         q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
681                            % (package, suite_id, component_id, type_id))
682         result = q.getresult()
683         # If checking for a source package fall back on the binary override type
684         if type == "dsc" and not result:
685             deb_type_id = database.get_override_type_id("deb")
686             udeb_type_id = database.get_override_type_id("udeb")
687             q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
688                                % (package, suite_id, component_id, deb_type_id, udeb_type_id))
689             result = q.getresult()
690
691         # Remember the section and priority so we can check them later if appropriate
692         if result:
693             files[file]["override section"] = result[0][0]
694             files[file]["override priority"] = result[0][1]
695
696         return result
697
698     ################################################################################
699
700     def reject (self, str, prefix="Rejected: "):
701         if str:
702             # Unlike other rejects we add new lines first to avoid trailing
703             # new lines when this message is passed back up to a caller.
704             if self.reject_message:
705                 self.reject_message += "\n"
706             self.reject_message += prefix + str
707
708     ################################################################################
709
710     def get_anyversion(self, query_result, suite):
711         anyversion=None
712         anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
713         for (v, s) in query_result:
714             if s in [ x.lower() for x in anysuite ]:
715                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
716                     anyversion=v
717         return anyversion
718
719     ################################################################################
720
721     def cross_suite_version_check(self, query_result, file, new_version):
722         """Ensure versions are newer than existing packages in target
723         suites and that cross-suite version checking rules as
724         set out in the conf file are satisfied."""
725
726         # Check versions for each target suite
727         for target_suite in self.pkg.changes["distribution"].keys():
728             must_be_newer_than = [ i.lower() for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
729             must_be_older_than = [ i.lower() for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
730             # Enforce "must be newer than target suite" even if conffile omits it
731             if target_suite not in must_be_newer_than:
732                 must_be_newer_than.append(target_suite)
733             for entry in query_result:
734                 existent_version = entry[0]
735                 suite = entry[1]
736                 if suite in must_be_newer_than and \
737                    apt_pkg.VersionCompare(new_version, existent_version) < 1:
738                     self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
739                 if suite in must_be_older_than and \
740                    apt_pkg.VersionCompare(new_version, existent_version) > -1:
741                     ch = self.pkg.changes
742                     cansave = 0
743                     if ch.get('distribution-version', {}).has_key(suite):
744                         # we really use the other suite, ignoring the conflicting one ...
745                         addsuite = ch["distribution-version"][suite]
746                     
747                         add_version = self.get_anyversion(query_result, addsuite)
748                         target_version = self.get_anyversion(query_result, target_suite)
749                     
750                         if not add_version:
751                             # not add_version can only happen if we map to a suite
752                             # that doesn't enhance the suite we're propup'ing from.
753                             # so "propup-ver x a b c; map a d" is a problem only if
754                             # d doesn't enhance a.
755                             #
756                             # i think we could always propagate in this case, rather
757                             # than complaining. either way, this isn't a REJECT issue
758                             #
759                             # And - we really should complain to the dorks who configured dak
760                             self.reject("%s is mapped to, but not enhanced by %s - adding anyway" % (suite, addsuite), "Warning: ")
761                             self.pkg.changes.setdefault("propdistribution", {})
762                             self.pkg.changes["propdistribution"][addsuite] = 1
763                             cansave = 1
764                         elif not target_version:
765                             # not target_version is true when the package is NEW
766                             # we could just stick with the "...old version..." REJECT
767                             # for this, I think.
768                             self.reject("Won't propagate NEW packages.")
769                         elif apt_pkg.VersionCompare(new_version, add_version) < 0:
770                             # propagation would be redundant. no need to reject though.
771                             self.reject("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite), "Warning: ")
772                             cansave = 1
773                         elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
774                              apt_pkg.VersionCompare(add_version, target_version) >= 0:
775                             # propagate!!
776                             self.reject("Propagating upload to %s" % (addsuite), "Warning: ")
777                             self.pkg.changes.setdefault("propdistribution", {})
778                             self.pkg.changes["propdistribution"][addsuite] = 1
779                             cansave = 1
780                 
781                     if not cansave:
782                         self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
783
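    # An illustrative configuration sketch for the checks above (suite names
    # and values are examples only, not the live configuration):
    #
    #   Suite
    #   {
    #     unstable
    #     {
    #       VersionChecks
    #       {
    #         MustBeNewerThan { "stable"; "testing"; };
    #       };
    #     };
    #   };
    #
    # With that in place, an upload of foo 1.2-1 targeted at unstable is
    # rejected if stable or testing already carries a foo version >= 1.2-1.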
784     ################################################################################
785
786     def check_binary_against_db(self, file):
787         self.reject_message = ""
788         files = self.pkg.files
789
790         # Ensure version is sane
791         q = self.projectB.query("""
792 SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
793                                      architecture a
794  WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
795    AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
796                                 % (files[file]["package"],
797                                    files[file]["architecture"]))
798         self.cross_suite_version_check(q.getresult(), file, files[file]["version"])
799
800         # Check for any existing copies of the file
801         q = self.projectB.query("""
802 SELECT b.id FROM binaries b, architecture a
803  WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
804    AND a.id = b.architecture"""
805                                 % (files[file]["package"],
806                                    files[file]["version"],
807                                    files[file]["architecture"]))
808         if q.getresult():
809             self.reject("%s: can not overwrite existing copy already in the archive." % (file))
810
811         return self.reject_message
812
813     ################################################################################
814
815     def check_source_against_db(self, file):
816         self.reject_message = ""
817         dsc = self.pkg.dsc
818
819         # Ensure version is sane
820         q = self.projectB.query("""
821 SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
822  WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")))
823         self.cross_suite_version_check(q.getresult(), file, dsc.get("version"))
824
825         return self.reject_message
826
827     ################################################################################
828
829     # **WARNING**
830     # NB: this function can remove entries from the 'files' index [if
831     # the .orig.tar.gz is a duplicate of the one in the archive]; if
832     # you're iterating over 'files' and call this function as part of
833     # the loop, be sure to add a check to the top of the loop to
834     # ensure you haven't just tried to dereference the deleted entry.
835     # **WARNING**
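    #
    # A caller-side sketch of the pattern that warning implies (illustrative
    # only):
    #
    #   for file in files.keys():
    #       if not files.has_key(file):  # entry removed by a previous iteration
    #           continue
    #       ... self.check_dsc_against_db(file) ...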
836
837     def check_dsc_against_db(self, file):
838         self.reject_message = ""
839         files = self.pkg.files
840         dsc_files = self.pkg.dsc_files
841         legacy_source_untouchable = self.pkg.legacy_source_untouchable
842         self.pkg.orig_tar_gz = None
843
844         # Try and find all files mentioned in the .dsc.  This has
845         # to work harder to cope with the multiple possible
846         # locations of an .orig.tar.gz.
847         # The ordering on the select is needed to pick the newest orig
848         # when it exists in multiple places.
849         for dsc_file in dsc_files.keys():
850             found = None
851             if files.has_key(dsc_file):
852                 actual_md5 = files[dsc_file]["md5sum"]
853                 actual_size = int(files[dsc_file]["size"])
854                 found = "%s in incoming" % (dsc_file)
855                 # Check the file does not already exist in the archive
856                 q = self.projectB.query("SELECT f.size, f.md5sum, l.path, f.filename FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location ORDER BY f.id DESC" % (dsc_file))
857                 ql = q.getresult()
858                 # Strip out anything that isn't '%s' or '/%s$'
859                 for i in ql:
860                     if i[3] != dsc_file and i[3][-(len(dsc_file)+1):] != '/'+dsc_file:
861                         ql.remove(i)
862
863                 # "[dak] has not broken them.  [dak] has fixed a
864                 # brokenness.  Your crappy hack exploited a bug in
865                 # the old dinstall.
866                 #
867                 # "(Come on!  I thought it was always obvious that
868                 # one just doesn't release different files with
869                 # the same name and version.)"
870                 #                        -- ajk@ on d-devel@l.d.o
871
872                 if ql:
873                     # Ignore exact matches for .orig.tar.gz
874                     match = 0
875                     if dsc_file.endswith(".orig.tar.gz"):
876                         for i in ql:
877                             if files.has_key(dsc_file) and \
878                                int(files[dsc_file]["size"]) == int(i[0]) and \
879                                files[dsc_file]["md5sum"] == i[1]:
880                                 self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ")
881                                 del files[dsc_file]
882                                 self.pkg.orig_tar_gz = i[2] + i[3]
883                                 match = 1
884
885                     if not match:
886                         self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file))
887             elif dsc_file.endswith(".orig.tar.gz"):
888                 # Check in the pool
889                 q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
890                 ql = q.getresult()
891                 # Strip out anything that isn't '%s' or '/%s$'
892                 for i in ql:
893                     if i[1] != dsc_file and i[1][-(len(dsc_file)+1):] != '/'+dsc_file:
894                         ql.remove(i)
895
896                 if ql:
897                     # Unfortunately, we may get more than one match here if,
898                     # for example, the package was in potato but had an -sa
899                     # upload in woody.  So we need to choose the right one.
900
901                     x = ql[0]; # default to something sane in case we don't match any or have only one
902
903                     if len(ql) > 1:
904                         for i in ql:
905                             old_file = i[0] + i[1]
906                             old_file_fh = utils.open_file(old_file)
907                             actual_md5 = apt_pkg.md5sum(old_file_fh)
908                             old_file_fh.close()
909                             actual_size = os.stat(old_file)[stat.ST_SIZE]
910                             if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
911                                 x = i
912                             else:
913                                 legacy_source_untouchable[i[3]] = ""
914
915                     old_file = x[0] + x[1]
916                     old_file_fh = utils.open_file(old_file)
917                     actual_md5 = apt_pkg.md5sum(old_file_fh)
918                     old_file_fh.close()
919                     actual_size = os.stat(old_file)[stat.ST_SIZE]
920                     found = old_file
921                     suite_type = x[2]
922                     dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
923                     # See install() in process-accepted...
924                     self.pkg.orig_tar_id = x[3]
925                     self.pkg.orig_tar_gz = old_file
926                     if suite_type == "legacy" or suite_type == "legacy-mixed":
927                         self.pkg.orig_tar_location = "legacy"
928                     else:
929                         self.pkg.orig_tar_location = x[4]
930                 else:
931                     # Not there? Check the queue directories...
932
933                     in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file)
934                     # See process_it() in 'dak process-unchecked' for explanation of this
935                     if os.path.exists(in_unchecked):
936                         return (self.reject_message, in_unchecked)
937                     else:
938                         for dir in [ "Accepted", "New", "Byhand" ]:
939                             in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (dir)],dsc_file)
940                             if os.path.exists(in_otherdir):
941                                 in_otherdir_fh = utils.open_file(in_otherdir)
942                                 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
943                                 in_otherdir_fh.close()
944                                 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
945                                 found = in_otherdir
946                                 self.pkg.orig_tar_gz = in_otherdir
947
948                     if not found:
949                         self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file))
950                         self.pkg.orig_tar_gz = -1
951                         continue
952             else:
953                 self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file))
954                 continue
955             if actual_md5 != dsc_files[dsc_file]["md5sum"]:
956                 self.reject("md5sum for %s doesn't match %s." % (found, file))
957             if actual_size != int(dsc_files[dsc_file]["size"]):
958                 self.reject("size for %s doesn't match %s." % (found, file))
959
960         return (self.reject_message, None)
961
962     def do_query(self, q):
963         sys.stderr.write("query: \"%s\" ... " % (q))
964         before = time.time()
965         r = self.projectB.query(q)
966         time_diff = time.time()-before
967         sys.stderr.write("took %.3f seconds.\n" % (time_diff))
968         return r