1 #!/usr/bin/env python
2 # vim:set et sw=4:
3
4 """
5 Queue utility functions for dak
6
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009  Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
11 """
12
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
17
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
21 # GNU General Public License for more details.
22
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
26
27 ###############################################################################
28
29 import cPickle
30 import errno
31 import os
32 import pg
33 import stat
34 import sys
35 import time
36 import apt_inst
37 import apt_pkg
38 import utils
39 from types import *
40
41 from dak_exceptions import *
42 from changes import *
43 from regexes import re_default_answer, re_fdnic, re_bin_only_nmu, re_strip_srcver, re_valid_pkg_name, re_isanum, re_no_epoch, re_no_revision
44 from config import Config
45 from dbconn import *
46 from summarystats import SummaryStats
47 from utils import parse_changes
48 from textutils import fix_maintainer
49
50 ###############################################################################
51
52 def get_type(f, session=None):
53     """
54     Get the file type of C{f}
55
56     @type f: dict
57     @param f: file entry from Changes object
58
59     @rtype: string
60     @return: filetype
61
62     """
63     if session is None:
64         session = DBConn().session()
65
66     # Determine the type
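       # Binaries carry an explicit dbtype; anything that looks like part of a
       # source upload (tarballs, diffs, the .dsc itself) is filed under "dsc".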
67     if f.has_key("dbtype"):
68         file_type = f["dbtype"]
69     elif f["type"] in [ "orig.tar.gz", "orig.tar.bz2", "tar.gz", "tar.bz2", "diff.gz", "diff.bz2", "dsc" ]:
70         file_type = "dsc"
71     else:
72         utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (f["type"]))
73
74     # Validate the override type
75     type_id = get_override_type(file_type, session)
76     if type_id is None:
77         utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))
78
79     return file_type
80
81 ################################################################################
82
83 # Determine what parts in a .changes are NEW
84
85 def determine_new(changes, files, warn=1):
86     """
87     Determine what parts in a C{changes} file are NEW.
88
89     @type changes: Upload.Pkg.changes dict
90     @param changes: Changes dictionary
91
92     @type files: Upload.Pkg.files dict
93     @param files: Files dictionary
94
95     @type warn: bool
96     @param warn: Warn if overrides are added for (old)stable
97
98     @rtype: dict
99     @return: dictionary of NEW components.
100
101     """
102     new = {}
103
104     session = DBConn().session()
105
106     # Build up a list of potentially new things
107     for name, f in files.items():
108         # Skip byhand elements
109         if f["type"] == "byhand":
110             continue
111         pkg = f["package"]
112         priority = f["priority"]
113         section = f["section"]
114         file_type = get_type(f)
115         component = f["component"]
116
117         if file_type == "dsc":
118             priority = "source"
119
120         if not new.has_key(pkg):
121             new[pkg] = {}
122             new[pkg]["priority"] = priority
123             new[pkg]["section"] = section
124             new[pkg]["type"] = file_type
125             new[pkg]["component"] = component
126             new[pkg]["files"] = []
127         else:
128             old_type = new[pkg]["type"]
129             if old_type != file_type:
130                 # source gets trumped by deb or udeb
131                 if old_type == "dsc":
132                     new[pkg]["priority"] = priority
133                     new[pkg]["section"] = section
134                     new[pkg]["type"] = file_type
135                     new[pkg]["component"] = component
136
137         new[pkg]["files"].append(name)
138
139         if f.has_key("othercomponents"):
140             new[pkg]["othercomponents"] = f["othercomponents"]
141
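        # Anything that already has an override entry in one of the target suites
        # is not NEW: drop it from the dict and clear the per-file "new" marker.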
142     for suite in changes["suite"].keys():
143         for pkg in new.keys():
144             ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
145             if len(ql) > 0:
146                 for file_entry in new[pkg]["files"]:
147                     if files[file_entry].has_key("new"):
148                         del files[file_entry]["new"]
149                 del new[pkg]
150
151     if warn:
152         for s in ['stable', 'oldstable']:
153             if changes["suite"].has_key(s):
154                 print "WARNING: overrides will be added for %s!" % s
155         for pkg in new.keys():
156             if new[pkg].has_key("othercomponents"):
157                 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
158
159     return new
160
161 ################################################################################
162
163 def check_valid(new):
164     """
165     Check if section and priority for NEW packages exist in database.
166     Additionally does sanity checks:
167       - debian-installer packages have to be udeb (or source)
168       - non debian-installer packages can not be udeb
169       - source priority can only be assigned to dsc file types
170
171     @type new: dict
172     @param new: Dict of new packages with their section, priority and type.
173
174     """
175     for pkg in new.keys():
176         section_name = new[pkg]["section"]
177         priority_name = new[pkg]["priority"]
178         file_type = new[pkg]["type"]
179
180         section = get_section(section_name)
181         if section is None:
182             new[pkg]["section id"] = -1
183         else:
184             new[pkg]["section id"] = section.section_id
185
186         priority = get_priority(priority_name)
187         if priority is None:
188             new[pkg]["priority id"] = -1
189         else:
190             new[pkg]["priority id"] = priority.priority_id
191
192         # Sanity checks
193         di = section_name.find("debian-installer") != -1
194
195         # If d-i, we must be udeb and vice-versa
196         if (di and file_type not in ("udeb", "dsc")) or \
197            (not di and file_type == "udeb"):
198             new[pkg]["section id"] = -1
199
200         # If dsc we need to be source and vice-versa
201         if (priority_name == "source" and file_type != "dsc") or \
202            (priority_name != "source" and file_type == "dsc"):
203             new[pkg]["priority id"] = -1
204
205 ###############################################################################
206
207 class Upload(object):
208     """
209     Everything that has to do with processing an upload.
210
211     """
212     def __init__(self):
213         self.pkg = Changes()
214         self.reset()
215
216     ###########################################################################
217
218     def reset (self):
219         """ Reset a number of internal variables."""
220
221         # Initialize the substitution template map
222         cnf = Config()
223         self.Subst = {}
224         self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
225         self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
226         self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
227         self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
228
229         self.rejects = []
230         self.warnings = []
231         self.notes = []
232
233         self.pkg.reset()
234
235     def package_info(self):
236         msg = ''
237
238         if len(self.rejects) > 0:
239             msg += "Reject Reasons:\n"
240             msg += "\n".join(self.rejects) + "\n"
241
242         if len(self.warnings) > 0:
243             msg += "Warnings:\n"
244             msg += "\n".join(self.warnings) + "\n"
245
246         if len(self.notes) > 0:
247             msg += "Notes:\n"
248             msg += "\n".join(self.notes)
249
250         return msg
251
252     ###########################################################################
253     def update_subst(self):
254         """ Set up the per-package template substitution mappings """
255
256         cnf = Config()
257
258         # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
259         if not self.pkg.changes.has_key("architecture") or not \
260            isinstance(self.pkg.changes["architecture"], DictType):
261             self.pkg.changes["architecture"] = { "Unknown" : "" }
262
263         # and maintainer2047 may not exist.
264         if not self.pkg.changes.has_key("maintainer2047"):
265             self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]
266
267         self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
268         self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
269         self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")
270
271         # For source uploads the Changed-By field wins; otherwise Maintainer wins.
272         if self.pkg.changes["architecture"].has_key("source") and \
273            self.pkg.changes["changedby822"] != "" and \
274            (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
275
276             self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
277             self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
278             self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
279         else:
280             self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
281             self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
282             self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")
283
284         if "sponsoremail" in self.pkg.changes:
285             self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
286
287         if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
288             self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
289
290         # Apply any global override of the Maintainer field
291         if cnf.get("Dinstall::OverrideMaintainer"):
292             self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
293             self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]
294
295         self.Subst["__REJECT_MESSAGE__"] = self.package_info()
296         self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
297         self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
298
299     ###########################################################################
300     def load_changes(self, filename):
301         """
302         @rtype: boolean
303         @return: whether the changes file was valid or not.  We may want to
304                  reject even if this is True (see what gets put in self.rejects).
305                  This is simply to prevent us even trying things later which will
306                  fail because we couldn't properly parse the file.
307         """
308         self.pkg.changes_file = filename
309
310         # Parse the .changes file into a dictionary
311         try:
312             self.pkg.changes.update(parse_changes(filename))
313         except CantOpenError:
314             self.rejects.append("%s: can't read file." % (filename))
315             return False
316         except ParseChangesError, line:
317             self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
318             return False
319         except ChangesUnicodeError:
320             self.rejects.append("%s: changes file not proper utf-8" % (filename))
321             return False
322
323         # Parse the Files field from the .changes into another dictionary
324         try:
325             self.pkg.files.update(build_file_list(self.pkg.changes))
326         except ParseChangesError, line:
327             self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
328             return False
329         except UnknownFormatError, format:
330             self.rejects.append("%s: unknown format '%s'." % (filename, format))
331             return False
332
333         # Check for mandatory fields
334         for i in ("distribution", "source", "binary", "architecture",
335                   "version", "maintainer", "files", "changes", "description"):
336             if not self.pkg.changes.has_key(i):
337                 # Avoid undefined errors later
338                 self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
339                 return False
340
341         # Strip a source version in brackets from the source field
342         if re_strip_srcver.search(self.pkg.changes["source"]):
343             self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])
344
345         # Ensure the source field is a valid package name.
346         if not re_valid_pkg_name.match(self.pkg.changes["source"]):
347             self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))
348
349         # Split multi-value fields into a lower-level dictionary
350         for i in ("architecture", "distribution", "binary", "closes"):
351             o = self.pkg.changes.get(i, "")
352             if o != "":
353                 del self.pkg.changes[i]
354
355             self.pkg.changes[i] = {}
356
357             for j in o.split():
358                 self.pkg.changes[i][j] = 1
359
360         # Fix the Maintainer: field to be RFC822/2047 compatible
361         try:
362             (self.pkg.changes["maintainer822"],
363              self.pkg.changes["maintainer2047"],
364              self.pkg.changes["maintainername"],
365              self.pkg.changes["maintaineremail"]) = \
366                    fix_maintainer (self.pkg.changes["maintainer"])
367         except ParseMaintError, msg:
368             self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
369                    % (filename, self.pkg.changes["maintainer"], msg))
370
371         # ...likewise for the Changed-By: field if it exists.
372         try:
373             (self.pkg.changes["changedby822"],
374              self.pkg.changes["changedby2047"],
375              self.pkg.changes["changedbyname"],
376              self.pkg.changes["changedbyemail"]) = \
377                    fix_maintainer (self.pkg.changes.get("changed-by", ""))
378         except ParseMaintError, msg:
379             self.pkg.changes["changedby822"] = ""
380             self.pkg.changes["changedby2047"] = ""
381             self.pkg.changes["changedbyname"] = ""
382             self.pkg.changes["changedbyemail"] = ""
383
384             self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
385                    % (filename, self.pkg.changes["changed-by"], msg))
386
387         # Ensure all the values in Closes: are numbers
388         if self.pkg.changes.has_key("closes"):
389             for i in self.pkg.changes["closes"].keys():
390                 if re_isanum.match (i) == None:
391                     self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))
392
393         # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
394         self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
395         self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])
396
397         # Check there isn't already a changes file of the same name in one
398         # of the queue directories.
399         base_filename = os.path.basename(filename)
400         for d in [ "Accepted", "Byhand", "Done", "New", "ProposedUpdates", "OldProposedUpdates" ]:
401             if os.path.exists(os.path.join(Config()["Dir::Queue::%s" % (d) ], base_filename)):
402                 self.rejects.append("%s: a file with this name already exists in the %s directory." % (base_filename, d))
403
404         # Check the .changes is non-empty
405         if not self.pkg.files:
406             self.rejects.append("%s: nothing to do (Files field is empty)." % (base_filename))
407             return False
408
409         # Changes was syntactically valid even if we'll reject
410         return True
411
412     ###########################################################################
413
414     def check_distributions(self):
415         "Check and map the Distribution field"
416
417         Cnf = Config()
418
419         # Handle suite mappings
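            # Each SuiteMappings entry is a whitespace-separated rule whose first
            # word selects the mapping type handled below, e.g. (illustrative
            # values, not taken from the real config) "map stable proposed-updates"
            # or "ignore testing-security".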
420         for m in Cnf.ValueList("SuiteMappings"):
421             args = m.split()
422             mtype = args[0]
423             if mtype == "map" or mtype == "silent-map":
424                 (source, dest) = args[1:3]
425                 if self.pkg.changes["distribution"].has_key(source):
426                     del self.pkg.changes["distribution"][source]
427                     self.pkg.changes["distribution"][dest] = 1
428                     if mtype != "silent-map":
429                         self.notes.append("Mapping %s to %s." % (source, dest))
430                 if self.pkg.changes.has_key("distribution-version"):
431                     if self.pkg.changes["distribution-version"].has_key(source):
432                         self.pkg.changes["distribution-version"][source]=dest
433             elif mtype == "map-unreleased":
434                 (source, dest) = args[1:3]
435                 if self.pkg.changes["distribution"].has_key(source):
436                     for arch in self.pkg.changes["architecture"].keys():
437                         if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
438                             self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
439                             del self.pkg.changes["distribution"][source]
440                             self.pkg.changes["distribution"][dest] = 1
441                             break
442             elif mtype == "ignore":
443                 suite = args[1]
444                 if self.pkg.changes["distribution"].has_key(suite):
445                     del self.pkg.changes["distribution"][suite]
446                     self.warnings.append("Ignoring %s as a target suite." % (suite))
447             elif mtype == "reject":
448                 suite = args[1]
449                 if self.pkg.changes["distribution"].has_key(suite):
450                     self.rejects.append("Uploads to %s are not accepted." % (suite))
451             elif mtype == "propup-version":
452                 # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
453                 #
454                 # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
455                 if self.pkg.changes["distribution"].has_key(args[1]):
456                     self.pkg.changes.setdefault("distribution-version", {})
457                     for suite in args[2:]:
458                         self.pkg.changes["distribution-version"][suite] = suite
459
460         # Ensure there is (still) a target distribution
461         if len(self.pkg.changes["distribution"].keys()) < 1:
462             self.rejects.append("No valid distribution remaining.")
463
464         # Ensure target distributions exist
465         for suite in self.pkg.changes["distribution"].keys():
466             if not Cnf.has_key("Suite::%s" % (suite)):
467                 self.rejects.append("Unknown distribution `%s'." % (suite))
468
469     ###########################################################################
470
471     def build_summaries(self):
472         """ Build a summary of changes the upload introduces. """
473
474         (byhand, new, summary, override_summary) = self.pkg.file_summary()
475
476         short_summary = summary
477
478         # This is for direport's benefit...
479         f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
480
481         if byhand or new:
482             summary += "Changes: " + f
483
484         summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
485
486         summary += self.announce(short_summary, 0)
487
488         return (summary, short_summary)
489
490     ###########################################################################
491
492     def close_bugs(self, summary, action):
493         """
494         Send mail to close bugs as instructed by the closes field in the changes file.
495         Also add a line to summary if any work was done.
496
497         @type summary: string
498         @param summary: summary text, as given by L{build_summaries}
499
500         @type action: bool
501         @param action: If set to false, no real action will be done.
502
503         @rtype: string
504         @return: summary. If action was taken, extended by the list of closed bugs.
505
506         """
507
508         template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
509
510         bugs = self.pkg.changes["closes"].keys()
511
512         if not bugs:
513             return summary
514
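            # One templated mail is sent per closed bug (when action is set).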
515         bugs.sort()
516         summary += "Closing bugs: "
517         for bug in bugs:
518             summary += "%s " % (bug)
519             if action:
520                 self.Subst["__BUG_NUMBER__"] = bug
521                 if self.pkg.changes["distribution"].has_key("stable"):
522                     self.Subst["__STABLE_WARNING__"] = """
523 Note that this package is not part of the released stable Debian
524 distribution.  It may have dependencies on other unreleased software,
525 or other instabilities.  Please take care if you wish to install it.
526 The update will eventually make its way into the next released Debian
527 distribution."""
528                 else:
529                     self.Subst["__STABLE_WARNING__"] = ""
530                 mail_message = utils.TemplateSubst(self.Subst, template)
531                 utils.send_mail(mail_message)
532
533                 # Clear up after ourselves
534                 del self.Subst["__BUG_NUMBER__"]
535                 del self.Subst["__STABLE_WARNING__"]
536
537         if action:
538             self.Logger.log(["closing bugs"] + bugs)
539
540         summary += "\n"
541
542         return summary
543
544     ###########################################################################
545
546     def announce(self, short_summary, action):
547         """
548         Send an announce mail about a new upload.
549
550         @type short_summary: string
551         @param short_summary: Short summary text to include in the mail
552
553         @type action: bool
554         @param action: If set to false, no real action will be done.
555
556         @rtype: string
557         @return: Textstring about action taken.
558
559         """
560
561         cnf = Config()
562         announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
563
564         # Only do announcements for source uploads with a recent dpkg-dev installed
565         if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
566            self.pkg.changes["architecture"].has_key("source"):
567             return ""
568
569         lists_done = {}
570         summary = ""
571
572         self.Subst["__SHORT_SUMMARY__"] = short_summary
573
574         for dist in self.pkg.changes["distribution"].keys():
575             announce_list = cnf.Find("Suite::%s::Announce" % (dist))
576             if announce_list == "" or lists_done.has_key(announce_list):
577                 continue
578
579             lists_done[announce_list] = 1
580             summary += "Announcing to %s\n" % (announce_list)
581
582             if action:
583                 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
584                 if cnf.get("Dinstall::TrackingServer") and \
585                    self.pkg.changes["architecture"].has_key("source"):
586                     trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
587                     self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
588
589                 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
590                 utils.send_mail(mail_message)
591
592                 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
593
594         if cnf.FindB("Dinstall::CloseBugs"):
595             summary = self.close_bugs(summary, action)
596
597         del self.Subst["__SHORT_SUMMARY__"]
598
599         return summary
600
601     ###########################################################################
602
603     def accept (self, summary, short_summary, targetdir=None):
604         """
605         Accept an upload.
606
607         This moves all files referenced from the .changes into the I{accepted}
608         queue, sends the accepted mail, announces to lists, closes bugs and
609         also checks for override disparities. If enabled it will write out
610         the version history for the BTS Version Tracking and will finally call
611         L{queue_build}.
612
613         @type summary: string
614         @param summary: Summary text
615
616         @type short_summary: string
617         @param short_summary: Short summary
618
619         """
620
621         cnf = Config()
622         stats = SummaryStats()
623
624         accepttemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted')
625
626         if targetdir is None:
627             targetdir = cnf["Dir::Queue::Accepted"]
628
629         print "Accepting."
630         self.Logger.log(["Accepting changes", self.pkg.changes_file])
631
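            # write_dot_dak() drops a serialised copy of the parsed upload next to
            # the queue entry, presumably so later dak stages can reload it without
            # re-parsing the .changes.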
632         self.write_dot_dak(targetdir)
633
634         # Move all the files into the accepted directory
635         utils.move(self.pkg.changes_file, targetdir)
636
637         for name, entry in sorted(self.pkg.files.items()):
638             utils.move(name, targetdir)
639             stats.accept_bytes += float(entry["size"])
640
641         stats.accept_count += 1
642
643         # Send accept mail, announce to lists, close bugs and check for
644         # override disparities
645         if not cnf["Dinstall::Options::No-Mail"]:
646             self.Subst["__SUITE__"] = ""
647             self.Subst["__SUMMARY__"] = summary
648             mail_message = utils.TemplateSubst(self.Subst, accepttemplate)
649             utils.send_mail(mail_message)
650             self.announce(short_summary, 1)
651
652         ## Helper stuff for DebBugs Version Tracking
653         if cnf.Find("Dir::Queue::BTSVersionTrack"):
654             # ??? once queue/* is cleared on *.d.o and/or reprocessed
655             # the conditionalization on dsc["bts changelog"] should be
656             # dropped.
657
658             # Write out the version history from the changelog
659             if self.pkg.changes["architecture"].has_key("source") and \
660                self.pkg.dsc.has_key("bts changelog"):
661
662                 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
663                 version_history = os.fdopen(fd, 'w')
664                 version_history.write(self.pkg.dsc["bts changelog"])
665                 version_history.close()
666                 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
667                                       self.pkg.changes_file[:-8]+".versions")
668                 os.rename(temp_filename, filename)
669                 os.chmod(filename, 0644)
670
671             # Write out the binary -> source mapping.
672             (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
673             debinfo = os.fdopen(fd, 'w')
674             for name, entry in sorted(self.pkg.files.items()):
675                 if entry["type"] == "deb":
676                     line = " ".join([entry["package"], entry["version"],
677                                      entry["architecture"], entry["source package"],
678                                      entry["source version"]])
679                     debinfo.write(line+"\n")
680             debinfo.close()
681             filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
682                                   self.pkg.changes_file[:-8]+".debinfo")
683             os.rename(temp_filename, filename)
684             os.chmod(filename, 0644)
685
686         # It is Cnf["Dir::Queue::Accepted"] here, not targetdir!
687         # <Ganneff> we do call queue_build too
688         # <mhy> well yes, we'd have had to if we were inserting into accepted
689         # <Ganneff> now. thats database only.
690         # <mhy> urgh, that's going to get messy
691         # <Ganneff> so i make the p-n call to it *also* using accepted/
692         # <mhy> but then the packages will be in the queue_build table without the files being there
693         # <Ganneff> as the buildd queue is only regenerated whenever unchecked runs
694         # <mhy> ah, good point
695         # <Ganneff> so it will work out, as unchecked move it over
696         # <mhy> that's all completely sick
697         # <Ganneff> yes
698
699         # This routine returns None on success or an error on failure
700         res = get_queue('accepted').autobuild_upload(self.pkg, cnf["Dir::Queue::Accepted"])
701         if res:
702             utils.fubar(res)
703
704
705     def check_override (self):
706         """
707         Checks override entries for validity. Mails "Override disparity" warnings,
708         if that feature is enabled.
709
710         Abandons the check if
711           - override disparity checks are disabled
712           - mail sending is disabled
713         """
714
715         cnf = Config()
716
717         # Abandon the check if:
718         #  a) override disparity checks have been disabled
719         #  b) we're not sending mail
720         if not cnf.FindB("Dinstall::OverrideDisparityCheck") or \
721            cnf["Dinstall::Options::No-Mail"]:
722             return
723
724         summary = self.pkg.check_override()
725
726         if summary == "":
727             return
728
729         overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
730
731         self.Subst["__SUMMARY__"] = summary
732         mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
733         utils.send_mail(mail_message)
734         del self.Subst["__SUMMARY__"]
735
736     ###########################################################################
737     def force_reject(self, reject_files):
738         """
739         Forcefully move files from the current directory to the
740         reject directory.  If any file already exists in the reject
741         directory it will be moved to the morgue to make way for
742         the new file.
743
744         @type reject_files: list
745         @param reject_files: file names to move to the reject directory
746
747         """
748
749         cnf = Config()
750
751         for file_entry in reject_files:
752             # Skip any files which don't exist or which we don't have permission to copy.
753             if os.access(file_entry, os.R_OK) == 0:
754                 continue
755
756             dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
757
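                # Claim the destination with O_CREAT|O_EXCL: if a file of that
                # name is already sitting in the reject directory, it is shunted
                # off to the morgue below before we try again.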
758             try:
759                 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
760             except OSError, e:
761                 # File exists?  Let's try and move it to the morgue
762                 if e.errno == errno.EEXIST:
763                     morgue_file = os.path.join(cnf["Dir::Morgue"], cnf["Dir::MorgueReject"], file_entry)
764                     try:
765                         morgue_file = utils.find_next_free(morgue_file)
766                     except NoFreeFilenameError:
767                         # Something's either gone badly Pete Tong, or
768                         # someone is trying to exploit us.
769                         utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file_entry))
770                         return
771                     utils.move(dest_file, morgue_file, perms=0660)
772                     try:
773                         dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
774                     except OSError, e:
775                         # Likewise
776                         utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
777                         return
778                 else:
779                     raise
780             # If we got here, we own the destination file, so we can
781             # safely overwrite it.
782             utils.move(file_entry, dest_file, 1, perms=0660)
783             os.close(dest_fd)
784
785     ###########################################################################
786     def do_reject (self, manual=0, reject_message="", note=""):
787         """
788         Reject an upload. If C{manual} is true and no reject message was
789         given, spawn an editor so the user can write one.
790
791         @type manual: bool
792         @param manual: manual or automated rejection
793
794         @type reject_message: string
795         @param reject_message: A reject message
796
797         @return: 0
798
799         """
800         # If we weren't given a manual rejection message, spawn an
801         # editor so the user can add one in...
802         if manual and not reject_message:
803             (fd, temp_filename) = utils.temp_filename()
804             temp_file = os.fdopen(fd, 'w')
805             if len(note) > 0:
806                 for line in note:
807                     temp_file.write(line)
808             temp_file.close()
809             editor = os.environ.get("EDITOR","vi")
810             answer = 'E'
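                # Keep re-opening the editor until the user settles on
                # [R]eject, Abandon or Quit; answering 'E' loops back into it.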
811             while answer == 'E':
812                 os.system("%s %s" % (editor, temp_filename))
813                 temp_fh = utils.open_file(temp_filename)
814                 reject_message = "".join(temp_fh.readlines())
815                 temp_fh.close()
816                 print "Reject message:"
817                 print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
818                 prompt = "[R]eject, Edit, Abandon, Quit ?"
819                 answer = "XXX"
820                 while prompt.find(answer) == -1:
821                     answer = utils.our_raw_input(prompt)
822                     m = re_default_answer.search(prompt)
823                     if answer == "":
824                         answer = m.group(1)
825                     answer = answer[:1].upper()
826             os.unlink(temp_filename)
827             if answer == 'A':
828                 return 1
829             elif answer == 'Q':
830                 sys.exit(0)
831
832         print "Rejecting.\n"
833
834         cnf = Config()
835
836         reason_filename = self.pkg.changes_file[:-8] + ".reason"
837         reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
838
839         # Move all the files into the reject directory
840         reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
841         self.force_reject(reject_files)
842
843         # If we fail here someone is probably trying to exploit the race
844         # so let's just raise an exception ...
845         if os.path.exists(reason_filename):
846             os.unlink(reason_filename)
847         reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
848
849         rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
850
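            # Automated rejections are sent from dak's own address and flagged in
            # the mail headers; manual ones are attributed to the person running
            # dak, and the full mail is written out as the <foo>.reason file.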
851         if not manual:
852             self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
853             self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
854             self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
855             os.write(reason_fd, reject_message)
856             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
857         else:
858             # Build up the rejection email
859             user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
860             self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
861             self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
862             self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
863             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
864             # Write the rejection email out as the <foo>.reason file
865             os.write(reason_fd, reject_mail_message)
866
867         del self.Subst["__REJECTOR_ADDRESS__"]
868         del self.Subst["__MANUAL_REJECT_MESSAGE__"]
869         del self.Subst["__CC__"]
870
871         os.close(reason_fd)
872
873         # Send the rejection mail if appropriate
874         if not cnf["Dinstall::Options::No-Mail"]:
875             utils.send_mail(reject_mail_message)
876
877         self.Logger.log(["rejected", self.pkg.changes_file])
878
879         return 0
880
881     ################################################################################
882     def in_override_p(self, package, component, suite, binary_type, file, session=None):
883         """
884         Check if a package already has override entries in the DB
885
886         @type package: string
887         @param package: package name
888
889         @type component: string
890         @param component: component name
891
892         @type suite: string
893         @param suite: suite name
894
895         @type binary_type: string
896         @param binary_type: type of the package
897
898         @type file: string
899         @param file: filename we check
900
901         @return: the database result. But no one cares anyway.
902
903         """
904
905         cnf = Config()
906
907         if session is None:
908             session = DBConn().session()
909
910         if binary_type == "": # must be source
911             file_type = "dsc"
912         else:
913             file_type = binary_type
914
915         # Override suite name; used for example with proposed-updates
916         if cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
917             suite = cnf["Suite::%s::OverrideSuite" % (suite)]
918
919         result = get_override(package, suite, component, file_type, session)
920
921         # If checking for a source package fall back on the binary override type
922         if file_type == "dsc" and len(result) < 1:
923             result = get_override(package, suite, component, ['deb', 'udeb'], session)
924
925         # Remember the section and priority so we can check them later if appropriate
926         if len(result) > 0:
927             result = result[0]
928             self.pkg.files[file]["override section"] = result.section.section
929             self.pkg.files[file]["override priority"] = result.priority.priority
930             return result
931
932         return None
933
934     ################################################################################
935     def get_anyversion(self, sv_list, suite):
936         """
937         @type sv_list: list
938         @param sv_list: list of (suite, version) tuples to check
939
940         @type suite: string
941         @param suite: suite name
942
943         @return: the highest version in C{sv_list} for C{suite} or for any suite it Enhances (per the VersionChecks configuration); None if nothing matches.
944         """
945         anyversion = None
946         anysuite = [suite] + Config().ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
947         for (s, v) in sv_list:
948             if s in [ x.lower() for x in anysuite ]:
949                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
950                     anyversion = v
951
952         return anyversion
953
954     ################################################################################
955
956     def cross_suite_version_check(self, sv_list, file, new_version, sourceful=False):
957         """
958         @type sv_list: list
959         @param sv_list: list of (suite, version) tuples to check
960
961         @type file: string
962         @param file: filename being checked (used in reject and warning messages)
963
964         @type new_version: string
965         @param new_version: version of the uploaded package
966
967         Ensure versions are newer than existing packages in target
968         suites and that cross-suite version checking rules as
969         set out in the conf file are satisfied.
970         """
971
972         cnf = Config()
973
974         # Check versions for each target suite
975         for target_suite in self.pkg.changes["distribution"].keys():
976             must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
977             must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
978
979             # Enforce "must be newer than target suite" even if conffile omits it
980             if target_suite not in must_be_newer_than:
981                 must_be_newer_than.append(target_suite)
982
983             for (suite, existent_version) in sv_list:
984                 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
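                    # vercmp is negative if the upload is older than the existing
                    # version, zero if equal, positive if newer.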
985
986                 if suite in must_be_newer_than and sourceful and vercmp < 1:
987                     self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
988
989                 if suite in must_be_older_than and vercmp > -1:
990                     cansave = 0
991
992                     if self.pkg.changes.get('distribution-version', {}).has_key(suite):
993                         # we really use the other suite, ignoring the conflicting one ...
994                         addsuite = self.pkg.changes["distribution-version"][suite]
995
996                         add_version = self.get_anyversion(sv_list, addsuite)
997                         target_version = self.get_anyversion(sv_list, target_suite)
998
999                         if not add_version:
1000                             # not add_version can only happen if we map to a suite
1001                             # that doesn't enhance the suite we're propup'ing from.
1002                             # so "propup-ver x a b c; map a d" is a problem only if
1003                             # d doesn't enhance a.
1004                             #
1005                             # i think we could always propagate in this case, rather
1006                             # than complaining. either way, this isn't a REJECT issue
1007                             #
1008                             # And - we really should complain to the dorks who configured dak
1009                             self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite))
1010                             self.pkg.changes.setdefault("propdistribution", {})
1011                             self.pkg.changes["propdistribution"][addsuite] = 1
1012                             cansave = 1
1013                         elif not target_version:
1014                             # not target_version is true when the package is NEW
1015                             # we could just stick with the "...old version..." REJECT
1016                             # for this, I think.
1017                             self.rejects.append("Won't propagate NEW packages.")
1018                         elif apt_pkg.VersionCompare(new_version, add_version) < 0:
1019                             # propagation would be redundant. no need to reject though.
1020                             self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
1021                             cansave = 1
1022                         elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
1023                              apt_pkg.VersionCompare(add_version, target_version) >= 0:
1024                             # propagate!!
1025                             self.warnings.append("Propagating upload to %s" % (addsuite))
1026                             self.pkg.changes.setdefault("propdistribution", {})
1027                             self.pkg.changes["propdistribution"][addsuite] = 1
1028                             cansave = 1
1029
1030                     if not cansave:
1031                         self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
1032
1033     ################################################################################
1034
1035     def check_binary_against_db(self, file, session=None):
1036         """
1037         Ensure the uploaded binary's version passes the cross-suite version checks and that no identical copy is already in the archive.
1038         """
1039
1040         if session is None:
1041             session = DBConn().session()
1042
1043         # Ensure version is sane
1044         q = session.query(BinAssociation)
1045         q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[file]["package"])
1046         q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[file]["architecture"], 'all']))
1047
1048         self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
1049                                        file, self.pkg.files[file]["version"], sourceful=False)
1050
1051         # Check for any existing copies of the file
1052         q = session.query(DBBinary).filter_by(package=self.pkg.files[file]["package"])
1053         q = q.filter_by(version=self.pkg.files[file]["version"])
1054         q = q.join(Architecture).filter_by(arch_string=self.pkg.files[file]["architecture"])
1055
1056         if q.count() > 0:
1057             self.rejects.append("%s: can not overwrite existing copy already in the archive." % (file))
1058
1059     ################################################################################
1060
1061     def check_source_against_db(self, file, session=None):
1062         """ Run the cross-suite version checks for the uploaded source
1063         against the versions already in the database. """
1064         if session is None:
1065             session = DBConn().session()
1066
1067         source = self.pkg.dsc.get("source")
1068         version = self.pkg.dsc.get("version")
1069
1070         # Ensure version is sane
1071         q = session.query(SrcAssociation)
1072         q = q.join(DBSource).filter(DBSource.source==source)
1073
1074         self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
1075                                        file, version, sourceful=True)
1076
1077     ################################################################################
1078     def check_dsc_against_db(self, file):
1079         """
1080
1081         @warning: NB: this function can remove entries from the 'files' index [if
1082          the .orig.tar.gz is a duplicate of the one in the archive]; if
1083          you're iterating over 'files' and call this function as part of
1084          the loop, be sure to add a check to the top of the loop to
1085          ensure you haven't just tried to dereference the deleted entry.
1086
1087         """
1088         self.pkg.orig_tar_gz = None
1089
1090         # Try and find all files mentioned in the .dsc.  This has
1091         # to work harder to cope with the multiple possible
1092         # locations of an .orig.tar.gz.
1093         # The ordering on the select is needed to pick the newest orig
1094         # when it exists in multiple places.
1095         for dsc_name, dsc_entry in self.pkg.dsc_files.items():
1096             found = None
1097             if self.pkg.files.has_key(dsc_name):
1098                 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
1099                 actual_size = int(self.pkg.files[dsc_name]["size"])
1100                 found = "%s in incoming" % (dsc_name)
1101
1102                 # Check the file does not already exist in the archive
1103                 ql = get_poolfile_like_name(dsc_name)
1104
1105                 # Strip out anything that isn't '%s' or '/%s$'
1106                 # (rebuild the list rather than removing entries while
1107                 # iterating over it, which would skip elements)
1108                 ql = [ i for i in ql if i.filename.endswith(dsc_name) ]
1109
1110                 # "[dak] has not broken them.  [dak] has fixed a
1111                 # brokenness.  Your crappy hack exploited a bug in
1112                 # the old dinstall."
1113                 #
1114                 # "(Come on!  I thought it was always obvious that
1115                 # one just doesn't release different files with
1116                 # the same name and version.)"
1117                 #                        -- ajk@ on d-devel@l.d.o
1118
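                     # An .orig.tar.gz that exactly matches the pooled copy (same
                     # size and md5sum) is fine: reuse the pool file and drop it
                     # from the upload; any other name collision is a reject.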
1119                 if len(ql) > 0:
1120                     # Ignore exact matches for .orig.tar.gz
1121                     match = 0
1122                     if dsc_name.endswith(".orig.tar.gz"):
1123                         for i in ql:
1124                             if self.pkg.files.has_key(dsc_name) and \
1125                                int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
1126                                self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
1127                                 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
1128                                 # TODO: Don't delete the entry, just mark it as not needed
1129                                 # This would fix the stupidity of changing something we often iterate over
1130                                 # whilst we're doing it
1131                                 del self.pkg.files[dsc_name]
1132                                 self.pkg.orig_tar_gz = os.path.join(i.location.path, i.filename)
1133                                 match = 1
1134
1135                     if not match:
1136                         self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
1137
1138             elif dsc_name.endswith(".orig.tar.gz"):
1139                 # Check in the pool
1140                 ql = get_poolfile_like_name(dsc_name)
1141
1142                 # Strip out anything that isn't '%s' or '/%s$'
1143                 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
1144                 # (rebuild the list rather than removing entries while
1145                 # iterating over it, which would skip elements)
1146                 ql = [ i for i in ql if i.filename.endswith(dsc_name) ]
1147
1148                 if len(ql) > 0:
1149                     # Unfortunately, we may get more than one match here if,
1150                     # for example, the package was in potato but had an -sa
1151                     # upload in woody.  So we need to choose the right one.
1152
1153                     # default to something sane in case we don't match any or have only one
1154                     x = ql[0]
1155
1156                     if len(ql) > 1:
1157                         for i in ql:
1158                             old_file = os.path.join(i.location.path, i.filename)
1159                             old_file_fh = utils.open_file(old_file)
1160                             actual_md5 = apt_pkg.md5sum(old_file_fh)
1161                             old_file_fh.close()
1162                             actual_size = os.stat(old_file)[stat.ST_SIZE]
1163                             if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
1164                                 x = i
1165
1166                     old_file = os.path.join(x.location.path, x.filename)
1167                     old_file_fh = utils.open_file(old_file)
1168                     actual_md5 = apt_pkg.md5sum(old_file_fh)
1169                     old_file_fh.close()
1170                     actual_size = os.stat(old_file)[stat.ST_SIZE]
1171                     found = old_file
1172                     suite_type = x.location.archive_type
1173                     # need this for updating dsc_files in install()
1174                     dsc_entry["files id"] = x.file_id
1175                     # See install() in process-accepted...
1176                     self.pkg.orig_tar_id = x.file_id
1177                     self.pkg.orig_tar_gz = old_file
1178                     self.pkg.orig_tar_location = x.location.location_id
1179                 else:
1180                     # TODO: Record the queues and info in the DB so we don't hardcode all this crap
1181                     # Not there? Check the queue directories...
1182                     for directory in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
1183                         in_otherdir = os.path.join(Config()["Dir::Queue::%s" % (directory)], dsc_name)
1184                         if os.path.exists(in_otherdir):
1185                             in_otherdir_fh = utils.open_file(in_otherdir)
1186                             actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
1187                             in_otherdir_fh.close()
1188                             actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
1189                             found = in_otherdir
1190                             self.pkg.orig_tar_gz = in_otherdir
1191
1192                     if not found:
1193                         self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_name))
1194                         self.pkg.orig_tar_gz = -1
1195                         continue
1196             else:
1197                 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (file, dsc_name))
1198                 continue
1199             if actual_md5 != dsc_entry["md5sum"]:
1200                 self.rejects.append("md5sum for %s doesn't match %s." % (found, file))
1201             if actual_size != int(dsc_entry["size"]):
1202                 self.rejects.append("size for %s doesn't match %s." % (found, file))
1203