]> git.decadent.org.uk Git - dak.git/blob - daklib/queue.py
docstrings
[dak.git] / daklib / queue.py
1 #!/usr/bin/env python
2 # vim:set et sw=4:
3
4 """
5 Queue utility functions for dak
6
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009, 2010  Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
11 """
12
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
17
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
21 # GNU General Public License for more details.
22
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
26
27 ###############################################################################
28
29 import errno
30 import os
31 import stat
32 import sys
33 import time
34 import apt_inst
35 import apt_pkg
36 import utils
37 import commands
38 import shutil
39 import textwrap
40 from types import *
41 from sqlalchemy.sql.expression import desc
42 from sqlalchemy.orm.exc import NoResultFound
43
44 import yaml
45
46 from dak_exceptions import *
47 from changes import *
48 from regexes import *
49 from config import Config
50 from holding import Holding
51 from urgencylog import UrgencyLog
52 from dbconn import *
53 from summarystats import SummaryStats
54 from utils import parse_changes, check_dsc_files
55 from textutils import fix_maintainer
56 from binary import Binary
57 from lintian import parse_lintian_output, generate_reject_messages
58
59 ###############################################################################
60
def get_type(f, session):
    """
    Get the file type of C{f}

    @type f: dict
    @param f: file entry from Changes object

    @type session: SQLA Session
    @param session: SQL Alchemy session object

    @rtype: string
    @return: filetype

    """
    # Determine the type
    if f.has_key("dbtype"):
        file_type = f["dbtype"]
    elif re_source_ext.match(f["type"]):
        file_type = "dsc"
    else:
        # Report f["type"] here: file_type is not assigned on this branch,
        # so referencing it raised a NameError instead of the intended
        # fatal error message.
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (f["type"]))

    # Validate the override type
    type_id = get_override_type(file_type, session)
    if type_id is None:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))

    return file_type
89
90 ################################################################################
91
92 # Determine what parts in a .changes are NEW
93
94 def determine_new(changes, files, warn=1):
95     """
96     Determine what parts in a C{changes} file are NEW.
97
98     @type changes: Upload.Pkg.changes dict
99     @param changes: Changes dictionary
100
101     @type files: Upload.Pkg.files dict
102     @param files: Files dictionary
103
104     @type warn: bool
105     @param warn: Warn if overrides are added for (old)stable
106
107     @rtype: dict
108     @return: dictionary of NEW components.
109
110     """
111     new = {}
112
113     session = DBConn().session()
114
115     # Build up a list of potentially new things
116     for name, f in files.items():
117         # Skip byhand elements
118 #        if f["type"] == "byhand":
119 #            continue
120         pkg = f["package"]
121         priority = f["priority"]
122         section = f["section"]
123         file_type = get_type(f, session)
124         component = f["component"]
125
126         if file_type == "dsc":
127             priority = "source"
128
129         if not new.has_key(pkg):
130             new[pkg] = {}
131             new[pkg]["priority"] = priority
132             new[pkg]["section"] = section
133             new[pkg]["type"] = file_type
134             new[pkg]["component"] = component
135             new[pkg]["files"] = []
136         else:
137             old_type = new[pkg]["type"]
138             if old_type != file_type:
139                 # source gets trumped by deb or udeb
140                 if old_type == "dsc":
141                     new[pkg]["priority"] = priority
142                     new[pkg]["section"] = section
143                     new[pkg]["type"] = file_type
144                     new[pkg]["component"] = component
145
146         new[pkg]["files"].append(name)
147
148         if f.has_key("othercomponents"):
149             new[pkg]["othercomponents"] = f["othercomponents"]
150
151     # Fix up the list of target suites
152     cnf = Config()
153     for suite in changes["suite"].keys():
154         override = cnf.Find("Suite::%s::OverrideSuite" % (suite))
155         if override:
156             (olderr, newerr) = (get_suite(suite, session) == None,
157                                 get_suite(override, session) == None)
158             if olderr or newerr:
159                 (oinv, newinv) = ("", "")
160                 if olderr: oinv = "invalid "
161                 if newerr: ninv = "invalid "
162                 print "warning: overriding %ssuite %s to %ssuite %s" % (
163                         oinv, suite, ninv, override)
164             del changes["suite"][suite]
165             changes["suite"][override] = 1
166
167     for suite in changes["suite"].keys():
168         for pkg in new.keys():
169             ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
170             if len(ql) > 0:
171                 for file_entry in new[pkg]["files"]:
172                     if files[file_entry].has_key("new"):
173                         del files[file_entry]["new"]
174                 del new[pkg]
175
176     if warn:
177         for s in ['stable', 'oldstable']:
178             if changes["suite"].has_key(s):
179                 print "WARNING: overrides will be added for %s!" % s
180         for pkg in new.keys():
181             if new[pkg].has_key("othercomponents"):
182                 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
183
184     session.close()
185
186     return new
187
188 ################################################################################
189
def check_valid(new):
    """
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages can not be udeb
      - source priority can only be assigned to dsc file types

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.

    """
    for pkg in new.keys():
        section_name = new[pkg]["section"]
        priority_name = new[pkg]["priority"]
        file_type = new[pkg]["type"]

        # Resolve the section/priority names to database ids; -1 marks
        # an unknown (and therefore invalid) value.
        section = get_section(section_name)
        if section is None:
            new[pkg]["section id"] = -1
        else:
            new[pkg]["section id"] = section.section_id

        priority = get_priority(priority_name)
        if priority is None:
            new[pkg]["priority id"] = -1
        else:
            new[pkg]["priority id"] = priority.priority_id

        # Sanity checks
        di = section_name.find("debian-installer") != -1

        # If d-i, we must be udeb and vice-versa
        if     (di and file_type not in ("udeb", "dsc")) or \
           (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1

        # If dsc we need to be source and vice-versa.  Compare the *name*:
        # 'priority' is the database object (or None) returned above and
        # never equals the string "source", so this check previously never
        # fired.
        if (priority_name == "source" and file_type != "dsc") or \
           (priority_name != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1
232 ###############################################################################
233
def check_status(files):
    """
    Check whether an upload contains byhand or NEW files.

    @type files: dict
    @param files: Files dictionary (filename -> file entry dict)

    @rtype: tuple
    @return: (new, byhand) flags; each is 1 if any file entry carries the
             corresponding key, else 0.  A file marked byhand is not also
             counted as new.
    """
    new = byhand = 0
    for f in files.keys():
        # dict.has_key() is deprecated (and removed in Python 3);
        # 'in' is the equivalent, portable spelling.
        if "byhand" in files[f]:
            byhand = 1
        elif "new" in files[f]:
            new = 1
    return (new, byhand)
242
243 ###############################################################################
244
# Used by Upload.check_timestamps
class TarTime(object):
    """Tar extraction callback that records members whose mtime falls
    outside the [past_cutoff, future_cutoff] window."""

    def __init__(self, future_cutoff, past_cutoff):
        self.future_cutoff = future_cutoff
        self.past_cutoff = past_cutoff
        self.reset()

    def reset(self):
        """Forget any previously recorded out-of-range members."""
        self.future_files = {}
        self.ancient_files = {}

    def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
        """Record Name in future_files/ancient_files if MTime is out of range."""
        if self.future_cutoff < MTime:
            self.future_files[Name] = MTime
        if self.past_cutoff > MTime:
            self.ancient_files[Name] = MTime
261
262 ###############################################################################
263
264 class Upload(object):
265     """
266     Everything that has to do with an upload processed.
267
268     """
269     def __init__(self):
270         self.logger = None
271         self.pkg = Changes()
272         self.reset()
273
274     ###########################################################################
275
276     def reset (self):
277         """ Reset a number of internal variables."""
278
279         # Initialize the substitution template map
280         cnf = Config()
281         self.Subst = {}
282         self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
283         self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
284         self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
285         self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
286
287         self.rejects = []
288         self.warnings = []
289         self.notes = []
290
291         self.later_check_files = []
292
293         self.pkg.reset()
294
295     def package_info(self):
296         """
297         Format various messages from this Upload to send to the maintainer.
298         """
299
300         msgs = (
301             ('Reject Reasons', self.rejects),
302             ('Warnings', self.warnings),
303             ('Notes', self.notes),
304         )
305
306         msg = ''
307         for title, messages in msgs:
308             if messages:
309                 msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
310         msg += '\n\n'
311
312         return msg
313
314     ###########################################################################
    def update_subst(self):
        """
        Set up the per-package template substitution mappings.

        Populates self.Subst with __ARCHITECTURE__, __CHANGES_FILENAME__,
        __FILE_CONTENTS__, the __MAINTAINER*__ keys, __REJECT_MESSAGE__,
        __SOURCE__ and __VERSION__, derived from self.pkg.changes and the
        Dinstall configuration.
        """

        cnf = Config()

        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not self.pkg.changes.has_key("architecture") or not \
           isinstance(self.pkg.changes["architecture"], dict):
            self.pkg.changes["architecture"] = { "Unknown" : "" }

        # and maintainer2047 may not exist.
        if not self.pkg.changes.has_key("maintainer2047"):
            self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]

        self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
        self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if self.pkg.changes["architecture"].has_key("source") and \
           self.pkg.changes["changedby822"] != "" and \
           (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):

            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
        else:
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")

        # Sponsored uploads: keep the sponsor in the To list as well.
        if "sponsoremail" in self.pkg.changes:
            self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]

        # Bcc the package tracking server, if one is configured.
        if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
            self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if cnf.get("Dinstall::OverrideMaintainer"):
            self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
            self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]

        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
        self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
360
361     ###########################################################################
362     def load_changes(self, filename):
363         """
364         Load a changes file and setup a dictionary around it. Also checks for mandantory
365         fields  within.
366
367         @type: string
368         @param: Changes filename, full path.
369
370         @rtype: boolean
371         @return: whether the changes file was valid or not.  We may want to
372                  reject even if this is True (see what gets put in self.rejects).
373                  This is simply to prevent us even trying things later which will
374                  fail because we couldn't properly parse the file.
375         """
376         Cnf = Config()
377         self.pkg.changes_file = filename
378
379         # Parse the .changes field into a dictionary
380         try:
381             self.pkg.changes.update(parse_changes(filename))
382         except CantOpenError:
383             self.rejects.append("%s: can't read file." % (filename))
384             return False
385         except ParseChangesError, line:
386             self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
387             return False
388         except ChangesUnicodeError:
389             self.rejects.append("%s: changes file not proper utf-8" % (filename))
390             return False
391
392         # Parse the Files field from the .changes into another dictionary
393         try:
394             self.pkg.files.update(utils.build_file_list(self.pkg.changes))
395         except ParseChangesError, line:
396             self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
397             return False
398         except UnknownFormatError, format:
399             self.rejects.append("%s: unknown format '%s'." % (filename, format))
400             return False
401
402         # Check for mandatory fields
403         for i in ("distribution", "source", "binary", "architecture",
404                   "version", "maintainer", "files", "changes", "description"):
405             if not self.pkg.changes.has_key(i):
406                 # Avoid undefined errors later
407                 self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
408                 return False
409
410         # Strip a source version in brackets from the source field
411         if re_strip_srcver.search(self.pkg.changes["source"]):
412             self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])
413
414         # Ensure the source field is a valid package name.
415         if not re_valid_pkg_name.match(self.pkg.changes["source"]):
416             self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))
417
418         # Split multi-value fields into a lower-level dictionary
419         for i in ("architecture", "distribution", "binary", "closes"):
420             o = self.pkg.changes.get(i, "")
421             if o != "":
422                 del self.pkg.changes[i]
423
424             self.pkg.changes[i] = {}
425
426             for j in o.split():
427                 self.pkg.changes[i][j] = 1
428
429         # Fix the Maintainer: field to be RFC822/2047 compatible
430         try:
431             (self.pkg.changes["maintainer822"],
432              self.pkg.changes["maintainer2047"],
433              self.pkg.changes["maintainername"],
434              self.pkg.changes["maintaineremail"]) = \
435                    fix_maintainer (self.pkg.changes["maintainer"])
436         except ParseMaintError, msg:
437             self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
438                    % (filename, self.pkg.changes["maintainer"], msg))
439
440         # ...likewise for the Changed-By: field if it exists.
441         try:
442             (self.pkg.changes["changedby822"],
443              self.pkg.changes["changedby2047"],
444              self.pkg.changes["changedbyname"],
445              self.pkg.changes["changedbyemail"]) = \
446                    fix_maintainer (self.pkg.changes.get("changed-by", ""))
447         except ParseMaintError, msg:
448             self.pkg.changes["changedby822"] = ""
449             self.pkg.changes["changedby2047"] = ""
450             self.pkg.changes["changedbyname"] = ""
451             self.pkg.changes["changedbyemail"] = ""
452
453             self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
454                    % (filename, self.pkg.changes["changed-by"], msg))
455
456         # Ensure all the values in Closes: are numbers
457         if self.pkg.changes.has_key("closes"):
458             for i in self.pkg.changes["closes"].keys():
459                 if re_isanum.match (i) == None:
460                     self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))
461
462         # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
463         self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
464         self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])
465
466         # Check the .changes is non-empty
467         if not self.pkg.files:
468             self.rejects.append("%s: nothing to do (Files field is empty)." % (base_filename))
469             return False
470
471         # Changes was syntactically valid even if we'll reject
472         return True
473
474     ###########################################################################
475
    def check_distributions(self):
        """
        Check and map the Distribution field.

        Applies each configured SuiteMappings entry to
        self.pkg.changes["distribution"] (mapping, ignoring or rejecting
        target suites), then verifies that at least one known target
        distribution remains.  Problems are appended to self.rejects,
        self.warnings and self.notes.
        """

        Cnf = Config()

        # Handle suite mappings
        for m in Cnf.ValueList("SuiteMappings"):
            args = m.split()
            mtype = args[0]
            if mtype == "map" or mtype == "silent-map":
                # Replace suite 'source' with 'dest'; plain "map" leaves a
                # note for the uploader, "silent-map" does not.
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    del self.pkg.changes["distribution"][source]
                    self.pkg.changes["distribution"][dest] = 1
                    if mtype != "silent-map":
                        self.notes.append("Mapping %s to %s." % (source, dest))
                if self.pkg.changes.has_key("distribution-version"):
                    if self.pkg.changes["distribution-version"].has_key(source):
                        self.pkg.changes["distribution-version"][source]=dest
            elif mtype == "map-unreleased":
                # Divert the upload from 'source' to 'dest' if any upload
                # architecture is not built for 'source'.
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    for arch in self.pkg.changes["architecture"].keys():
                        if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
                            self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
                            del self.pkg.changes["distribution"][source]
                            self.pkg.changes["distribution"][dest] = 1
                            break
            elif mtype == "ignore":
                # Drop the suite from the target list, with a warning.
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    del self.pkg.changes["distribution"][suite]
                    self.warnings.append("Ignoring %s as a target suite." % (suite))
            elif mtype == "reject":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    self.rejects.append("Uploads to %s are not accepted." % (suite))
            elif mtype == "propup-version":
                # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
                #
                # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
                if self.pkg.changes["distribution"].has_key(args[1]):
                    self.pkg.changes.setdefault("distribution-version", {})
                    for suite in args[2:]:
                        self.pkg.changes["distribution-version"][suite] = suite

        # Ensure there is (still) a target distribution
        if len(self.pkg.changes["distribution"].keys()) < 1:
            self.rejects.append("No valid distribution remaining.")

        # Ensure target distributions exist
        for suite in self.pkg.changes["distribution"].keys():
            if not Cnf.has_key("Suite::%s" % (suite)):
                self.rejects.append("Unknown distribution `%s'." % (suite))
530
531     ###########################################################################
532
533     def binary_file_checks(self, f, session):
534         cnf = Config()
535         entry = self.pkg.files[f]
536
537         # Extract package control information
538         deb_file = utils.open_file(f)
539         try:
540             control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
541         except:
542             self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
543             deb_file.close()
544             # Can't continue, none of the checks on control would work.
545             return
546
547         # Check for mandantory "Description:"
548         deb_file.seek(0)
549         try:
550             apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
551         except:
552             self.rejects.append("%s: Missing Description in binary package" % (f))
553             return
554
555         deb_file.close()
556
557         # Check for mandatory fields
558         for field in [ "Package", "Architecture", "Version" ]:
559             if control.Find(field) == None:
560                 # Can't continue
561                 self.rejects.append("%s: No %s field in control." % (f, field))
562                 return
563
564         # Ensure the package name matches the one give in the .changes
565         if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
566             self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
567
568         # Validate the package field
569         package = control.Find("Package")
570         if not re_valid_pkg_name.match(package):
571             self.rejects.append("%s: invalid package name '%s'." % (f, package))
572
573         # Validate the version field
574         version = control.Find("Version")
575         if not re_valid_version.match(version):
576             self.rejects.append("%s: invalid version number '%s'." % (f, version))
577
578         # Ensure the architecture of the .deb is one we know about.
579         default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
580         architecture = control.Find("Architecture")
581         upload_suite = self.pkg.changes["distribution"].keys()[0]
582
583         if      architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session)] \
584             and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session)]:
585             self.rejects.append("Unknown architecture '%s'." % (architecture))
586
587         # Ensure the architecture of the .deb is one of the ones
588         # listed in the .changes.
589         if not self.pkg.changes["architecture"].has_key(architecture):
590             self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
591
592         # Sanity-check the Depends field
593         depends = control.Find("Depends")
594         if depends == '':
595             self.rejects.append("%s: Depends field is empty." % (f))
596
597         # Sanity-check the Provides field
598         provides = control.Find("Provides")
599         if provides:
600             provide = re_spacestrip.sub('', provides)
601             if provide == '':
602                 self.rejects.append("%s: Provides field is empty." % (f))
603             prov_list = provide.split(",")
604             for prov in prov_list:
605                 if not re_valid_pkg_name.match(prov):
606                     self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
607
608         # Check the section & priority match those given in the .changes (non-fatal)
609         if     control.Find("Section") and entry["section"] != "" \
610            and entry["section"] != control.Find("Section"):
611             self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
612                                 (f, control.Find("Section", ""), entry["section"]))
613         if control.Find("Priority") and entry["priority"] != "" \
614            and entry["priority"] != control.Find("Priority"):
615             self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
616                                 (f, control.Find("Priority", ""), entry["priority"]))
617
618         entry["package"] = package
619         entry["architecture"] = architecture
620         entry["version"] = version
621         entry["maintainer"] = control.Find("Maintainer", "")
622
623         if f.endswith(".udeb"):
624             self.pkg.files[f]["dbtype"] = "udeb"
625         elif f.endswith(".deb"):
626             self.pkg.files[f]["dbtype"] = "deb"
627         else:
628             self.rejects.append("%s is neither a .deb or a .udeb." % (f))
629
630         entry["source"] = control.Find("Source", entry["package"])
631
632         # Get the source version
633         source = entry["source"]
634         source_version = ""
635
636         if source.find("(") != -1:
637             m = re_extract_src_version.match(source)
638             source = m.group(1)
639             source_version = m.group(2)
640
641         if not source_version:
642             source_version = self.pkg.files[f]["version"]
643
644         entry["source package"] = source
645         entry["source version"] = source_version
646
647         # Ensure the filename matches the contents of the .deb
648         m = re_isadeb.match(f)
649
650         #  package name
651         file_package = m.group(1)
652         if entry["package"] != file_package:
653             self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
654                                 (f, file_package, entry["dbtype"], entry["package"]))
655         epochless_version = re_no_epoch.sub('', control.Find("Version"))
656
657         #  version
658         file_version = m.group(2)
659         if epochless_version != file_version:
660             self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
661                                 (f, file_version, entry["dbtype"], epochless_version))
662
663         #  architecture
664         file_architecture = m.group(3)
665         if entry["architecture"] != file_architecture:
666             self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
667                                 (f, file_architecture, entry["dbtype"], entry["architecture"]))
668
669         # Check for existent source
670         source_version = entry["source version"]
671         source_package = entry["source package"]
672         if self.pkg.changes["architecture"].has_key("source"):
673             if source_version != self.pkg.changes["version"]:
674                 self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
675                                     (source_version, f, self.pkg.changes["version"]))
676         else:
677             # Check in the SQL database
678             if not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
679                 # Check in one of the other directories
680                 source_epochless_version = re_no_epoch.sub('', source_version)
681                 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
682                 if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
683                     entry["byhand"] = 1
684                 elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
685                     entry["new"] = 1
686                 else:
687                     dsc_file_exists = False
688                     for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
689                         if cnf.has_key("Dir::Queue::%s" % (myq)):
690                             if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
691                                 dsc_file_exists = True
692                                 break
693
694                     if not dsc_file_exists:
695                         self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
696
697         # Check the version and for file overwrites
698         self.check_binary_against_db(f, session)
699
700         # Temporarily disable contents generation until we change the table storage layout
701         #b = Binary(f)
702         #b.scan_package()
703         #if len(b.rejects) > 0:
704         #    for j in b.rejects:
705         #        self.rejects.append(j)
706
707     def source_file_checks(self, f, session):
708         entry = self.pkg.files[f]
709
710         m = re_issource.match(f)
711         if not m:
712             return
713
714         entry["package"] = m.group(1)
715         entry["version"] = m.group(2)
716         entry["type"] = m.group(3)
717
718         # Ensure the source package name matches the Source filed in the .changes
719         if self.pkg.changes["source"] != entry["package"]:
720             self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
721
722         # Ensure the source version matches the version in the .changes file
723         if re_is_orig_source.match(f):
724             changes_version = self.pkg.changes["chopversion2"]
725         else:
726             changes_version = self.pkg.changes["chopversion"]
727
728         if changes_version != entry["version"]:
729             self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))
730
731         # Ensure the .changes lists source in the Architecture field
732         if not self.pkg.changes["architecture"].has_key("source"):
733             self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))
734
735         # Check the signature of a .dsc file
736         if entry["type"] == "dsc":
737             # check_signature returns either:
738             #  (None, [list, of, rejects]) or (signature, [])
739             (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
740             for j in rejects:
741                 self.rejects.append(j)
742
743         entry["architecture"] = "source"
744
745     def per_suite_file_checks(self, f, suite, session):
746         cnf = Config()
747         entry = self.pkg.files[f]
748
749         # Skip byhand
750         if entry.has_key("byhand"):
751             return
752
753         # Check we have fields we need to do these checks
754         oktogo = True
755         for m in ['component', 'package', 'priority', 'size', 'md5sum']:
756             if not entry.has_key(m):
757                 self.rejects.append("file '%s' does not have field %s set" % (f, m))
758                 oktogo = False
759
760         if not oktogo:
761             return
762
763         # Handle component mappings
764         for m in cnf.ValueList("ComponentMappings"):
765             (source, dest) = m.split()
766             if entry["component"] == source:
767                 entry["original component"] = source
768                 entry["component"] = dest
769
770         # Ensure the component is valid for the target suite
771         if cnf.has_key("Suite:%s::Components" % (suite)) and \
772            entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
773             self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
774             return
775
776         # Validate the component
777         if not get_component(entry["component"], session):
778             self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
779             return
780
781         # See if the package is NEW
782         if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
783             entry["new"] = 1
784
785         # Validate the priority
786         if entry["priority"].find('/') != -1:
787             self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))
788
789         # Determine the location
790         location = cnf["Dir::Pool"]
791         l = get_location(location, entry["component"], session=session)
792         if l is None:
793             self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %)" % entry["component"])
794             entry["location id"] = -1
795         else:
796             entry["location id"] = l.location_id
797
798         # Check the md5sum & size against existing files (if any)
799         entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])
800
801         found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
802                                          entry["size"], entry["md5sum"], entry["location id"])
803
804         if found is None:
805             self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
806         elif found is False and poolfile is not None:
807             self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
808         else:
809             if poolfile is None:
810                 entry["files id"] = None
811             else:
812                 entry["files id"] = poolfile.file_id
813
814         # Check for packages that have moved from one component to another
815         entry['suite'] = suite
816         res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
817         if res.rowcount > 0:
818             entry["othercomponents"] = res.fetchone()[0]
819
    def check_files(self, action=True):
        """
        Run the file-level checks over everything listed in the .changes:
        copy the files into the holding area (when C{action} is True),
        reject .changes files dak already knows about, classify every file
        as deb/source/byhand and dispatch to the per-type checks, then run
        the per-suite checks for each target suite.  Problems accumulate
        in self.rejects / self.warnings.

        @type action: bool
        @param action: if True, copy the files into the holding area first
        """
        file_keys = self.pkg.files.keys()
        holding = Holding()
        cnf = Config()

        if action:
            cwd = os.getcwd()
            os.chdir(self.pkg.directory)
            for f in file_keys:
                # A failed copy is not fatal yet: the file may still be
                # found in the pool/DB further down.
                ret = holding.copy_to_holding(f)
                if ret is not None:
                    self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)

            os.chdir(cwd)

        # check we already know the changes file
        # [NB: this check must be done post-suite mapping]
        base_filename = os.path.basename(self.pkg.changes_file)

        session = DBConn().session()

        try:
            dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
            # if in the pool or in a queue other than unchecked, reject
            if (dbc.in_queue is None) \
                   or (dbc.in_queue is not None
                       and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
                self.rejects.append("%s file already known to dak" % base_filename)
        except NoResultFound, e:
            # not known, good
            pass

        has_binaries = False
        has_source = False

        for f, entry in self.pkg.files.items():
            # Ensure the file does not already exist in one of the accepted directories
            for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                if not cnf.has_key("Dir::Queue::%s" % (d)): continue
                if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
                    self.rejects.append("%s file already exists in the %s directory." % (f, d))

            if not re_taint_free.match(f):
                self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))

            # Check the file is readable
            if os.access(f, os.R_OK) == 0:
                # When running in -n, copy_to_holding() won't have
                # generated the reject_message, so we need to.
                if action:
                    if os.path.exists(f):
                        self.rejects.append("Can't read `%s'. [permission denied]" % (f))
                    else:
                        # Don't directly reject, mark to check later to deal with orig's
                        # we can find in the pool
                        self.later_check_files.append(f)
                entry["type"] = "unreadable"
                continue

            # If it's byhand skip remaining checks
            if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Checks for a binary package...
            elif re_isadeb.match(f):
                has_binaries = True
                entry["type"] = "deb"

                # This routine appends to self.rejects/warnings as appropriate
                self.binary_file_checks(f, session)

            # Checks for a source package...
            elif re_issource.match(f):
                has_source = True

                # This routine appends to self.rejects/warnings as appropriate
                self.source_file_checks(f, session)

            # Not a binary or source package?  Assume byhand...
            else:
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Per-suite file checks
            entry["oldfiles"] = {}
            for suite in self.pkg.changes["distribution"].keys():
                self.per_suite_file_checks(f, suite, session)

        session.close()

        # If the .changes file says it has source, it must have source.
        if self.pkg.changes["architecture"].has_key("source"):
            if not has_source:
                self.rejects.append("no source found and Architecture line in changes mention source.")

            if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
                self.rejects.append("source only uploads are not supported.")
918
919     ###########################################################################
    def check_dsc(self, action=True, session=None):
        """
        Returns bool indicating whether or not the source changes are valid.

        Locates the (single) .dsc in the upload, parses it, builds the
        dsc_files list, enforces mandatory fields, validates source name,
        version, permitted source formats, Maintainer and Build-Depends
        fields, and cross-checks versions and files against the database.
        Problems are appended to self.rejects.
        """
        # Ensure there is source to check
        if not self.pkg.changes["architecture"].has_key("source"):
            return True

        # Find the .dsc
        dsc_filename = None
        for f, entry in self.pkg.files.items():
            if entry["type"] == "dsc":
                if dsc_filename:
                    self.rejects.append("can not process a .changes file with multiple .dsc's.")
                    return False
                else:
                    dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            self.rejects.append("source uploads must contain a dsc file")
            return False

        # Parse the .dsc file
        try:
            self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1))
        except CantOpenError:
            # if not -n copy_to_holding() will have done this for us...
            if not action:
                self.rejects.append("%s: can't read file." % (dsc_filename))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
        except InvalidDscError, line:
            self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
        except ChangesUnicodeError:
            self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))

        # Build up the file list of files mentioned by the .dsc
        try:
            self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
        except NoFilesFieldError:
            self.rejects.append("%s: no Files: field." % (dsc_filename))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
            return False

        # Enforce mandatory fields
        for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
            if not self.pkg.dsc.has_key(i):
                self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
                return False

        # Validate the source and version fields
        if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
        if not re_valid_version.match(self.pkg.dsc["version"]):
            self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))

        # Only a limited list of source formats are allowed in each suite
        for dist in self.pkg.changes["distribution"].keys():
            allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
            if self.pkg.dsc["format"] not in allowed:
                self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))

        # Validate the Maintainer field
        try:
            # We ignore the return value
            fix_maintainer(self.pkg.dsc["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                                 % (dsc_filename, self.pkg.dsc["maintainer"], msg))

        # Validate the build-depends field(s)
        for field_name in [ "build-depends", "build-depends-indep" ]:
            field = self.pkg.dsc.get(field_name)
            if field:
                # Have apt try to parse them...
                try:
                    apt_pkg.ParseSrcDepends(field)
                except:
                    self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))

        # Ensure the version number in the .dsc matches the version number in the .changes
        epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        changes_version = self.pkg.files[dsc_filename]["version"]

        if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
            self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))

        # Ensure the Files field contain only what's expected
        self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))

        # Ensure source is newer than existing source in target suites
        # NOTE(review): this opens a fresh session and shadows the `session'
        # parameter used above for get_suite_src_formats — TODO confirm intended.
        session = DBConn().session()
        self.check_source_against_db(dsc_filename, session)
        self.check_dsc_against_db(dsc_filename, session)
        session.close()

        # Finally, check if we're missing any files
        for f in self.later_check_files:
            self.rejects.append("Could not find file %s references in changes" % f)

        return True
1025
1026     ###########################################################################
1027
    def get_changelog_versions(self, source_dir):
        """Extracts the source package and (optionally) grabs the
        version history out of debian/changelog for the BTS.

        Symlinks the upload's source files (and any pool orig files) from
        C{source_dir} into the current directory, runs dpkg-source -x,
        then collects matching changelog version lines into
        self.pkg.dsc["bts changelog"].  Problems go to self.rejects.
        """

        cnf = Config()

        # Find the .dsc (again)
        dsc_filename = None
        for f in self.pkg.files.keys():
            if self.pkg.files[f]["type"] == "dsc":
                dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            return

        # Create a symlink mirror of the source files in our temporary directory
        for f in self.pkg.files.keys():
            m = re_issource.match(f)
            if m:
                src = os.path.join(source_dir, f)
                # If a file is missing for whatever reason, give up.
                if not os.path.exists(src):
                    return
                ftype = m.group(3)
                # Orig files that already have a pool path are symlinked
                # in the next loop instead.
                if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
                   self.pkg.orig_files[f].has_key("path"):
                    continue
                dest = os.path.join(os.getcwd(), f)
                os.symlink(src, dest)

        # If the orig files are not a part of the upload, create symlinks to the
        # existing copies.
        for orig_file in self.pkg.orig_files.keys():
            if not self.pkg.orig_files[orig_file].has_key("path"):
                continue
            dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
            os.symlink(self.pkg.orig_files[orig_file]["path"], dest)

        # Extract the source
        cmd = "dpkg-source -sn -x %s" % (dsc_filename)
        (result, output) = commands.getstatusoutput(cmd)
        if (result != 0):
            self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
            self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
            return

        # Without BTS version tracking configured there is nothing more to do
        if not cnf.Find("Dir::Queue::BTSVersionTrack"):
            return

        # Get the upstream version
        upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        if re_strip_revision.search(upstr_version):
            upstr_version = re_strip_revision.sub('', upstr_version)

        # Ensure the changelog file exists
        changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
        if not os.path.exists(changelog_filename):
            self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
            return

        # Parse the changelog
        self.pkg.dsc["bts changelog"] = ""
        changelog_file = utils.open_file(changelog_filename)
        for line in changelog_file.readlines():
            m = re_changelog_versions.match(line)
            if m:
                self.pkg.dsc["bts changelog"] += line
        changelog_file.close()

        # Check we found at least one revision in the changelog
        if not self.pkg.dsc["bts changelog"]:
            self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1101
1102     def check_source(self):
1103         # Bail out if:
1104         #    a) there's no source
1105         if not self.pkg.changes["architecture"].has_key("source"):
1106             return
1107
1108         tmpdir = utils.temp_dirname()
1109
1110         # Move into the temporary directory
1111         cwd = os.getcwd()
1112         os.chdir(tmpdir)
1113
1114         # Get the changelog version history
1115         self.get_changelog_versions(cwd)
1116
1117         # Move back and cleanup the temporary tree
1118         os.chdir(cwd)
1119
1120         try:
1121             shutil.rmtree(tmpdir)
1122         except OSError, e:
1123             if e.errno != errno.EACCES:
1124                 print "foobar"
1125                 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1126
1127             self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1128             # We probably have u-r or u-w directories so chmod everything
1129             # and try again.
1130             cmd = "chmod -R u+rwx %s" % (tmpdir)
1131             result = os.system(cmd)
1132             if result != 0:
1133                 utils.fubar("'%s' failed with result %s." % (cmd, result))
1134             shutil.rmtree(tmpdir)
1135         except Exception, e:
1136             print "foobar2 (%s)" % e
1137             utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1138
1139     ###########################################################################
    def ensure_hashes(self):
        """
        Make sure all checksum fields appropriate for the .changes Format
        version are present: copy the Checksums-* fields over from the
        original changes blob, reject unsupported hash fields, and compute
        any hashes the uploader's format version predates.  Problems are
        appended to self.rejects.
        """
        # Make sure we recognise the format of the Files: field in the .changes
        format = self.pkg.changes.get("format", "0.0").split(".", 1)
        if len(format) == 2:
            format = int(format[0]), int(format[1])
        else:
            format = int(float(format[0])), 0

        # We need to deal with the original changes blob, as the fields we need
        # might not be in the changes dict serialised into the .dak anymore.
        orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])

        # Copy the checksums over to the current changes dict.  This will keep
        # the existing modifications to it intact.
        for field in orig_changes:
            if field.startswith('checksums-'):
                self.pkg.changes[field] = orig_changes[field]

        # Check for unsupported hashes
        for j in utils.check_hash_fields(".changes", self.pkg.changes):
            self.rejects.append(j)

        for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
            self.rejects.append(j)

        # We have to calculate the hash if we have an earlier changes version than
        # the hash appears in rather than require it exist in the changes file
        for hashname, hashfunc, version in utils.known_hashes:
            # TODO: Move _ensure_changes_hash into this class
            for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
                self.rejects.append(j)
            if "source" in self.pkg.changes["architecture"]:
                # TODO: Move _ensure_dsc_hash into this class
                for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
                    self.rejects.append(j)
1175
1176     def check_hashes(self):
1177         for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1178             self.rejects.append(m)
1179
1180         for m in utils.check_size(".changes", self.pkg.files):
1181             self.rejects.append(m)
1182
1183         for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1184             self.rejects.append(m)
1185
1186         for m in utils.check_size(".dsc", self.pkg.dsc_files):
1187             self.rejects.append(m)
1188
1189         self.ensure_hashes()
1190
1191     ###########################################################################
1192
    def ensure_orig(self, target_dir='.', session=None):
        """
        Ensures that all orig files mentioned in the changes file are present
        in target_dir. If they do not exist, they are symlinked into place.

        A list containing the symlinks that were created is returned (so they
        can be removed).
        """

        symlinked = []
        cnf = Config()

        for filename, entry in self.pkg.dsc_files.iteritems():
            if not re_is_orig_source.match(filename):
                # File is not an orig; ignore
                continue

            if os.path.exists(filename):
                # File exists, no need to continue
                continue

            # Symlink `path' into target_dir iff its size and md5sum match
            # what the .dsc expects for this orig file.
            def symlink_if_valid(path):
                f = utils.open_file(path)
                md5sum = apt_pkg.md5sum(f)
                f.close()

                fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
                expected = (int(entry['size']), entry['md5sum'])

                if fingerprint != expected:
                    return False

                dest = os.path.join(target_dir, filename)

                os.symlink(path, dest)
                symlinked.append(dest)

                return True

            # Use the caller's session if one was given, otherwise open
            # (and later close) our own.
            session_ = session
            if session is None:
                session_ = DBConn().session()

            found = False

            # Look in the pool
            for poolfile in get_poolfile_like_name('/%s' % filename, session_):
                poolfile_path = os.path.join(
                    poolfile.location.path, poolfile.filename
                )

                if symlink_if_valid(poolfile_path):
                    found = True
                    break

            if session is None:
                session_.close()

            if found:
                continue

            # Look in some other queues for the file
            queues = ('New', 'Byhand', 'ProposedUpdates',
                'OldProposedUpdates', 'Embargoed', 'Unembargoed')

            for queue in queues:
                if not cnf.get('Dir::Queue::%s' % queue):
                    continue

                queuefile_path = os.path.join(
                    cnf['Dir::Queue::%s' % queue], filename
                )

                if not os.path.exists(queuefile_path):
                    # Does not exist in this queue
                    continue

                if symlink_if_valid(queuefile_path):
                    break

        return symlinked
1274
1275     ###########################################################################
1276
    def check_lintian(self):
        """
        Extends self.rejects by checking the output of lintian against tags
        specified in Dinstall::LintianTags.
        """

        cnf = Config()

        # Don't reject binary uploads
        if not self.pkg.changes['architecture'].has_key('source'):
            return

        # Only check some distributions
        for dist in ('unstable', 'experimental'):
            if dist in self.pkg.changes['distribution']:
                break
        else:
            return

        # If we do not have a tagfile, don't do anything
        tagfile = cnf.get("Dinstall::LintianTags")
        if tagfile is None:
            return

        # Parse the yaml file
        sourcefile = file(tagfile, 'r')
        sourcecontent = sourcefile.read()
        sourcefile.close()

        try:
            # NOTE(review): yaml.load without an explicit Loader can construct
            # arbitrary Python objects; the tag file is operator-controlled
            # here, but yaml.safe_load would be safer.
            lintiantags = yaml.load(sourcecontent)['lintian']
        except yaml.YAMLError, msg:
            utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
            return

        # Try and find all orig mentioned in the .dsc
        symlinked = self.ensure_orig()

        # Setup the input file for lintian
        fd, temp_filename = utils.temp_filename()
        temptagfile = os.fdopen(fd, 'w')
        for tags in lintiantags.values():
            temptagfile.writelines(['%s\n' % x for x in tags])
        temptagfile.close()

        try:
            cmd = "lintian --show-overrides --tags-from-file %s %s" % \
                (temp_filename, self.pkg.changes_file)

            result, output = commands.getstatusoutput(cmd)
        finally:
            # Remove our tempfile and any symlinks we created
            os.unlink(temp_filename)

            for symlink in symlinked:
                os.unlink(symlink)

        # Exit status 2 means lintian itself failed (not just tags found)
        if result == 2:
            utils.warn("lintian failed for %s [return code: %s]." % \
                (self.pkg.changes_file, result))
            utils.warn(utils.prefix_multi_line_string(output, \
                " [possible output:] "))

        # Log helper used by generate_reject_messages below
        def log(*txt):
            if self.logger:
                self.logger.log(
                    [self.pkg.changes_file, "check_lintian"] + list(txt)
                )

        # Generate messages
        parsed_tags = parse_lintian_output(output)
        self.rejects.extend(
            generate_reject_messages(parsed_tags, lintiantags, log=log)
        )
1351
1352     ###########################################################################
1353     def check_urgency(self):
1354         cnf = Config()
1355         if self.pkg.changes["architecture"].has_key("source"):
1356             if not self.pkg.changes.has_key("urgency"):
1357                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1358             self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1359             if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1360                 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1361                                      (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1362                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1363
1364     ###########################################################################
1365
1366     # Sanity check the time stamps of files inside debs.
1367     # [Files in the near future cause ugly warnings and extreme time
1368     #  travel can cause errors on extraction]
1369
    def check_timestamps(self):
        """
        Sanity-check the timestamps of the members of every .deb in the
        upload via the TarTime callback: reject debs containing files
        stamped beyond Dinstall::FutureTimeTravelGrace in the future or
        before Dinstall::PastCutoffYear.  Problems go to self.rejects.
        """
        Cnf = Config()

        future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
        past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
        tar = TarTime(future_cutoff, past_cutoff)

        for filename, entry in self.pkg.files.items():
            if entry["type"] == "deb":
                tar.reset()
                try:
                    deb_file = utils.open_file(filename)
                    apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
                    deb_file.seek(0)
                    try:
                        apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
                    except SystemError, e:
                        # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
                        if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
                            raise
                        deb_file.seek(0)
                        apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")

                    deb_file.close()

                    future_files = tar.future_files.keys()
                    if future_files:
                        num_future_files = len(future_files)
                        future_file = future_files[0]
                        future_date = tar.future_files[future_file]
                        self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
                               % (filename, num_future_files, future_file, time.ctime(future_date)))

                    ancient_files = tar.ancient_files.keys()
                    if ancient_files:
                        num_ancient_files = len(ancient_files)
                        ancient_file = ancient_files[0]
                        ancient_date = tar.ancient_files[ancient_file]
                        self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
                               % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
                # NOTE(review): deliberately broad catch — any failure to
                # unpack the deb becomes a reject rather than a crash.
                except:
                    self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1412
1413     def check_if_upload_is_sponsored(self, uid_email, uid_name):
1414         if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1415             sponsored = False
1416         elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1417             sponsored = False
1418             if uid_name == "":
1419                 sponsored = True
1420         else:
1421             sponsored = True
1422             if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1423                 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1424                 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1425                     self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1426                         self.pkg.changes["sponsoremail"] = uid_email
1427
1428         return sponsored
1429
1430
1431     ###########################################################################
1432     # check_signed_by_key checks
1433     ###########################################################################
1434
1435     def check_signed_by_key(self):
1436         """Ensure the .changes is signed by an authorized uploader."""
1437         session = DBConn().session()
1438
1439         # First of all we check that the person has proper upload permissions
1440         # and that this upload isn't blocked
1441         fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1442
1443         if fpr is None:
1444             self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1445             return
1446
1447         # TODO: Check that import-keyring adds UIDs properly
1448         if not fpr.uid:
1449             self.rejects.append("Cannot find uid for fingerprint %s.  Please contact ftpmaster@debian.org" % fpr.fingerprint)
1450             return
1451
1452         # Check that the fingerprint which uploaded has permission to do so
1453         self.check_upload_permissions(fpr, session)
1454
1455         # Check that this package is not in a transition
1456         self.check_transition(session)
1457
1458         session.close()
1459
1460
1461     def check_upload_permissions(self, fpr, session):
1462         # Check any one-off upload blocks
1463         self.check_upload_blocks(fpr, session)
1464
1465         # Start with DM as a special case
1466         # DM is a special case unfortunately, so we check it first
1467         # (keys with no source access get more access than DMs in one
1468         #  way; DMs can only upload for their packages whether source
1469         #  or binary, whereas keys with no access might be able to
1470         #  upload some binaries)
1471         if fpr.source_acl.access_level == 'dm':
1472             self.check_dm_upload(fpr, session)
1473         else:
1474             # Check source-based permissions for other types
1475             if self.pkg.changes["architecture"].has_key("source") and \
1476                 fpr.source_acl.access_level is None:
1477                 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1478                 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1479                 self.rejects.append(rej)
1480                 return
1481             # If not a DM, we allow full upload rights
1482             uid_email = "%s@debian.org" % (fpr.uid.uid)
1483             self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1484
1485
1486         # Check binary upload permissions
1487         # By this point we know that DMs can't have got here unless they
1488         # are allowed to deal with the package concerned so just apply
1489         # normal checks
1490         if fpr.binary_acl.access_level == 'full':
1491             return
1492
1493         # Otherwise we're in the map case
1494         tmparches = self.pkg.changes["architecture"].copy()
1495         tmparches.pop('source', None)
1496
1497         for bam in fpr.binary_acl_map:
1498             tmparches.pop(bam.architecture.arch_string, None)
1499
1500         if len(tmparches.keys()) > 0:
1501             if fpr.binary_reject:
1502                 rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1503                 rej += "\narchitectures involved are: ", ",".join(tmparches.keys())
1504                 self.rejects.append(rej)
1505             else:
1506                 # TODO: This is where we'll implement reject vs throw away binaries later
1507                 rej = "Uhm.  I'm meant to throw away the binaries now but that's not implemented yet"
1508                 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1509                 rej += "\nFingerprint: %s", (fpr.fingerprint)
1510                 self.rejects.append(rej)
1511
1512
1513     def check_upload_blocks(self, fpr, session):
1514         """Check whether any upload blocks apply to this source, source
1515            version, uid / fpr combination"""
1516
1517         def block_rej_template(fb):
1518             rej = 'Manual upload block in place for package %s' % fb.source
1519             if fb.version is not None:
1520                 rej += ', version %s' % fb.version
1521             return rej
1522
1523         for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1524             # version is None if the block applies to all versions
1525             if fb.version is None or fb.version == self.pkg.changes['version']:
1526                 # Check both fpr and uid - either is enough to cause a reject
1527                 if fb.fpr is not None:
1528                     if fb.fpr.fingerprint == fpr.fingerprint:
1529                         self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1530                 if fb.uid is not None:
1531                     if fb.uid == fpr.uid:
1532                         self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1533
1534
    def check_dm_upload(self, fpr, session):
        """
        Apply the Debian Maintainer upload restrictions from GR 2007-003
        (http://www.debian.org/vote/2007/vote_003) to this upload.

        Any violation is appended to self.rejects; the method returns early
        after the first failed criterion.

        @param fpr: Fingerprint database object of the (DM) signing key
        @param session: database session for the source/binary lookups
        """
        # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
        ## none of the uploaded packages are NEW
        rej = False
        for f in self.pkg.files.keys():
            if self.pkg.files[f].has_key("byhand"):
                self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
                rej = True
            if self.pkg.files[f].has_key("new"):
                self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
                rej = True

        if rej:
            return

        ## the most recent version of the package uploaded to unstable or
        ## experimental includes the field "DM-Upload-Allowed: yes" in the source
        ## section of its control file
        # Highest-versioned existing source in unstable/experimental.
        q = session.query(DBSource).filter_by(source=self.pkg.changes["source"])
        q = q.join(SrcAssociation)
        q = q.join(Suite).filter(Suite.suite_name.in_(['unstable', 'experimental']))
        q = q.order_by(desc('source.version')).limit(1)

        r = q.all()

        if len(r) != 1:
            rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
            self.rejects.append(rej)
            return

        r = r[0]
        if not r.dm_upload_allowed:
            rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
            self.rejects.append(rej)
            return

        ## the Maintainer: field of the uploaded .changes file corresponds with
        ## the owner of the key used (ie, non-developer maintainers may not sponsor
        ## uploads)
        if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
            self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))

        ## the most recent version of the package uploaded to unstable or
        ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
        ## non-developer maintainers cannot NMU or hijack packages)

        # srcuploaders includes the maintainer
        accept = False
        for sup in r.srcuploaders:
            (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
            # Eww - I hope we never have two people with the same name in Debian
            if email == fpr.uid.uid or name == fpr.uid.name:
                accept = True
                break

        if not accept:
            self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
            return

        ## none of the packages are being taken over from other source packages
        # Any binary in this upload that an *other* source package currently
        # provides in a target suite counts as a hijack attempt.
        for b in self.pkg.changes["binary"].keys():
            for suite in self.pkg.changes["distribution"].keys():
                q = session.query(DBSource)
                q = q.join(DBBinary).filter_by(package=b)
                q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)

                for s in q.all():
                    if s.source != self.pkg.changes["source"]:
                        self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s, suite))
1604
1605
1606
1607     def check_transition(self, session):
1608         cnf = Config()
1609
1610         sourcepkg = self.pkg.changes["source"]
1611
1612         # No sourceful upload -> no need to do anything else, direct return
1613         # We also work with unstable uploads, not experimental or those going to some
1614         # proposed-updates queue
1615         if "source" not in self.pkg.changes["architecture"] or \
1616            "unstable" not in self.pkg.changes["distribution"]:
1617             return
1618
1619         # Also only check if there is a file defined (and existant) with
1620         # checks.
1621         transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1622         if transpath == "" or not os.path.exists(transpath):
1623             return
1624
1625         # Parse the yaml file
1626         sourcefile = file(transpath, 'r')
1627         sourcecontent = sourcefile.read()
1628         try:
1629             transitions = yaml.load(sourcecontent)
1630         except yaml.YAMLError, msg:
1631             # This shouldn't happen, there is a wrapper to edit the file which
1632             # checks it, but we prefer to be safe than ending up rejecting
1633             # everything.
1634             utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1635             return
1636
1637         # Now look through all defined transitions
1638         for trans in transitions:
1639             t = transitions[trans]
1640             source = t["source"]
1641             expected = t["new"]
1642
1643             # Will be None if nothing is in testing.
1644             current = get_source_in_suite(source, "testing", session)
1645             if current is not None:
1646                 compare = apt_pkg.VersionCompare(current.version, expected)
1647
1648             if current is None or compare < 0:
1649                 # This is still valid, the current version in testing is older than
1650                 # the new version we wait for, or there is none in testing yet
1651
1652                 # Check if the source we look at is affected by this.
1653                 if sourcepkg in t['packages']:
1654                     # The source is affected, lets reject it.
1655
1656                     rejectmsg = "%s: part of the %s transition.\n\n" % (
1657                         sourcepkg, trans)
1658
1659                     if current is not None:
1660                         currentlymsg = "at version %s" % (current.version)
1661                     else:
1662                         currentlymsg = "not present in testing"
1663
1664                     rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1665
1666                     rejectmsg += "\n".join(textwrap.wrap("""Your package
1667 is part of a testing transition designed to get %s migrated (it is
1668 currently %s, we need version %s).  This transition is managed by the
1669 Release Team, and %s is the Release-Team member responsible for it.
1670 Please mail debian-release@lists.debian.org or contact %s directly if you
1671 need further assistance.  You might want to upload to experimental until this
1672 transition is done."""
1673                             % (source, currentlymsg, expected,t["rm"], t["rm"])))
1674
1675                     self.rejects.append(rejectmsg)
1676                     return
1677
1678     ###########################################################################
1679     # End check_signed_by_key checks
1680     ###########################################################################
1681
1682     def build_summaries(self):
1683         """ Build a summary of changes the upload introduces. """
1684
1685         (byhand, new, summary, override_summary) = self.pkg.file_summary()
1686
1687         short_summary = summary
1688
1689         # This is for direport's benefit...
1690         f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1691
1692         if byhand or new:
1693             summary += "Changes: " + f
1694
1695         summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1696
1697         summary += self.announce(short_summary, 0)
1698
1699         return (summary, short_summary)
1700
1701     ###########################################################################
1702
1703     def close_bugs(self, summary, action):
1704         """
1705         Send mail to close bugs as instructed by the closes field in the changes file.
1706         Also add a line to summary if any work was done.
1707
1708         @type summary: string
1709         @param summary: summary text, as given by L{build_summaries}
1710
1711         @type action: bool
1712         @param action: Set to false no real action will be done.
1713
1714         @rtype: string
1715         @return: summary. If action was taken, extended by the list of closed bugs.
1716
1717         """
1718
1719         template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1720
1721         bugs = self.pkg.changes["closes"].keys()
1722
1723         if not bugs:
1724             return summary
1725
1726         bugs.sort()
1727         summary += "Closing bugs: "
1728         for bug in bugs:
1729             summary += "%s " % (bug)
1730             if action:
1731                 self.update_subst()
1732                 self.Subst["__BUG_NUMBER__"] = bug
1733                 if self.pkg.changes["distribution"].has_key("stable"):
1734                     self.Subst["__STABLE_WARNING__"] = """
1735 Note that this package is not part of the released stable Debian
1736 distribution.  It may have dependencies on other unreleased software,
1737 or other instabilities.  Please take care if you wish to install it.
1738 The update will eventually make its way into the next released Debian
1739 distribution."""
1740                 else:
1741                     self.Subst["__STABLE_WARNING__"] = ""
1742                 mail_message = utils.TemplateSubst(self.Subst, template)
1743                 utils.send_mail(mail_message)
1744
1745                 # Clear up after ourselves
1746                 del self.Subst["__BUG_NUMBER__"]
1747                 del self.Subst["__STABLE_WARNING__"]
1748
1749         if action and self.logger:
1750             self.logger.log(["closing bugs"] + bugs)
1751
1752         summary += "\n"
1753
1754         return summary
1755
1756     ###########################################################################
1757
    def announce(self, short_summary, action):
        """
        Send an announce mail about a new upload.

        @type short_summary: string
        @param short_summary: Short summary text to include in the mail

        @type action: bool
        @param action: Set to false no real action will be done.

        @rtype: string
        @return: Textstring about action taken.

        """

        cnf = Config()
        announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')

        # Only do announcements for source uploads with a recent dpkg-dev installed
        # NOTE(review): the "format" field is compared numerically, so this
        # presumably only works for 1.x changes formats — confirm.
        if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
           self.pkg.changes["architecture"].has_key("source"):
            return ""

        # Track which announce lists we already mailed so that suites
        # sharing a list get a single announcement.
        lists_done = {}
        summary = ""

        self.Subst["__SHORT_SUMMARY__"] = short_summary

        for dist in self.pkg.changes["distribution"].keys():
            announce_list = cnf.Find("Suite::%s::Announce" % (dist))
            if announce_list == "" or lists_done.has_key(announce_list):
                continue

            lists_done[announce_list] = 1
            summary += "Announcing to %s\n" % (announce_list)

            if action:
                self.update_subst()
                self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
                # Bcc the package-tracking server on sourceful uploads.
                if cnf.get("Dinstall::TrackingServer") and \
                   self.pkg.changes["architecture"].has_key("source"):
                    trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
                    self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto

                mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
                utils.send_mail(mail_message)

                # Clean the per-list substitution back out again.
                del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]

        if cnf.FindB("Dinstall::CloseBugs"):
            summary = self.close_bugs(summary, action)

        del self.Subst["__SHORT_SUMMARY__"]

        return summary
1813
1814     ###########################################################################
    @session_wrapper
    def accept (self, summary, short_summary, session=None):
        """
        Accept an upload.

        This moves all files referenced from the .changes into the pool,
        sends the accepted mail, announces to lists, closes bugs and
        also checks for override disparities. If enabled it will write out
        the version history for the BTS Version Tracking and will finally call
        L{queue_build}.

        @type summary: string
        @param summary: Summary text

        @type short_summary: string
        @param short_summary: Short summary

        @param session: database session; supplied by the @session_wrapper
            decorator when not passed explicitly
        """

        cnf = Config()
        stats = SummaryStats()

        print "Installing."
        self.logger.log(["installing changes", self.pkg.changes_file])

        # Pool file DB objects created by this upload; used later to feed
        # the copy queues.
        poolfiles = []

        # Add the .dsc file to the DB first
        for newfile, entry in self.pkg.files.items():
            if entry["type"] == "dsc":
                source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
                for j in pfs:
                    poolfiles.append(j)

        # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
        for newfile, entry in self.pkg.files.items():
            if entry["type"] == "deb":
                poolfiles.append(add_deb_to_db(self, newfile, session))

        # If this is a sourceful diff only upload that is moving
        # cross-component we need to copy the .orig files into the new
        # component too for the same reasons as above.
        # XXX: mhy: I think this should be in add_dsc_to_db
        # NOTE(review): source, dsc_component and dsc_location_id are only
        # bound if a "dsc" entry was found in the loop above; a sourceful
        # changes without one would raise NameError here — presumably
        # guaranteed by earlier checks, confirm.
        if self.pkg.changes["architecture"].has_key("source"):
            for orig_file in self.pkg.orig_files.keys():
                if not self.pkg.orig_files[orig_file].has_key("id"):
                    continue # Skip if it's not in the pool
                orig_file_id = self.pkg.orig_files[orig_file]["id"]
                if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
                    continue # Skip if the location didn't change

                # Do the move
                oldf = get_poolfile_by_id(orig_file_id, session)
                old_filename = os.path.join(oldf.location.path, oldf.filename)
                old_dat = {'size': oldf.filesize,   'md5sum': oldf.md5sum,
                           'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}

                new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))

                # TODO: Care about size/md5sum collisions etc
                (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)

                # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
                if newf is None:
                    utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
                    newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)

                    session.flush()

                    # Don't reference the old file from this changes
                    for p in poolfiles:
                        if p.file_id == oldf.file_id:
                            poolfiles.remove(p)

                    poolfiles.append(newf)

                    # Fix up the DSC references
                    toremove = []

                    for df in source.srcfiles:
                        if df.poolfile.file_id == oldf.file_id:
                            # Add a new DSC entry and mark the old one for deletion
                            # Don't do it in the loop so we don't change the thing we're iterating over
                            newdscf = DSCFile()
                            newdscf.source_id = source.source_id
                            newdscf.poolfile_id = newf.file_id
                            session.add(newdscf)

                            toremove.append(df)

                    for df in toremove:
                        session.delete(df)

                    # Flush our changes
                    session.flush()

                    # Make sure that our source object is up-to-date
                    session.expire(source)

        # Install the files into the pool
        for newfile, entry in self.pkg.files.items():
            destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
            utils.move(newfile, destination)
            self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
            stats.accept_bytes += float(entry["size"])

        # Copy the .changes file across for suite which need it.
        copy_changes = {}
        for suite_name in self.pkg.changes["distribution"].keys():
            if cnf.has_key("Suite::%s::CopyChanges" % (suite_name)):
                copy_changes[cnf["Suite::%s::CopyChanges" % (suite_name)]] = ""

        for dest in copy_changes.keys():
            utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))

        # We're done - commit the database changes
        session.commit()
        # Our SQL session will automatically start a new transaction after
        # the last commit

        # Move the .changes into the 'done' directory
        utils.move(self.pkg.changes_file,
                   os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))

        if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
            UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])

        # Send the "accepted" mail and announce to the configured lists.
        self.update_subst()
        self.Subst["__SUITE__"] = ""
        self.Subst["__SUMMARY__"] = summary
        mail_message = utils.TemplateSubst(self.Subst,
                                           os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
        utils.send_mail(mail_message)
        self.announce(short_summary, 1)

        ## Helper stuff for DebBugs Version Tracking
        if cnf.Find("Dir::Queue::BTSVersionTrack"):
            # Write the changelog-derived version history for sourceful
            # uploads.  [:-8] strips the ".changes" suffix.
            if self.pkg.changes["architecture"].has_key("source"):
                (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
                version_history = os.fdopen(fd, 'w')
                version_history.write(self.pkg.dsc["bts changelog"])
                version_history.close()
                filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
                                      self.pkg.changes_file[:-8]+".versions")
                os.rename(temp_filename, filename)
                os.chmod(filename, 0644)

            # Write out the binary -> source mapping.
            (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
            debinfo = os.fdopen(fd, 'w')
            for name, entry in sorted(self.pkg.files.items()):
                if entry["type"] == "deb":
                    line = " ".join([entry["package"], entry["version"],
                                     entry["architecture"], entry["source package"],
                                     entry["source version"]])
                    debinfo.write(line+"\n")
            debinfo.close()
            filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
                                  self.pkg.changes_file[:-8]+".debinfo")
            os.rename(temp_filename, filename)
            os.chmod(filename, 0644)

        session.commit()

        # Set up our copy queues (e.g. buildd queues)
        for suite_name in self.pkg.changes["distribution"].keys():
            suite = get_suite(suite_name, session)
            for q in suite.copy_queues:
                for f in poolfiles:
                    q.add_file_from_pool(f)

        session.commit()

        # Finally...
        stats.accept_count += 1
1989
1990     def check_override(self):
1991         """
1992         Checks override entries for validity. Mails "Override disparity" warnings,
1993         if that feature is enabled.
1994
1995         Abandons the check if
1996           - override disparity checks are disabled
1997           - mail sending is disabled
1998         """
1999
2000         cnf = Config()
2001
2002         # Abandon the check if override disparity checks have been disabled
2003         if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
2004             return
2005
2006         summary = self.pkg.check_override()
2007
2008         if summary == "":
2009             return
2010
2011         overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2012
2013         self.update_subst()
2014         self.Subst["__SUMMARY__"] = summary
2015         mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2016         utils.send_mail(mail_message)
2017         del self.Subst["__SUMMARY__"]
2018
2019     ###########################################################################
2020
2021     def remove(self, from_dir=None):
2022         """
2023         Used (for instance) in p-u to remove the package from unchecked
2024
2025         Also removes the package from holding area.
2026         """
2027         if from_dir is None:
2028             from_dir = self.pkg.directory
2029         h = Holding()
2030
2031         for f in self.pkg.files.keys():
2032             os.unlink(os.path.join(from_dir, f))
2033             if os.path.exists(os.path.join(h.holding_dir, f)):
2034                 os.unlink(os.path.join(h.holding_dir, f))
2035
2036         os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2037         if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2038             os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2039
2040     ###########################################################################
2041
2042     def move_to_queue (self, queue):
2043         """
2044         Move files to a destination queue using the permissions in the table
2045         """
2046         h = Holding()
2047         utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2048                    queue.path, perms=int(queue.change_perms, 8))
2049         for f in self.pkg.files.keys():
2050             utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
2051
2052     ###########################################################################
2053
    def force_reject(self, reject_files):
        """
        Forcefully move files from the current directory to the
        reject directory.  If any file already exists in the reject
        directory it will be moved to the morgue to make way for
        the new file.

        NOTE(review): the code below actually finds a numbered free name
        (utils.find_next_free) rather than moving the old file to the
        morgue as this docstring claims — confirm which is intended.

        @type reject_files: dict
        @param reject_files: file dictionary

        """

        cnf = Config()

        for file_entry in reject_files:
            # Skip any files which don't exist or which we don't have permission to copy.
            if os.access(file_entry, os.R_OK) == 0:
                continue

            dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)

            try:
                # O_EXCL: creating the file atomically claims the name for us.
                dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
            except OSError, e:
                # File exists?  Let's find a new name by adding a number
                if e.errno == errno.EEXIST:
                    try:
                        dest_file = utils.find_next_free(dest_file, 255)
                    except NoFreeFilenameError:
                        # Something's either gone badly Pete Tong, or
                        # someone is trying to exploit us.
                        utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))
                        return

                    # Make sure we really got it
                    try:
                        dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
                    except OSError, e:
                        # Likewise
                        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
                        return
                else:
                    raise
            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file_entry, dest_file, 1, perms=0660)
            os.close(dest_fd)
2101
2102     ###########################################################################
    def do_reject (self, manual=0, reject_message="", notes=""):
        """
        Reject an upload: move its files into the reject directory, write
        a <changes>.reason file there and send the rejection mail.

        @type manual: bool
        @param manual: manual or automated rejection

        @type reject_message: string
        @param reject_message: A reject message

        @type notes: list
        @param notes: note objects (with author/version/notedate/comment
            attributes) used to pre-fill the editor buffer for manual
            rejections

        @return: 0 on success; 1 if the user abandoned a manual rejection
        """
        # If we weren't given a manual rejection message, spawn an
        # editor so the user can add one in...
        # NOTE(review): the editor is only spawned when manual is set AND
        # no reject_message was supplied.
        if manual and not reject_message:
            (fd, temp_filename) = utils.temp_filename()
            temp_file = os.fdopen(fd, 'w')
            if len(notes) > 0:
                for note in notes:
                    temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
                                    % (note.author, note.version, note.notedate, note.comment))
            temp_file.close()
            editor = os.environ.get("EDITOR","vi")
            answer = 'E'
            # 'E' (edit) loops back into the editor; anything else falls out.
            while answer == 'E':
                os.system("%s %s" % (editor, temp_filename))
                temp_fh = utils.open_file(temp_filename)
                reject_message = "".join(temp_fh.readlines())
                temp_fh.close()
                print "Reject message:"
                print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
                prompt = "[R]eject, Edit, Abandon, Quit ?"
                answer = "XXX"
                # Keep prompting until the (upper-cased first letter of the)
                # input occurs in the prompt; empty input selects the default
                # extracted from the prompt by re_default_answer.
                while prompt.find(answer) == -1:
                    answer = utils.our_raw_input(prompt)
                    m = re_default_answer.search(prompt)
                    if answer == "":
                        answer = m.group(1)
                    answer = answer[:1].upper()
            os.unlink(temp_filename)
            if answer == 'A':
                return 1
            elif answer == 'Q':
                sys.exit(0)

        print "Rejecting.\n"

        cnf = Config()

        # "<foo>.changes" -> "<foo>.reason" in the reject directory.
        reason_filename = self.pkg.changes_file[:-8] + ".reason"
        reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)

        # Move all the files into the reject directory
        reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
        self.force_reject(reject_files)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reason_filename):
            os.unlink(reason_filename)
        reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)

        rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")

        self.update_subst()
        if not manual:
            self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
            self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
            self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
            # Automated rejection: the .reason file is the raw reject message.
            os.write(reason_fd, reject_message)
            reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
        else:
            # Build up the rejection email
            user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
            self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
            self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
            self.Subst["__REJECT_MESSAGE__"] = ""
            self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
            reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
            # Write the rejection email out as the <foo>.reason file
            os.write(reason_fd, reject_mail_message)

        # Drop the one-shot substitutions again.
        del self.Subst["__REJECTOR_ADDRESS__"]
        del self.Subst["__MANUAL_REJECT_MESSAGE__"]
        del self.Subst["__CC__"]

        os.close(reason_fd)

        # Send the rejection mail
        utils.send_mail(reject_mail_message)

        if self.logger:
            self.logger.log(["rejected", self.pkg.changes_file])

        return 0
2200
2201     ################################################################################
2202     def in_override_p(self, package, component, suite, binary_type, filename, session):
2203         """
2204         Check if a package already has override entries in the DB
2205
2206         @type package: string
2207         @param package: package name
2208
2209         @type component: string
2210         @param component: database id of the component
2211
2212         @type suite: int
2213         @param suite: database id of the suite
2214
2215         @type binary_type: string
2216         @param binary_type: type of the package
2217
2218         @type filename: string
2219         @param filename: filename we check
2220
2221         @return: the database result. But noone cares anyway.
2222
2223         """
2224
2225         cnf = Config()
2226
2227         if binary_type == "": # must be source
2228             file_type = "dsc"
2229         else:
2230             file_type = binary_type
2231
2232         # Override suite name; used for example with proposed-updates
2233         if cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
2234             suite = cnf["Suite::%s::OverrideSuite" % (suite)]
2235
2236         result = get_override(package, suite, component, file_type, session)
2237
2238         # If checking for a source package fall back on the binary override type
2239         if file_type == "dsc" and len(result) < 1:
2240             result = get_override(package, suite, component, ['deb', 'udeb'], session)
2241
2242         # Remember the section and priority so we can check them later if appropriate
2243         if len(result) > 0:
2244             result = result[0]
2245             self.pkg.files[filename]["override section"] = result.section.section
2246             self.pkg.files[filename]["override priority"] = result.priority.priority
2247             return result
2248
2249         return None
2250
2251     ################################################################################
2252     def get_anyversion(self, sv_list, suite):
2253         """
2254         @type sv_list: list
2255         @param sv_list: list of (suite, version) tuples to check
2256
2257         @type suite: string
2258         @param suite: suite name
2259
2260         Description: TODO
2261         """
2262         Cnf = Config()
2263         anyversion = None
2264         anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
2265         for (s, v) in sv_list:
2266             if s in [ x.lower() for x in anysuite ]:
2267                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2268                     anyversion = v
2269
2270         return anyversion
2271
2272     ################################################################################
2273
    def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
        """
        Ensure versions are newer than existing packages in target
        suites and that cross-suite version checking rules as
        set out in the conf file are satisfied.

        @type sv_list: list
        @param sv_list: list of (suite, version) tuples to check

        @type filename: string
        @param filename: filename being checked; used in reject/warning text

        @type new_version: string
        @param new_version: version of the uploaded package

        @type sourceful: bool
        @param sourceful: whether the upload includes source; the
            MustBeNewerThan check is only enforced for sourceful uploads

        Appends to self.rejects / self.warnings and may mark additional
        suites in self.pkg.changes["propdistribution"] for propagation.
        """

        cnf = Config()

        # Check versions for each target suite
        for target_suite in self.pkg.changes["distribution"].keys():
            must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
            must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]

            # Enforce "must be newer than target suite" even if conffile omits it
            if target_suite not in must_be_newer_than:
                must_be_newer_than.append(target_suite)

            for (suite, existent_version) in sv_list:
                vercmp = apt_pkg.VersionCompare(new_version, existent_version)

                # vercmp < 1 means new_version <= existent_version.
                if suite in must_be_newer_than and sourceful and vercmp < 1:
                    self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))

                # vercmp > -1 means new_version >= existent_version, which
                # violates "must be older than" unless we can save it below.
                if suite in must_be_older_than and vercmp > -1:
                    cansave = 0

                    if self.pkg.changes.get('distribution-version', {}).has_key(suite):
                        # we really use the other suite, ignoring the conflicting one ...
                        addsuite = self.pkg.changes["distribution-version"][suite]

                        add_version = self.get_anyversion(sv_list, addsuite)
                        target_version = self.get_anyversion(sv_list, target_suite)

                        if not add_version:
                            # not add_version can only happen if we map to a suite
                            # that doesn't enhance the suite we're propup'ing from.
                            # so "propup-ver x a b c; map a d" is a problem only if
                            # d doesn't enhance a.
                            #
                            # i think we could always propagate in this case, rather
                            # than complaining. either way, this isn't a REJECT issue
                            #
                            # And - we really should complain to the dorks who configured dak
                            self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite))
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            cansave = 1
                        elif not target_version:
                            # not targets_version is true when the package is NEW
                            # we could just stick with the "...old version..." REJECT
                            # for this, I think.
                            self.rejects.append("Won't propogate NEW packages.")
                        elif apt_pkg.VersionCompare(new_version, add_version) < 0:
                            # propogation would be redundant. no need to reject though.
                            self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
                            cansave = 1
                        elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
                             apt_pkg.VersionCompare(add_version, target_version) >= 0:
                            # propogate!!
                            self.warnings.append("Propogating upload to %s" % (addsuite))
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            cansave = 1

                    if not cansave:
                        self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2350
2351     ################################################################################
2352     def check_binary_against_db(self, filename, session):
2353         # Ensure version is sane
2354         q = session.query(BinAssociation)
2355         q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[filename]["package"])
2356         q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[filename]["architecture"], 'all']))
2357
2358         self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
2359                                        filename, self.pkg.files[filename]["version"], sourceful=False)
2360
2361         # Check for any existing copies of the file
2362         q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2363         q = q.filter_by(version=self.pkg.files[filename]["version"])
2364         q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2365
2366         if q.count() > 0:
2367             self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
2368
2369     ################################################################################
2370
2371     def check_source_against_db(self, filename, session):
2372         source = self.pkg.dsc.get("source")
2373         version = self.pkg.dsc.get("version")
2374
2375         # Ensure version is sane
2376         q = session.query(SrcAssociation)
2377         q = q.join(DBSource).filter(DBSource.source==source)
2378
2379         self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
2380                                        filename, version, sourceful=True)
2381
2382     ################################################################################
2383     def check_dsc_against_db(self, filename, session):
2384         """
2385
2386         @warning: NB: this function can remove entries from the 'files' index [if
2387          the orig tarball is a duplicate of the one in the archive]; if
2388          you're iterating over 'files' and call this function as part of
2389          the loop, be sure to add a check to the top of the loop to
2390          ensure you haven't just tried to dereference the deleted entry.
2391
2392         """
2393
2394         Cnf = Config()
2395         self.pkg.orig_files = {} # XXX: do we need to clear it?
2396         orig_files = self.pkg.orig_files
2397
2398         # Try and find all files mentioned in the .dsc.  This has
2399         # to work harder to cope with the multiple possible
2400         # locations of an .orig.tar.gz.
2401         # The ordering on the select is needed to pick the newest orig
2402         # when it exists in multiple places.
2403         for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2404             found = None
2405             if self.pkg.files.has_key(dsc_name):
2406                 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2407                 actual_size = int(self.pkg.files[dsc_name]["size"])
2408                 found = "%s in incoming" % (dsc_name)
2409
2410                 # Check the file does not already exist in the archive
2411                 ql = get_poolfile_like_name(dsc_name, session)
2412
2413                 # Strip out anything that isn't '%s' or '/%s$'
2414                 for i in ql:
2415                     if not i.filename.endswith(dsc_name):
2416                         ql.remove(i)
2417
2418                 # "[dak] has not broken them.  [dak] has fixed a
2419                 # brokenness.  Your crappy hack exploited a bug in
2420                 # the old dinstall.
2421                 #
2422                 # "(Come on!  I thought it was always obvious that
2423                 # one just doesn't release different files with
2424                 # the same name and version.)"
2425                 #                        -- ajk@ on d-devel@l.d.o
2426
2427                 if len(ql) > 0:
2428                     # Ignore exact matches for .orig.tar.gz
2429                     match = 0
2430                     if re_is_orig_source.match(dsc_name):
2431                         for i in ql:
2432                             if self.pkg.files.has_key(dsc_name) and \
2433                                int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2434                                self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2435                                 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2436                                 # TODO: Don't delete the entry, just mark it as not needed
2437                                 # This would fix the stupidity of changing something we often iterate over
2438                                 # whilst we're doing it
2439                                 del self.pkg.files[dsc_name]
2440                                 dsc_entry["files id"] = i.file_id
2441                                 if not orig_files.has_key(dsc_name):
2442                                     orig_files[dsc_name] = {}
2443                                 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2444                                 match = 1
2445
2446                                 # Don't bitch that we couldn't find this file later
2447                                 try:
2448                                     self.later_check_files.remove(dsc_name)
2449                                 except ValueError:
2450                                     pass
2451
2452
2453                     if not match:
2454                         self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2455
2456             elif re_is_orig_source.match(dsc_name):
2457                 # Check in the pool
2458                 ql = get_poolfile_like_name(dsc_name, session)
2459
2460                 # Strip out anything that isn't '%s' or '/%s$'
2461                 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2462                 for i in ql:
2463                     if not i.filename.endswith(dsc_name):
2464                         ql.remove(i)
2465
2466                 if len(ql) > 0:
2467                     # Unfortunately, we may get more than one match here if,
2468                     # for example, the package was in potato but had an -sa
2469                     # upload in woody.  So we need to choose the right one.
2470
2471                     # default to something sane in case we don't match any or have only one
2472                     x = ql[0]
2473
2474                     if len(ql) > 1:
2475                         for i in ql:
2476                             old_file = os.path.join(i.location.path, i.filename)
2477                             old_file_fh = utils.open_file(old_file)
2478                             actual_md5 = apt_pkg.md5sum(old_file_fh)
2479                             old_file_fh.close()
2480                             actual_size = os.stat(old_file)[stat.ST_SIZE]
2481                             if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2482                                 x = i
2483
2484                     old_file = os.path.join(i.location.path, i.filename)
2485                     old_file_fh = utils.open_file(old_file)
2486                     actual_md5 = apt_pkg.md5sum(old_file_fh)
2487                     old_file_fh.close()
2488                     actual_size = os.stat(old_file)[stat.ST_SIZE]
2489                     found = old_file
2490                     suite_type = x.location.archive_type
2491                     # need this for updating dsc_files in install()
2492                     dsc_entry["files id"] = x.file_id
2493                     # See install() in process-accepted...
2494                     if not orig_files.has_key(dsc_name):
2495                         orig_files[dsc_name] = {}
2496                     orig_files[dsc_name]["id"] = x.file_id
2497                     orig_files[dsc_name]["path"] = old_file
2498                     orig_files[dsc_name]["location"] = x.location.location_id
2499                 else:
2500                     # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2501                     # Not there? Check the queue directories...
2502                     for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2503                         if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2504                             continue
2505                         in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2506                         if os.path.exists(in_otherdir):
2507                             in_otherdir_fh = utils.open_file(in_otherdir)
2508                             actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2509                             in_otherdir_fh.close()
2510                             actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2511                             found = in_otherdir
2512                             if not orig_files.has_key(dsc_name):
2513                                 orig_files[dsc_name] = {}
2514                             orig_files[dsc_name]["path"] = in_otherdir
2515
2516                     if not found:
2517                         self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2518                         continue
2519             else:
2520                 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2521                 continue
2522             if actual_md5 != dsc_entry["md5sum"]:
2523                 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2524             if actual_size != int(dsc_entry["size"]):
2525                 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2526
2527     ################################################################################
2528     # This is used by process-new and process-holding to recheck a changes file
2529     # at the time we're running.  It mainly wraps various other internal functions
2530     # and is similar to accepted_checks - these should probably be tidied up
2531     # and combined
2532     def recheck(self, session):
2533         cnf = Config()
2534         for f in self.pkg.files.keys():
2535             # The .orig.tar.gz can disappear out from under us is it's a
2536             # duplicate of one in the archive.
2537             if not self.pkg.files.has_key(f):
2538                 continue
2539
2540             entry = self.pkg.files[f]
2541
2542             # Check that the source still exists
2543             if entry["type"] == "deb":
2544                 source_version = entry["source version"]
2545                 source_package = entry["source package"]
2546                 if not self.pkg.changes["architecture"].has_key("source") \
2547                    and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
2548                     source_epochless_version = re_no_epoch.sub('', source_version)
2549                     dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2550                     found = False
2551                     for q in ["Embargoed", "Unembargoed", "Newstage"]:
2552                         if cnf.has_key("Dir::Queue::%s" % (q)):
2553                             if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
2554                                 found = True
2555                     if not found:
2556                         self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2557
2558             # Version and file overwrite checks
2559             if entry["type"] == "deb":
2560                 self.check_binary_against_db(f, session)
2561             elif entry["type"] == "dsc":
2562                 self.check_source_against_db(f, session)
2563                 self.check_dsc_against_db(f, session)
2564
2565     ################################################################################
2566     def accepted_checks(self, overwrite_checks, session):
2567         # Recheck anything that relies on the database; since that's not
2568         # frozen between accept and our run time when called from p-a.
2569
2570         # overwrite_checks is set to False when installing to stable/oldstable
2571
2572         propogate={}
2573         nopropogate={}
2574
2575         # Find the .dsc (again)
2576         dsc_filename = None
2577         for f in self.pkg.files.keys():
2578             if self.pkg.files[f]["type"] == "dsc":
2579                 dsc_filename = f
2580
2581         for checkfile in self.pkg.files.keys():
2582             # The .orig.tar.gz can disappear out from under us is it's a
2583             # duplicate of one in the archive.
2584             if not self.pkg.files.has_key(checkfile):
2585                 continue
2586
2587             entry = self.pkg.files[checkfile]
2588
2589             # Check that the source still exists
2590             if entry["type"] == "deb":
2591                 source_version = entry["source version"]
2592                 source_package = entry["source package"]
2593                 if not self.pkg.changes["architecture"].has_key("source") \
2594                    and not source_exists(source_package, source_version,  self.pkg.changes["distribution"].keys()):
2595                     self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2596
2597             # Version and file overwrite checks
2598             if overwrite_checks:
2599                 if entry["type"] == "deb":
2600                     self.check_binary_against_db(checkfile, session)
2601                 elif entry["type"] == "dsc":
2602                     self.check_source_against_db(checkfile, session)
2603                     self.check_dsc_against_db(dsc_filename, session)
2604
2605             # propogate in the case it is in the override tables:
2606             for suite in self.pkg.changes.get("propdistribution", {}).keys():
2607                 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2608                     propogate[suite] = 1
2609                 else:
2610                     nopropogate[suite] = 1
2611
2612         for suite in propogate.keys():
2613             if suite in nopropogate:
2614                 continue
2615             self.pkg.changes["distribution"][suite] = 1
2616
2617         for checkfile in self.pkg.files.keys():
2618             # Check the package is still in the override tables
2619             for suite in self.pkg.changes["distribution"].keys():
2620                 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2621                     self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2622
2623     ################################################################################
2624     # This is not really a reject, but an unaccept, but since a) the code for
2625     # that is non-trivial (reopen bugs, unannounce etc.), b) this should be
2626     # extremely rare, for now we'll go with whining at our admin folks...
2627
    def do_unaccept(self):
        """
        Notify about an unaccepted upload: build a rejection mail from the
        process-accepted.unaccept template, write it out as the
        <changes>.reason file in the reject directory and send it.

        This does not reopen bugs or unannounce anything; it only informs
        the configured addresses.
        """
        cnf = Config()

        self.update_subst()
        self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
        self.Subst["__BCC__"] = "X-DAK: dak process-accepted"
        if cnf.has_key("Dinstall::Bcc"):
            self.Subst["__BCC__"] += "\nBcc: %s" % (cnf["Dinstall::Bcc"])

        template = os.path.join(cnf["Dir::Templates"], "process-accepted.unaccept")

        reject_mail_message = utils.TemplateSubst(self.Subst, template)

        # Write the rejection email out as the <foo>.reason file
        reason_filename = os.path.basename(self.pkg.changes_file[:-8]) + ".reason"
        reject_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reject_filename):
            os.unlink(reject_filename)

        # O_EXCL after the unlink above: create must succeed or raise.
        fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
        os.write(fd, reject_mail_message)
        os.close(fd)

        utils.send_mail(reject_mail_message)

        # Drop the one-shot substitutions again.
        del self.Subst["__REJECTOR_ADDRESS__"]
        del self.Subst["__REJECT_MESSAGE__"]
        del self.Subst["__CC__"]
2661
2662     ################################################################################
2663     # If any file of an upload has a recent mtime then chances are good
2664     # the file is still being uploaded.
2665
2666     def upload_too_new(self):
2667         cnf = Config()
2668         too_new = False
2669         # Move back to the original directory to get accurate time stamps
2670         cwd = os.getcwd()
2671         os.chdir(self.pkg.directory)
2672         file_list = self.pkg.files.keys()
2673         file_list.extend(self.pkg.dsc_files.keys())
2674         file_list.append(self.pkg.changes_file)
2675         for f in file_list:
2676             try:
2677                 last_modified = time.time()-os.path.getmtime(f)
2678                 if last_modified < int(cnf["Dinstall::SkipTime"]):
2679                     too_new = True
2680                     break
2681             except:
2682                 pass
2683
2684         os.chdir(cwd)
2685         return too_new