Source: daklib/queue.py from dak.git (git.decadent.org.uk, gitweb blob view).
Commit subject: "deal with origs we have in the DB which are referenced in the changes file but not..."
[dak.git] / daklib / queue.py
1 #!/usr/bin/env python
2 # vim:set et sw=4:
3
4 """
5 Queue utility functions for dak
6
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009  Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
11 """
12
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
17
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
21 # GNU General Public License for more details.
22
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
26
27 ###############################################################################
28
29 import errno
30 import os
31 import stat
32 import sys
33 import time
34 import apt_inst
35 import apt_pkg
36 import utils
37 import commands
38 import shutil
39 import textwrap
40 from types import *
41 from sqlalchemy.sql.expression import desc
42 from sqlalchemy.orm.exc import NoResultFound
43
44 import yaml
45
46 from dak_exceptions import *
47 from changes import *
48 from regexes import *
49 from config import Config
50 from holding import Holding
51 from urgencylog import UrgencyLog
52 from dbconn import *
53 from summarystats import SummaryStats
54 from utils import parse_changes, check_dsc_files
55 from textutils import fix_maintainer
56 from binary import Binary
57 from lintian import parse_lintian_output, generate_reject_messages
58
59 ###############################################################################
60
def get_type(f, session):
    """
    Get the file type of C{f}

    @type f: dict
    @param f: file entry from Changes object

    @type session: SQLA Session
    @param session: SQL Alchemy session object

    @rtype: string
    @return: filetype

    """
    # Determine the type
    if f.has_key("dbtype"):
        file_type = f["dbtype"]
    elif re_source_ext.match(f["type"]):
        file_type = "dsc"
    else:
        # Use f["type"] in the message: file_type is unbound on this path and
        # the original code raised UnboundLocalError instead of fubar-ing.
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (f["type"]))

    # Validate the override type
    type_id = get_override_type(file_type, session)
    if type_id is None:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))

    return file_type
89
90 ################################################################################
91
92 # Determine what parts in a .changes are NEW
93
94 def determine_new(changes, files, warn=1):
95     """
96     Determine what parts in a C{changes} file are NEW.
97
98     @type changes: Upload.Pkg.changes dict
99     @param changes: Changes dictionary
100
101     @type files: Upload.Pkg.files dict
102     @param files: Files dictionary
103
104     @type warn: bool
105     @param warn: Warn if overrides are added for (old)stable
106
107     @rtype: dict
108     @return: dictionary of NEW components.
109
110     """
111     new = {}
112
113     session = DBConn().session()
114
115     # Build up a list of potentially new things
116     for name, f in files.items():
117         # Skip byhand elements
118 #        if f["type"] == "byhand":
119 #            continue
120         pkg = f["package"]
121         priority = f["priority"]
122         section = f["section"]
123         file_type = get_type(f, session)
124         component = f["component"]
125
126         if file_type == "dsc":
127             priority = "source"
128
129         if not new.has_key(pkg):
130             new[pkg] = {}
131             new[pkg]["priority"] = priority
132             new[pkg]["section"] = section
133             new[pkg]["type"] = file_type
134             new[pkg]["component"] = component
135             new[pkg]["files"] = []
136         else:
137             old_type = new[pkg]["type"]
138             if old_type != file_type:
139                 # source gets trumped by deb or udeb
140                 if old_type == "dsc":
141                     new[pkg]["priority"] = priority
142                     new[pkg]["section"] = section
143                     new[pkg]["type"] = file_type
144                     new[pkg]["component"] = component
145
146         new[pkg]["files"].append(name)
147
148         if f.has_key("othercomponents"):
149             new[pkg]["othercomponents"] = f["othercomponents"]
150
151     # Fix up the list of target suites
152     cnf = Config()
153     for suite in changes["suite"].keys():
154         override = cnf.Find("Suite::%s::OverrideSuite" % (suite))
155         if override:
156             (olderr, newerr) = (get_suite(suite, session) == None,
157                                 get_suite(override, session) == None)
158             if olderr or newerr:
159                 (oinv, newinv) = ("", "")
160                 if olderr: oinv = "invalid "
161                 if newerr: ninv = "invalid "
162                 print "warning: overriding %ssuite %s to %ssuite %s" % (
163                         oinv, suite, ninv, override)
164             del changes["suite"][suite]
165             changes["suite"][override] = 1
166
167     for suite in changes["suite"].keys():
168         for pkg in new.keys():
169             ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
170             if len(ql) > 0:
171                 for file_entry in new[pkg]["files"]:
172                     if files[file_entry].has_key("new"):
173                         del files[file_entry]["new"]
174                 del new[pkg]
175
176     if warn:
177         for s in ['stable', 'oldstable']:
178             if changes["suite"].has_key(s):
179                 print "WARNING: overrides will be added for %s!" % s
180         for pkg in new.keys():
181             if new[pkg].has_key("othercomponents"):
182                 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
183
184     session.close()
185
186     return new
187
188 ################################################################################
189
def check_valid(new):
    """
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages can not be udeb
      - source priority can only be assigned to dsc file types

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.

    """
    for pkg in new.keys():
        section_name = new[pkg]["section"]
        priority_name = new[pkg]["priority"]
        file_type = new[pkg]["type"]

        # Map the section name to its database id (-1 if unknown).
        section = get_section(section_name)
        if section is None:
            new[pkg]["section id"] = -1
        else:
            new[pkg]["section id"] = section.section_id

        # Map the priority name to its database id (-1 if unknown).
        priority = get_priority(priority_name)
        if priority is None:
            new[pkg]["priority id"] = -1
        else:
            new[pkg]["priority id"] = priority.priority_id

        # Sanity checks
        di = section_name.find("debian-installer") != -1

        # If d-i, we must be udeb and vice-versa
        if     (di and file_type not in ("udeb", "dsc")) or \
           (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1

        # If dsc we need to be source and vice-versa.  Compare the priority
        # *name*: `priority` is a database object (or None), so the original
        # comparison against the string "source" could never be true.
        if (priority_name == "source" and file_type != "dsc") or \
           (priority_name != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1
232 ###############################################################################
233
def check_status(files):
    """Return a (new, byhand) flag pair for the given files dict.

    Each flag is 1 when at least one file entry carries the corresponding
    marker; a "byhand" marker takes precedence over "new" for that entry.
    """
    new = 0
    byhand = 0
    for entry in files.values():
        if "byhand" in entry:
            byhand = 1
        elif "new" in entry:
            new = 1
    return (new, byhand)
242
243 ###############################################################################
244
245 # Used by Upload.check_timestamps
class TarTime(object):
    """Collects tar members whose timestamps fall outside a sane window.

    Names with an mtime after C{future_cutoff} land in C{future_files};
    names with an mtime before C{past_cutoff} land in C{ancient_files}.
    """
    def __init__(self, future_cutoff, past_cutoff):
        self.future_cutoff = future_cutoff
        self.past_cutoff = past_cutoff
        self.reset()

    def reset(self):
        """Forget any previously recorded offenders."""
        self.future_files = {}
        self.ancient_files = {}

    def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
        """Per-member tar visitor; only Name and MTime are inspected."""
        if MTime > self.future_cutoff:
            self.future_files[Name] = MTime
        if MTime < self.past_cutoff:
            self.ancient_files[Name] = MTime
261
262 ###############################################################################
263
264 class Upload(object):
265     """
266     Everything that has to do with an upload processed.
267
268     """
269     def __init__(self):
270         self.logger = None
271         self.pkg = Changes()
272         self.reset()
273
274     ###########################################################################
275
276     def reset (self):
277         """ Reset a number of internal variables."""
278
279         # Initialize the substitution template map
280         cnf = Config()
281         self.Subst = {}
282         self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
283         self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
284         self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
285         self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
286
287         self.rejects = []
288         self.warnings = []
289         self.notes = []
290
291         self.later_check_files = []
292
293         self.pkg.reset()
294
295     def package_info(self):
296         """
297         Format various messages from this Upload to send to the maintainer.
298         """
299
300         msgs = (
301             ('Reject Reasons', self.rejects),
302             ('Warnings', self.warnings),
303             ('Notes', self.notes),
304         )
305
306         msg = ''
307         for title, messages in msgs:
308             if messages:
309                 msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
310         msg += '\n'
311
312         return msg
313
314     ###########################################################################
    def update_subst(self):
        """Set up the per-package template substitution mappings.

        Fills self.Subst with architecture, filename, maintainer and
        version keys derived from self.pkg.changes and the configuration.
        Must be called after load_changes(); tolerates partially-parsed
        changes data (see the guards below).
        """

        cnf = Config()

        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not self.pkg.changes.has_key("architecture") or not \
           isinstance(self.pkg.changes["architecture"], dict):
            self.pkg.changes["architecture"] = { "Unknown" : "" }

        # and maintainer2047 may not exist.
        if not self.pkg.changes.has_key("maintainer2047"):
            self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]

        self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
        self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        # Changed-By only applies when it is non-empty and actually differs
        # from the Maintainer field.
        if self.pkg.changes["architecture"].has_key("source") and \
           self.pkg.changes["changedby822"] != "" and \
           (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):

            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
        else:
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")

        # A sponsored upload also gets a copy sent to the sponsor.
        if "sponsoremail" in self.pkg.changes:
            self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]

        # Bcc the package-tracking server, if one is configured.
        if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
            self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if cnf.get("Dinstall::OverrideMaintainer"):
            self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
            self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]

        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
        self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
360
361     ###########################################################################
362     def load_changes(self, filename):
363         """
364         @rtype: boolean
365         @rvalue: whether the changes file was valid or not.  We may want to
366                  reject even if this is True (see what gets put in self.rejects).
367                  This is simply to prevent us even trying things later which will
368                  fail because we couldn't properly parse the file.
369         """
370         Cnf = Config()
371         self.pkg.changes_file = filename
372
373         # Parse the .changes field into a dictionary
374         try:
375             self.pkg.changes.update(parse_changes(filename))
376         except CantOpenError:
377             self.rejects.append("%s: can't read file." % (filename))
378             return False
379         except ParseChangesError, line:
380             self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
381             return False
382         except ChangesUnicodeError:
383             self.rejects.append("%s: changes file not proper utf-8" % (filename))
384             return False
385
386         # Parse the Files field from the .changes into another dictionary
387         try:
388             self.pkg.files.update(utils.build_file_list(self.pkg.changes))
389         except ParseChangesError, line:
390             self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
391             return False
392         except UnknownFormatError, format:
393             self.rejects.append("%s: unknown format '%s'." % (filename, format))
394             return False
395
396         # Check for mandatory fields
397         for i in ("distribution", "source", "binary", "architecture",
398                   "version", "maintainer", "files", "changes", "description"):
399             if not self.pkg.changes.has_key(i):
400                 # Avoid undefined errors later
401                 self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
402                 return False
403
404         # Strip a source version in brackets from the source field
405         if re_strip_srcver.search(self.pkg.changes["source"]):
406             self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])
407
408         # Ensure the source field is a valid package name.
409         if not re_valid_pkg_name.match(self.pkg.changes["source"]):
410             self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))
411
412         # Split multi-value fields into a lower-level dictionary
413         for i in ("architecture", "distribution", "binary", "closes"):
414             o = self.pkg.changes.get(i, "")
415             if o != "":
416                 del self.pkg.changes[i]
417
418             self.pkg.changes[i] = {}
419
420             for j in o.split():
421                 self.pkg.changes[i][j] = 1
422
423         # Fix the Maintainer: field to be RFC822/2047 compatible
424         try:
425             (self.pkg.changes["maintainer822"],
426              self.pkg.changes["maintainer2047"],
427              self.pkg.changes["maintainername"],
428              self.pkg.changes["maintaineremail"]) = \
429                    fix_maintainer (self.pkg.changes["maintainer"])
430         except ParseMaintError, msg:
431             self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
432                    % (filename, self.pkg.changes["maintainer"], msg))
433
434         # ...likewise for the Changed-By: field if it exists.
435         try:
436             (self.pkg.changes["changedby822"],
437              self.pkg.changes["changedby2047"],
438              self.pkg.changes["changedbyname"],
439              self.pkg.changes["changedbyemail"]) = \
440                    fix_maintainer (self.pkg.changes.get("changed-by", ""))
441         except ParseMaintError, msg:
442             self.pkg.changes["changedby822"] = ""
443             self.pkg.changes["changedby2047"] = ""
444             self.pkg.changes["changedbyname"] = ""
445             self.pkg.changes["changedbyemail"] = ""
446
447             self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
448                    % (filename, self.pkg.changes["changed-by"], msg))
449
450         # Ensure all the values in Closes: are numbers
451         if self.pkg.changes.has_key("closes"):
452             for i in self.pkg.changes["closes"].keys():
453                 if re_isanum.match (i) == None:
454                     self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))
455
456         # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
457         self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
458         self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])
459
460         # Check the .changes is non-empty
461         if not self.pkg.files:
462             self.rejects.append("%s: nothing to do (Files field is empty)." % (base_filename))
463             return False
464
465         # Changes was syntactically valid even if we'll reject
466         return True
467
468     ###########################################################################
469
    def check_distributions(self):
        """Check and map the Distribution field.

        Applies the configured SuiteMappings (map, silent-map,
        map-unreleased, ignore, reject, propup-version) to
        self.pkg.changes["distribution"], then verifies at least one valid,
        known target suite remains.  Problems go to self.rejects; mapping
        actions are logged in self.notes / self.warnings.
        """

        Cnf = Config()

        # Handle suite mappings
        for m in Cnf.ValueList("SuiteMappings"):
            args = m.split()
            mtype = args[0]
            if mtype == "map" or mtype == "silent-map":
                # Unconditionally rename suite `source' to `dest'.
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    del self.pkg.changes["distribution"][source]
                    self.pkg.changes["distribution"][dest] = 1
                    if mtype != "silent-map":
                        self.notes.append("Mapping %s to %s." % (source, dest))
                if self.pkg.changes.has_key("distribution-version"):
                    if self.pkg.changes["distribution-version"].has_key(source):
                        self.pkg.changes["distribution-version"][source]=dest
            elif mtype == "map-unreleased":
                # Remap only when the upload contains an architecture the
                # source suite does not carry; one such arch is enough.
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    for arch in self.pkg.changes["architecture"].keys():
                        if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
                            self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
                            del self.pkg.changes["distribution"][source]
                            self.pkg.changes["distribution"][dest] = 1
                            break
            elif mtype == "ignore":
                # Drop the suite from the target list with only a warning.
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    del self.pkg.changes["distribution"][suite]
                    self.warnings.append("Ignoring %s as a target suite." % (suite))
            elif mtype == "reject":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    self.rejects.append("Uploads to %s are not accepted." % (suite))
            elif mtype == "propup-version":
                # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
                #
                # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
                if self.pkg.changes["distribution"].has_key(args[1]):
                    self.pkg.changes.setdefault("distribution-version", {})
                    for suite in args[2:]:
                        self.pkg.changes["distribution-version"][suite] = suite

        # Ensure there is (still) a target distribution
        if len(self.pkg.changes["distribution"].keys()) < 1:
            self.rejects.append("No valid distribution remaining.")

        # Ensure target distributions exist
        for suite in self.pkg.changes["distribution"].keys():
            if not Cnf.has_key("Suite::%s" % (suite)):
                self.rejects.append("Unknown distribution `%s'." % (suite))
524
525     ###########################################################################
526
527     def binary_file_checks(self, f, session):
528         cnf = Config()
529         entry = self.pkg.files[f]
530
531         # Extract package control information
532         deb_file = utils.open_file(f)
533         try:
534             control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
535         except:
536             self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
537             deb_file.close()
538             # Can't continue, none of the checks on control would work.
539             return
540
541         # Check for mandantory "Description:"
542         deb_file.seek(0)
543         try:
544             apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
545         except:
546             self.rejects.append("%s: Missing Description in binary package" % (f))
547             return
548
549         deb_file.close()
550
551         # Check for mandatory fields
552         for field in [ "Package", "Architecture", "Version" ]:
553             if control.Find(field) == None:
554                 # Can't continue
555                 self.rejects.append("%s: No %s field in control." % (f, field))
556                 return
557
558         # Ensure the package name matches the one give in the .changes
559         if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
560             self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
561
562         # Validate the package field
563         package = control.Find("Package")
564         if not re_valid_pkg_name.match(package):
565             self.rejects.append("%s: invalid package name '%s'." % (f, package))
566
567         # Validate the version field
568         version = control.Find("Version")
569         if not re_valid_version.match(version):
570             self.rejects.append("%s: invalid version number '%s'." % (f, version))
571
572         # Ensure the architecture of the .deb is one we know about.
573         default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
574         architecture = control.Find("Architecture")
575         upload_suite = self.pkg.changes["distribution"].keys()[0]
576
577         if      architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session)] \
578             and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session)]:
579             self.rejects.append("Unknown architecture '%s'." % (architecture))
580
581         # Ensure the architecture of the .deb is one of the ones
582         # listed in the .changes.
583         if not self.pkg.changes["architecture"].has_key(architecture):
584             self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
585
586         # Sanity-check the Depends field
587         depends = control.Find("Depends")
588         if depends == '':
589             self.rejects.append("%s: Depends field is empty." % (f))
590
591         # Sanity-check the Provides field
592         provides = control.Find("Provides")
593         if provides:
594             provide = re_spacestrip.sub('', provides)
595             if provide == '':
596                 self.rejects.append("%s: Provides field is empty." % (f))
597             prov_list = provide.split(",")
598             for prov in prov_list:
599                 if not re_valid_pkg_name.match(prov):
600                     self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
601
602         # Check the section & priority match those given in the .changes (non-fatal)
603         if     control.Find("Section") and entry["section"] != "" \
604            and entry["section"] != control.Find("Section"):
605             self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
606                                 (f, control.Find("Section", ""), entry["section"]))
607         if control.Find("Priority") and entry["priority"] != "" \
608            and entry["priority"] != control.Find("Priority"):
609             self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
610                                 (f, control.Find("Priority", ""), entry["priority"]))
611
612         entry["package"] = package
613         entry["architecture"] = architecture
614         entry["version"] = version
615         entry["maintainer"] = control.Find("Maintainer", "")
616
617         if f.endswith(".udeb"):
618             self.pkg.files[f]["dbtype"] = "udeb"
619         elif f.endswith(".deb"):
620             self.pkg.files[f]["dbtype"] = "deb"
621         else:
622             self.rejects.append("%s is neither a .deb or a .udeb." % (f))
623
624         entry["source"] = control.Find("Source", entry["package"])
625
626         # Get the source version
627         source = entry["source"]
628         source_version = ""
629
630         if source.find("(") != -1:
631             m = re_extract_src_version.match(source)
632             source = m.group(1)
633             source_version = m.group(2)
634
635         if not source_version:
636             source_version = self.pkg.files[f]["version"]
637
638         entry["source package"] = source
639         entry["source version"] = source_version
640
641         # Ensure the filename matches the contents of the .deb
642         m = re_isadeb.match(f)
643
644         #  package name
645         file_package = m.group(1)
646         if entry["package"] != file_package:
647             self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
648                                 (f, file_package, entry["dbtype"], entry["package"]))
649         epochless_version = re_no_epoch.sub('', control.Find("Version"))
650
651         #  version
652         file_version = m.group(2)
653         if epochless_version != file_version:
654             self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
655                                 (f, file_version, entry["dbtype"], epochless_version))
656
657         #  architecture
658         file_architecture = m.group(3)
659         if entry["architecture"] != file_architecture:
660             self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
661                                 (f, file_architecture, entry["dbtype"], entry["architecture"]))
662
663         # Check for existent source
664         source_version = entry["source version"]
665         source_package = entry["source package"]
666         if self.pkg.changes["architecture"].has_key("source"):
667             if source_version != self.pkg.changes["version"]:
668                 self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
669                                     (source_version, f, self.pkg.changes["version"]))
670         else:
671             # Check in the SQL database
672             if not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
673                 # Check in one of the other directories
674                 source_epochless_version = re_no_epoch.sub('', source_version)
675                 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
676                 if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
677                     entry["byhand"] = 1
678                 elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
679                     entry["new"] = 1
680                 else:
681                     dsc_file_exists = False
682                     for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
683                         if cnf.has_key("Dir::Queue::%s" % (myq)):
684                             if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
685                                 dsc_file_exists = True
686                                 break
687
688                     if not dsc_file_exists:
689                         self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
690
691         # Check the version and for file overwrites
692         self.check_binary_against_db(f, session)
693
694         # Temporarily disable contents generation until we change the table storage layout
695         #b = Binary(f)
696         #b.scan_package()
697         #if len(b.rejects) > 0:
698         #    for j in b.rejects:
699         #        self.rejects.append(j)
700
701     def source_file_checks(self, f, session):
702         entry = self.pkg.files[f]
703
704         m = re_issource.match(f)
705         if not m:
706             return
707
708         entry["package"] = m.group(1)
709         entry["version"] = m.group(2)
710         entry["type"] = m.group(3)
711
712         # Ensure the source package name matches the Source filed in the .changes
713         if self.pkg.changes["source"] != entry["package"]:
714             self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
715
716         # Ensure the source version matches the version in the .changes file
717         if re_is_orig_source.match(f):
718             changes_version = self.pkg.changes["chopversion2"]
719         else:
720             changes_version = self.pkg.changes["chopversion"]
721
722         if changes_version != entry["version"]:
723             self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))
724
725         # Ensure the .changes lists source in the Architecture field
726         if not self.pkg.changes["architecture"].has_key("source"):
727             self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))
728
729         # Check the signature of a .dsc file
730         if entry["type"] == "dsc":
731             # check_signature returns either:
732             #  (None, [list, of, rejects]) or (signature, [])
733             (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
734             for j in rejects:
735                 self.rejects.append(j)
736
737         entry["architecture"] = "source"
738
739     def per_suite_file_checks(self, f, suite, session):
740         cnf = Config()
741         entry = self.pkg.files[f]
742
743         # Skip byhand
744         if entry.has_key("byhand"):
745             return
746
747         # Check we have fields we need to do these checks
748         oktogo = True
749         for m in ['component', 'package', 'priority', 'size', 'md5sum']:
750             if not entry.has_key(m):
751                 self.rejects.append("file '%s' does not have field %s set" % (f, m))
752                 oktogo = False
753
754         if not oktogo:
755             return
756
757         # Handle component mappings
758         for m in cnf.ValueList("ComponentMappings"):
759             (source, dest) = m.split()
760             if entry["component"] == source:
761                 entry["original component"] = source
762                 entry["component"] = dest
763
764         # Ensure the component is valid for the target suite
765         if cnf.has_key("Suite:%s::Components" % (suite)) and \
766            entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
767             self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
768             return
769
770         # Validate the component
771         if not get_component(entry["component"], session):
772             self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
773             return
774
775         # See if the package is NEW
776         if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
777             entry["new"] = 1
778
779         # Validate the priority
780         if entry["priority"].find('/') != -1:
781             self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))
782
783         # Determine the location
784         location = cnf["Dir::Pool"]
785         l = get_location(location, entry["component"], session=session)
786         if l is None:
787             self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %)" % entry["component"])
788             entry["location id"] = -1
789         else:
790             entry["location id"] = l.location_id
791
792         # Check the md5sum & size against existing files (if any)
793         entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])
794
795         found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
796                                          entry["size"], entry["md5sum"], entry["location id"])
797
798         if found is None:
799             self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
800         elif found is False and poolfile is not None:
801             self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
802         else:
803             if poolfile is None:
804                 entry["files id"] = None
805             else:
806                 entry["files id"] = poolfile.file_id
807
808         # Check for packages that have moved from one component to another
809         entry['suite'] = suite
810         res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
811         if res.rowcount > 0:
812             entry["othercomponents"] = res.fetchone()[0]
813
    def check_files(self, action=True):
        """Check each file listed in the upload's .changes.

        Copies the files into the holding area (skipped when action is
        False, i.e. a dry run), rejects a .changes already known to dak,
        classifies every file as deb/source/byhand, and runs the matching
        per-file and per-suite checks.  Problems are appended to
        self.rejects / self.warnings.
        """
        file_keys = self.pkg.files.keys()
        holding = Holding()
        cnf = Config()

        if action:
            # Copy everything into the holding area, working from the
            # upload's own directory.
            cwd = os.getcwd()
            os.chdir(self.pkg.directory)
            for f in file_keys:
                ret = holding.copy_to_holding(f)
                if ret is not None:
                    self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)

            os.chdir(cwd)

        # check we already know the changes file
        # [NB: this check must be done post-suite mapping]
        base_filename = os.path.basename(self.pkg.changes_file)

        session = DBConn().session()

        try:
            dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
            # if in the pool or in a queue other than unchecked, reject
            if (dbc.in_queue is None) \
                   or (dbc.in_queue is not None
                       and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
                self.rejects.append("%s file already known to dak" % base_filename)
        except NoResultFound, e:
            # not known, good
            pass

        has_binaries = False
        has_source = False

        for f, entry in self.pkg.files.items():
            # Ensure the file does not already exist in one of the accepted directories
            for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                if not cnf.has_key("Dir::Queue::%s" % (d)): continue
                if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
                    self.rejects.append("%s file already exists in the %s directory." % (f, d))

            # Reject filenames with characters outside the allowed set
            if not re_taint_free.match(f):
                self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))

            # Check the file is readable
            if os.access(f, os.R_OK) == 0:
                # When running in -n, copy_to_holding() won't have
                # generated the reject_message, so we need to.
                if action:
                    if os.path.exists(f):
                        self.rejects.append("Can't read `%s'. [permission denied]" % (f))
                    else:
                        # Don't directly reject, mark to check later to deal with orig's
                        # we can find in the pool
                        self.later_check_files.append(f)
                entry["type"] = "unreadable"
                continue

            # If it's byhand skip remaining checks
            if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Checks for a binary package...
            elif re_isadeb.match(f):
                has_binaries = True
                entry["type"] = "deb"

                # This routine appends to self.rejects/warnings as appropriate
                self.binary_file_checks(f, session)

            # Checks for a source package...
            elif re_issource.match(f):
                has_source = True

                # This routine appends to self.rejects/warnings as appropriate
                self.source_file_checks(f, session)

            # Not a binary or source package?  Assume byhand...
            else:
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Per-suite file checks
            entry["oldfiles"] = {}
            for suite in self.pkg.changes["distribution"].keys():
                self.per_suite_file_checks(f, suite, session)

        session.close()

        # If the .changes file says it has source, it must have source.
        if self.pkg.changes["architecture"].has_key("source"):
            if not has_source:
                self.rejects.append("no source found and Architecture line in changes mention source.")

            if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
                self.rejects.append("source only uploads are not supported.")
912
913     ###########################################################################
    def check_dsc(self, action=True, session=None):
        """Check the .dsc belonging to this upload.

        Parses the .dsc, enforces mandatory fields, validates source name,
        version, format, Maintainer and Build-Depends, and cross-checks
        everything against the .changes file and the database.

        @rtype: boolean
        @return: whether or not the source changes are valid; problems are
                 appended to self.rejects either way
        """
        # Ensure there is source to check
        if not self.pkg.changes["architecture"].has_key("source"):
            return True

        # Find the .dsc
        dsc_filename = None
        for f, entry in self.pkg.files.items():
            if entry["type"] == "dsc":
                if dsc_filename:
                    self.rejects.append("can not process a .changes file with multiple .dsc's.")
                    return False
                else:
                    dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            self.rejects.append("source uploads must contain a dsc file")
            return False

        # Parse the .dsc file
        # NOTE: on parse failure (other than the NoFilesFieldError family
        # below) we fall through with a possibly-empty self.pkg.dsc; the
        # mandatory-field loop then produces the reject.
        try:
            self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1))
        except CantOpenError:
            # if not -n copy_to_holding() will have done this for us...
            if not action:
                self.rejects.append("%s: can't read file." % (dsc_filename))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
        except InvalidDscError, line:
            self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
        except ChangesUnicodeError:
            self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))

        # Build up the file list of files mentioned by the .dsc
        try:
            self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
        except NoFilesFieldError:
            self.rejects.append("%s: no Files: field." % (dsc_filename))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
            return False

        # Enforce mandatory fields
        for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
            if not self.pkg.dsc.has_key(i):
                self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
                return False

        # Validate the source and version fields
        if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
        if not re_valid_version.match(self.pkg.dsc["version"]):
            self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))

        # Only a limited list of source formats are allowed in each suite
        for dist in self.pkg.changes["distribution"].keys():
            allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
            if self.pkg.dsc["format"] not in allowed:
                self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))

        # Validate the Maintainer field
        try:
            # We ignore the return value
            fix_maintainer(self.pkg.dsc["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                                 % (dsc_filename, self.pkg.dsc["maintainer"], msg))

        # Validate the build-depends field(s)
        for field_name in [ "build-depends", "build-depends-indep" ]:
            field = self.pkg.dsc.get(field_name)
            if field:
                # Have apt try to parse them...
                try:
                    apt_pkg.ParseSrcDepends(field)
                except:
                    self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))

        # Ensure the version number in the .dsc matches the version number in the .changes
        epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        changes_version = self.pkg.files[dsc_filename]["version"]

        if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
            self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))

        # Ensure the Files field contain only what's expected
        self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))

        # Ensure source is newer than existing source in target suites
        # NOTE(review): this rebinds the `session' parameter with a fresh DB
        # session; the caller-supplied session is only used above for
        # get_suite_src_formats -- confirm that is intentional.
        session = DBConn().session()
        self.check_source_against_db(dsc_filename, session)
        self.check_dsc_against_db(dsc_filename, session)
        session.close()

        # Finally, check if we're missing any files
        # (files deferred by check_files() -- e.g. unreadable origs we hoped
        # to find in the pool -- that are still unaccounted for)
        for f in self.later_check_files:
            self.rejects.append("Could not find file %s references in changes" % f)

        return True
1019
1020     ###########################################################################
1021
    def get_changelog_versions(self, source_dir):
        """Extract the source package and (optionally) grab the
        version history out of debian/changelog for the BTS.

        source_dir is the directory holding the upload's files; the
        extraction happens in the current working directory (the caller,
        check_source(), has chdir()ed into a temporary directory first).
        Problems are appended to self.rejects.
        """

        cnf = Config()

        # Find the .dsc (again)
        dsc_filename = None
        for f in self.pkg.files.keys():
            if self.pkg.files[f]["type"] == "dsc":
                dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            return

        # Create a symlink mirror of the source files in our temporary directory
        for f in self.pkg.files.keys():
            m = re_issource.match(f)
            if m:
                src = os.path.join(source_dir, f)
                # If a file is missing for whatever reason, give up.
                if not os.path.exists(src):
                    return
                ftype = m.group(3)
                # Orig tarballs with a known pool path are symlinked from
                # that path in the loop below instead.
                if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
                   self.pkg.orig_files[f].has_key("path"):
                    continue
                dest = os.path.join(os.getcwd(), f)
                os.symlink(src, dest)

        # If the orig files are not a part of the upload, create symlinks to the
        # existing copies.
        for orig_file in self.pkg.orig_files.keys():
            if not self.pkg.orig_files[orig_file].has_key("path"):
                continue
            dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
            os.symlink(self.pkg.orig_files[orig_file]["path"], dest)

        # Extract the source
        cmd = "dpkg-source -sn -x %s" % (dsc_filename)
        (result, output) = commands.getstatusoutput(cmd)
        if (result != 0):
            self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
            self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
            return

        # Version tracking for the BTS is optional; nothing more to do if it
        # isn't configured.
        if not cnf.Find("Dir::Queue::BTSVersionTrack"):
            return

        # Get the upstream version
        upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        if re_strip_revision.search(upstr_version):
            upstr_version = re_strip_revision.sub('', upstr_version)

        # Ensure the changelog file exists
        changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
        if not os.path.exists(changelog_filename):
            self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
            return

        # Parse the changelog, keeping only the version-header lines
        self.pkg.dsc["bts changelog"] = ""
        changelog_file = utils.open_file(changelog_filename)
        for line in changelog_file.readlines():
            m = re_changelog_versions.match(line)
            if m:
                self.pkg.dsc["bts changelog"] += line
        changelog_file.close()

        # Check we found at least one revision in the changelog
        if not self.pkg.dsc["bts changelog"]:
            self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1095
1096     def check_source(self):
1097         # Bail out if:
1098         #    a) there's no source
1099         # or c) the orig files are MIA
1100         if not self.pkg.changes["architecture"].has_key("source") \
1101            or len(self.pkg.orig_files) == 0:
1102             return
1103
1104         tmpdir = utils.temp_dirname()
1105
1106         # Move into the temporary directory
1107         cwd = os.getcwd()
1108         os.chdir(tmpdir)
1109
1110         # Get the changelog version history
1111         self.get_changelog_versions(cwd)
1112
1113         # Move back and cleanup the temporary tree
1114         os.chdir(cwd)
1115
1116         try:
1117             shutil.rmtree(tmpdir)
1118         except OSError, e:
1119             if e.errno != errno.EACCES:
1120                 print "foobar"
1121                 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1122
1123             self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1124             # We probably have u-r or u-w directories so chmod everything
1125             # and try again.
1126             cmd = "chmod -R u+rwx %s" % (tmpdir)
1127             result = os.system(cmd)
1128             if result != 0:
1129                 utils.fubar("'%s' failed with result %s." % (cmd, result))
1130             shutil.rmtree(tmpdir)
1131         except Exception, e:
1132             print "foobar2 (%s)" % e
1133             utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1134
1135     ###########################################################################
1136     def ensure_hashes(self):
1137         # Make sure we recognise the format of the Files: field in the .changes
1138         format = self.pkg.changes.get("format", "0.0").split(".", 1)
1139         if len(format) == 2:
1140             format = int(format[0]), int(format[1])
1141         else:
1142             format = int(float(format[0])), 0
1143
1144         # We need to deal with the original changes blob, as the fields we need
1145         # might not be in the changes dict serialised into the .dak anymore.
1146         orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1147
1148         # Copy the checksums over to the current changes dict.  This will keep
1149         # the existing modifications to it intact.
1150         for field in orig_changes:
1151             if field.startswith('checksums-'):
1152                 self.pkg.changes[field] = orig_changes[field]
1153
1154         # Check for unsupported hashes
1155         for j in utils.check_hash_fields(".changes", self.pkg.changes):
1156             self.rejects.append(j)
1157
1158         for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1159             self.rejects.append(j)
1160
1161         # We have to calculate the hash if we have an earlier changes version than
1162         # the hash appears in rather than require it exist in the changes file
1163         for hashname, hashfunc, version in utils.known_hashes:
1164             # TODO: Move _ensure_changes_hash into this class
1165             for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1166                 self.rejects.append(j)
1167             if "source" in self.pkg.changes["architecture"]:
1168                 # TODO: Move _ensure_dsc_hash into this class
1169                 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1170                     self.rejects.append(j)
1171
1172     def check_hashes(self):
1173         for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1174             self.rejects.append(m)
1175
1176         for m in utils.check_size(".changes", self.pkg.files):
1177             self.rejects.append(m)
1178
1179         for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1180             self.rejects.append(m)
1181
1182         for m in utils.check_size(".dsc", self.pkg.dsc_files):
1183             self.rejects.append(m)
1184
1185         self.ensure_hashes()
1186
1187     ###########################################################################
1188
    def ensure_orig(self, target_dir='.', session=None):
        """
        Ensures that all orig files mentioned in the changes file are present
        in target_dir. If they do not exist, they are symlinked into place.

        Candidate copies are searched for first in the pool, then in the
        other queue directories, and only accepted if size and md5sum match
        the .dsc entry.

        A list containing the symlinks that were created is returned (so they
        can be removed).
        """

        symlinked = []
        cnf = Config()

        for filename, entry in self.pkg.dsc_files.iteritems():
            if not re_is_orig_source.match(filename):
                # File is not an orig; ignore
                continue

            if os.path.exists(filename):
                # File exists, no need to continue
                continue

            def symlink_if_valid(path):
                # Symlink path into target_dir iff its size and md5sum match
                # what the .dsc expects for this entry; returns True on
                # success.  (Closes over `entry', `filename', `symlinked'.)
                f = utils.open_file(path)
                md5sum = apt_pkg.md5sum(f)
                f.close()

                fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
                expected = (int(entry['size']), entry['md5sum'])

                if fingerprint != expected:
                    return False

                dest = os.path.join(target_dir, filename)

                os.symlink(path, dest)
                symlinked.append(dest)

                return True

            # Use the caller's session when given; otherwise open (and later
            # close) one of our own.
            session_ = session
            if session is None:
                session_ = DBConn().session()

            found = False

            # Look in the pool
            for poolfile in get_poolfile_like_name('/%s' % filename, session_):
                poolfile_path = os.path.join(
                    poolfile.location.path, poolfile.filename
                )

                if symlink_if_valid(poolfile_path):
                    found = True
                    break

            if session is None:
                session_.close()

            if found:
                continue

            # Look in some other queues for the file
            queues = ('New', 'Byhand', 'ProposedUpdates',
                'OldProposedUpdates', 'Embargoed', 'Unembargoed')

            for queue in queues:
                if not cnf.get('Dir::Queue::%s' % queue):
                    continue

                queuefile_path = os.path.join(
                    cnf['Dir::Queue::%s' % queue], filename
                )

                if not os.path.exists(queuefile_path):
                    # Does not exist in this queue
                    continue

                if symlink_if_valid(queuefile_path):
                    break

        return symlinked
1270
1271     ###########################################################################
1272
    def check_lintian(self):
        """
        Extends self.rejects by checking the output of lintian against tags
        specified in Dinstall::LintianTags.

        Only runs for source uploads targeting unstable or experimental, and
        only when a tag file is configured.
        """

        cnf = Config()

        # Don't reject binary uploads
        if not self.pkg.changes['architecture'].has_key('source'):
            return

        # Only check some distributions
        for dist in ('unstable', 'experimental'):
            if dist in self.pkg.changes['distribution']:
                break
        else:
            return

        # If we do not have a tagfile, don't do anything
        tagfile = cnf.get("Dinstall::LintianTags")
        if tagfile is None:
            return

        # Parse the yaml file
        sourcefile = file(tagfile, 'r')
        sourcecontent = sourcefile.read()
        sourcefile.close()

        try:
            # NOTE(review): yaml.load() can run arbitrary constructors; the
            # tagfile is operator-controlled configuration so this is
            # presumably safe, but yaml.safe_load() would be more defensive.
            lintiantags = yaml.load(sourcecontent)['lintian']
        except yaml.YAMLError, msg:
            utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
            return

        # Try and find all orig mentioned in the .dsc
        symlinked = self.ensure_orig()

        # Setup the input file for lintian: one tag name per line
        fd, temp_filename = utils.temp_filename()
        temptagfile = os.fdopen(fd, 'w')
        for tags in lintiantags.values():
            temptagfile.writelines(['%s\n' % x for x in tags])
        temptagfile.close()

        try:
            cmd = "lintian --show-overrides --tags-from-file %s %s" % \
                (temp_filename, self.pkg.changes_file)

            result, output = commands.getstatusoutput(cmd)
        finally:
            # Remove our tempfile and any symlinks we created
            os.unlink(temp_filename)

            for symlink in symlinked:
                os.unlink(symlink)

        # A result of 2 is treated as lintian itself failing: warn, but still
        # fall through and parse whatever output was produced.
        if result == 2:
            utils.warn("lintian failed for %s [return code: %s]." % \
                (self.pkg.changes_file, result))
            utils.warn(utils.prefix_multi_line_string(output, \
                " [possible output:] "))

        def log(*txt):
            # Route messages through the upload's logger, when one is attached.
            if self.logger:
                self.logger.log(
                    [self.pkg.changes_file, "check_lintian"] + list(txt)
                )

        # Generate messages
        parsed_tags = parse_lintian_output(output)
        self.rejects.extend(
            generate_reject_messages(parsed_tags, lintiantags, log=log)
        )
1347
1348     ###########################################################################
1349     def check_urgency(self):
1350         cnf = Config()
1351         if self.pkg.changes["architecture"].has_key("source"):
1352             if not self.pkg.changes.has_key("urgency"):
1353                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1354             self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1355             if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1356                 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1357                                      (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1358                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1359
1360     ###########################################################################
1361
1362     # Sanity check the time stamps of files inside debs.
1363     # [Files in the near future cause ugly warnings and extreme time
1364     #  travel can cause errors on extraction]
1365
    def check_timestamps(self):
        """Sanity check the timestamps of files inside each uploaded deb.

        Rejects debs containing members dated beyond
        Dinstall::FutureTimeTravelGrace seconds in the future or before
        the start of Dinstall::PastCutoffYear.
        """
        Cnf = Config()

        future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
        past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
        # tar.callback is fed each member; offending files are collected in
        # tar.future_files / tar.ancient_files, keyed by name.
        tar = TarTime(future_cutoff, past_cutoff)

        for filename, entry in self.pkg.files.items():
            if entry["type"] == "deb":
                tar.reset()
                try:
                    deb_file = utils.open_file(filename)
                    apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
                    deb_file.seek(0)
                    try:
                        apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
                    except SystemError, e:
                        # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
                        # NOTE(review): relies on `re' being in scope via a
                        # wildcard import above -- confirm.
                        if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
                            raise
                        deb_file.seek(0)
                        apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")

                    deb_file.close()

                    future_files = tar.future_files.keys()
                    if future_files:
                        num_future_files = len(future_files)
                        future_file = future_files[0]
                        future_date = tar.future_files[future_file]
                        self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
                               % (filename, num_future_files, future_file, time.ctime(future_date)))

                    ancient_files = tar.ancient_files.keys()
                    if ancient_files:
                        num_ancient_files = len(ancient_files)
                        ancient_file = ancient_files[0]
                        ancient_date = tar.ancient_files[ancient_file]
                        self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
                               % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
                except:
                    # Deliberately broad: any failure reading the deb becomes
                    # a reject instead of crashing the queue run.
                    self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1408
1409     def check_if_upload_is_sponsored(self, uid_email, uid_name):
1410         if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1411             sponsored = False
1412         elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1413             sponsored = False
1414             if uid_name == "":
1415                 sponsored = True
1416         else:
1417             sponsored = True
1418             if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1419                 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1420                 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1421                     self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1422                         self.pkg.changes["sponsoremail"] = uid_email
1423
1424         return sponsored
1425
1426
1427     ###########################################################################
1428     # check_signed_by_key checks
1429     ###########################################################################
1430
1431     def check_signed_by_key(self):
1432         """Ensure the .changes is signed by an authorized uploader."""
1433         session = DBConn().session()
1434
1435         # First of all we check that the person has proper upload permissions
1436         # and that this upload isn't blocked
1437         fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1438
1439         if fpr is None:
1440             self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1441             return
1442
1443         # TODO: Check that import-keyring adds UIDs properly
1444         if not fpr.uid:
1445             self.rejects.append("Cannot find uid for fingerprint %s.  Please contact ftpmaster@debian.org" % fpr.fingerprint)
1446             return
1447
1448         # Check that the fingerprint which uploaded has permission to do so
1449         self.check_upload_permissions(fpr, session)
1450
1451         # Check that this package is not in a transition
1452         self.check_transition(session)
1453
1454         session.close()
1455
1456
1457     def check_upload_permissions(self, fpr, session):
1458         # Check any one-off upload blocks
1459         self.check_upload_blocks(fpr, session)
1460
1461         # Start with DM as a special case
1462         # DM is a special case unfortunately, so we check it first
1463         # (keys with no source access get more access than DMs in one
1464         #  way; DMs can only upload for their packages whether source
1465         #  or binary, whereas keys with no access might be able to
1466         #  upload some binaries)
1467         if fpr.source_acl.access_level == 'dm':
1468             self.check_dm_upload(fpr, session)
1469         else:
1470             # Check source-based permissions for other types
1471             if self.pkg.changes["architecture"].has_key("source") and \
1472                 fpr.source_acl.access_level is None:
1473                 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1474                 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1475                 self.rejects.append(rej)
1476                 return
1477             # If not a DM, we allow full upload rights
1478             uid_email = "%s@debian.org" % (fpr.uid.uid)
1479             self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1480
1481
1482         # Check binary upload permissions
1483         # By this point we know that DMs can't have got here unless they
1484         # are allowed to deal with the package concerned so just apply
1485         # normal checks
1486         if fpr.binary_acl.access_level == 'full':
1487             return
1488
1489         # Otherwise we're in the map case
1490         tmparches = self.pkg.changes["architecture"].copy()
1491         tmparches.pop('source', None)
1492
1493         for bam in fpr.binary_acl_map:
1494             tmparches.pop(bam.architecture.arch_string, None)
1495
1496         if len(tmparches.keys()) > 0:
1497             if fpr.binary_reject:
1498                 rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1499                 rej += "\narchitectures involved are: ", ",".join(tmparches.keys())
1500                 self.rejects.append(rej)
1501             else:
1502                 # TODO: This is where we'll implement reject vs throw away binaries later
1503                 rej = "Uhm.  I'm meant to throw away the binaries now but that's not implemented yet"
1504                 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1505                 rej += "\nFingerprint: %s", (fpr.fingerprint)
1506                 self.rejects.append(rej)
1507
1508
1509     def check_upload_blocks(self, fpr, session):
1510         """Check whether any upload blocks apply to this source, source
1511            version, uid / fpr combination"""
1512
1513         def block_rej_template(fb):
1514             rej = 'Manual upload block in place for package %s' % fb.source
1515             if fb.version is not None:
1516                 rej += ', version %s' % fb.version
1517             return rej
1518
1519         for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1520             # version is None if the block applies to all versions
1521             if fb.version is None or fb.version == self.pkg.changes['version']:
1522                 # Check both fpr and uid - either is enough to cause a reject
1523                 if fb.fpr is not None:
1524                     if fb.fpr.fingerprint == fpr.fingerprint:
1525                         self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1526                 if fb.uid is not None:
1527                     if fb.uid == fpr.uid:
1528                         self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1529
1530
    def check_dm_upload(self, fpr, session):
        """
        Enforce the Debian Maintainer upload conditions from the 2007 GR
        (http://www.debian.org/vote/2007/vote_003).  Each violated condition
        appends a message to self.rejects; the method returns early once a
        condition definitively fails.

        @type fpr: Fingerprint
        @param fpr: database object for the key which signed the upload

        @type session: SQLAlchemy session
        @param session: session used for source/binary lookups
        """
        # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
        ## none of the uploaded packages are NEW
        rej = False
        for f in self.pkg.files.keys():
            if self.pkg.files[f].has_key("byhand"):
                self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
                rej = True
            if self.pkg.files[f].has_key("new"):
                self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
                rej = True

        if rej:
            return

        ## the most recent version of the package uploaded to unstable or
        ## experimental includes the field "DM-Upload-Allowed: yes" in the source
        ## section of its control file
        # Highest-versioned existing source in unstable/experimental.
        q = session.query(DBSource).filter_by(source=self.pkg.changes["source"])
        q = q.join(SrcAssociation)
        q = q.join(Suite).filter(Suite.suite_name.in_(['unstable', 'experimental']))
        q = q.order_by(desc('source.version')).limit(1)

        r = q.all()

        if len(r) != 1:
            rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
            self.rejects.append(rej)
            return

        r = r[0]
        if not r.dm_upload_allowed:
            rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
            self.rejects.append(rej)
            return

        ## the Maintainer: field of the uploaded .changes file corresponds with
        ## the owner of the key used (ie, non-developer maintainers may not sponsor
        ## uploads)
        if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
            self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))

        ## the most recent version of the package uploaded to unstable or
        ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
        ## non-developer maintainers cannot NMU or hijack packages)

        # srcuploaders includes the maintainer
        accept = False
        for sup in r.srcuploaders:
            (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
            # Eww - I hope we never have two people with the same name in Debian
            if email == fpr.uid.uid or name == fpr.uid.name:
                accept = True
                break

        if not accept:
            self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
            return

        ## none of the packages are being taken over from other source packages
        # For every binary and target suite, check no *other* source package
        # currently owns a binary of the same name there.
        for b in self.pkg.changes["binary"].keys():
            for suite in self.pkg.changes["distribution"].keys():
                q = session.query(DBSource)
                q = q.join(DBBinary).filter_by(package=b)
                q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)

                for s in q.all():
                    if s.source != self.pkg.changes["source"]:
                        self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s, suite))
1600
1601
1602
1603     def check_transition(self, session):
1604         cnf = Config()
1605
1606         sourcepkg = self.pkg.changes["source"]
1607
1608         # No sourceful upload -> no need to do anything else, direct return
1609         # We also work with unstable uploads, not experimental or those going to some
1610         # proposed-updates queue
1611         if "source" not in self.pkg.changes["architecture"] or \
1612            "unstable" not in self.pkg.changes["distribution"]:
1613             return
1614
1615         # Also only check if there is a file defined (and existant) with
1616         # checks.
1617         transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1618         if transpath == "" or not os.path.exists(transpath):
1619             return
1620
1621         # Parse the yaml file
1622         sourcefile = file(transpath, 'r')
1623         sourcecontent = sourcefile.read()
1624         try:
1625             transitions = yaml.load(sourcecontent)
1626         except yaml.YAMLError, msg:
1627             # This shouldn't happen, there is a wrapper to edit the file which
1628             # checks it, but we prefer to be safe than ending up rejecting
1629             # everything.
1630             utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1631             return
1632
1633         # Now look through all defined transitions
1634         for trans in transitions:
1635             t = transitions[trans]
1636             source = t["source"]
1637             expected = t["new"]
1638
1639             # Will be None if nothing is in testing.
1640             current = get_source_in_suite(source, "testing", session)
1641             if current is not None:
1642                 compare = apt_pkg.VersionCompare(current.version, expected)
1643
1644             if current is None or compare < 0:
1645                 # This is still valid, the current version in testing is older than
1646                 # the new version we wait for, or there is none in testing yet
1647
1648                 # Check if the source we look at is affected by this.
1649                 if sourcepkg in t['packages']:
1650                     # The source is affected, lets reject it.
1651
1652                     rejectmsg = "%s: part of the %s transition.\n\n" % (
1653                         sourcepkg, trans)
1654
1655                     if current is not None:
1656                         currentlymsg = "at version %s" % (current.version)
1657                     else:
1658                         currentlymsg = "not present in testing"
1659
1660                     rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1661
1662                     rejectmsg += "\n".join(textwrap.wrap("""Your package
1663 is part of a testing transition designed to get %s migrated (it is
1664 currently %s, we need version %s).  This transition is managed by the
1665 Release Team, and %s is the Release-Team member responsible for it.
1666 Please mail debian-release@lists.debian.org or contact %s directly if you
1667 need further assistance.  You might want to upload to experimental until this
1668 transition is done."""
1669                             % (source, currentlymsg, expected,t["rm"], t["rm"])))
1670
1671                     self.rejects.append(rejectmsg)
1672                     return
1673
1674     ###########################################################################
1675     # End check_signed_by_key checks
1676     ###########################################################################
1677
1678     def build_summaries(self):
1679         """ Build a summary of changes the upload introduces. """
1680
1681         (byhand, new, summary, override_summary) = self.pkg.file_summary()
1682
1683         short_summary = summary
1684
1685         # This is for direport's benefit...
1686         f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1687
1688         if byhand or new:
1689             summary += "Changes: " + f
1690
1691         summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1692
1693         summary += self.announce(short_summary, 0)
1694
1695         return (summary, short_summary)
1696
1697     ###########################################################################
1698
1699     def close_bugs(self, summary, action):
1700         """
1701         Send mail to close bugs as instructed by the closes field in the changes file.
1702         Also add a line to summary if any work was done.
1703
1704         @type summary: string
1705         @param summary: summary text, as given by L{build_summaries}
1706
1707         @type action: bool
1708         @param action: Set to false no real action will be done.
1709
1710         @rtype: string
1711         @return: summary. If action was taken, extended by the list of closed bugs.
1712
1713         """
1714
1715         template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1716
1717         bugs = self.pkg.changes["closes"].keys()
1718
1719         if not bugs:
1720             return summary
1721
1722         bugs.sort()
1723         summary += "Closing bugs: "
1724         for bug in bugs:
1725             summary += "%s " % (bug)
1726             if action:
1727                 self.update_subst()
1728                 self.Subst["__BUG_NUMBER__"] = bug
1729                 if self.pkg.changes["distribution"].has_key("stable"):
1730                     self.Subst["__STABLE_WARNING__"] = """
1731 Note that this package is not part of the released stable Debian
1732 distribution.  It may have dependencies on other unreleased software,
1733 or other instabilities.  Please take care if you wish to install it.
1734 The update will eventually make its way into the next released Debian
1735 distribution."""
1736                 else:
1737                     self.Subst["__STABLE_WARNING__"] = ""
1738                 mail_message = utils.TemplateSubst(self.Subst, template)
1739                 utils.send_mail(mail_message)
1740
1741                 # Clear up after ourselves
1742                 del self.Subst["__BUG_NUMBER__"]
1743                 del self.Subst["__STABLE_WARNING__"]
1744
1745         if action and self.logger:
1746             self.logger.log(["closing bugs"] + bugs)
1747
1748         summary += "\n"
1749
1750         return summary
1751
1752     ###########################################################################
1753
1754     def announce(self, short_summary, action):
1755         """
1756         Send an announce mail about a new upload.
1757
1758         @type short_summary: string
1759         @param short_summary: Short summary text to include in the mail
1760
1761         @type action: bool
1762         @param action: Set to false no real action will be done.
1763
1764         @rtype: string
1765         @return: Textstring about action taken.
1766
1767         """
1768
1769         cnf = Config()
1770         announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
1771
1772         # Only do announcements for source uploads with a recent dpkg-dev installed
1773         if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
1774            self.pkg.changes["architecture"].has_key("source"):
1775             return ""
1776
1777         lists_done = {}
1778         summary = ""
1779
1780         self.Subst["__SHORT_SUMMARY__"] = short_summary
1781
1782         for dist in self.pkg.changes["distribution"].keys():
1783             announce_list = cnf.Find("Suite::%s::Announce" % (dist))
1784             if announce_list == "" or lists_done.has_key(announce_list):
1785                 continue
1786
1787             lists_done[announce_list] = 1
1788             summary += "Announcing to %s\n" % (announce_list)
1789
1790             if action:
1791                 self.update_subst()
1792                 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
1793                 if cnf.get("Dinstall::TrackingServer") and \
1794                    self.pkg.changes["architecture"].has_key("source"):
1795                     trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
1796                     self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
1797
1798                 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
1799                 utils.send_mail(mail_message)
1800
1801                 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
1802
1803         if cnf.FindB("Dinstall::CloseBugs"):
1804             summary = self.close_bugs(summary, action)
1805
1806         del self.Subst["__SHORT_SUMMARY__"]
1807
1808         return summary
1809
1810     ###########################################################################
    @session_wrapper
    def accept (self, summary, short_summary, session=None):
        """
        Accept an upload.

        This moves all files referenced from the .changes into the pool,
        sends the accepted mail, announces to lists, closes bugs and
        also checks for override disparities. If enabled it will write out
        the version history for the BTS Version Tracking and will finally call
        L{queue_build}.

        @type summary: string
        @param summary: Summary text

        @type short_summary: string
        @param short_summary: Short summary
        """

        cnf = Config()
        stats = SummaryStats()

        print "Installing."
        self.logger.log(["installing changes", self.pkg.changes_file])

        # Pool file DB objects accumulated here; used at the end to populate
        # the copy (buildd) queues.
        poolfiles = []

        # Add the .dsc file to the DB first
        for newfile, entry in self.pkg.files.items():
            if entry["type"] == "dsc":
                # NOTE(review): source, dsc_component and dsc_location_id are
                # only bound when a dsc entry exists; the sourceful branch
                # below relies on that — presumably guaranteed for sourceful
                # uploads, TODO confirm.
                source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
                for j in pfs:
                    poolfiles.append(j)

        # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
        for newfile, entry in self.pkg.files.items():
            if entry["type"] == "deb":
                poolfiles.append(add_deb_to_db(self, newfile, session))

        # If this is a sourceful diff only upload that is moving
        # cross-component we need to copy the .orig files into the new
        # component too for the same reasons as above.
        # XXX: mhy: I think this should be in add_dsc_to_db
        if self.pkg.changes["architecture"].has_key("source"):
            for orig_file in self.pkg.orig_files.keys():
                if not self.pkg.orig_files[orig_file].has_key("id"):
                    continue # Skip if it's not in the pool
                orig_file_id = self.pkg.orig_files[orig_file]["id"]
                if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
                    continue # Skip if the location didn't change

                # Do the move
                oldf = get_poolfile_by_id(orig_file_id, session)
                old_filename = os.path.join(oldf.location.path, oldf.filename)
                old_dat = {'size': oldf.filesize,   'md5sum': oldf.md5sum,
                           'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}

                new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))

                # TODO: Care about size/md5sum collisions etc
                (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)

                # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
                if newf is None:
                    utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
                    newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)

                    session.flush()

                    # Don't reference the old file from this changes
                    # NOTE(review): this mutates poolfiles while iterating it,
                    # which only behaves correctly when at most one entry
                    # matches — verify that invariant holds.
                    for p in poolfiles:
                        if p.file_id == oldf.file_id:
                            poolfiles.remove(p)

                    poolfiles.append(newf)

                    # Fix up the DSC references
                    toremove = []

                    for df in source.srcfiles:
                        if df.poolfile.file_id == oldf.file_id:
                            # Add a new DSC entry and mark the old one for deletion
                            # Don't do it in the loop so we don't change the thing we're iterating over
                            newdscf = DSCFile()
                            newdscf.source_id = source.source_id
                            newdscf.poolfile_id = newf.file_id
                            session.add(newdscf)

                            toremove.append(df)

                    for df in toremove:
                        session.delete(df)

                    # Flush our changes
                    session.flush()

                    # Make sure that our source object is up-to-date
                    session.expire(source)

        # Install the files into the pool
        for newfile, entry in self.pkg.files.items():
            destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
            utils.move(newfile, destination)
            self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
            stats.accept_bytes += float(entry["size"])

        # Copy the .changes file across for suite which need it.
        copy_changes = {}
        for suite_name in self.pkg.changes["distribution"].keys():
            if cnf.has_key("Suite::%s::CopyChanges" % (suite_name)):
                copy_changes[cnf["Suite::%s::CopyChanges" % (suite_name)]] = ""

        for dest in copy_changes.keys():
            utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))

        # We're done - commit the database changes
        session.commit()
        # Our SQL session will automatically start a new transaction after
        # the last commit

        # Move the .changes into the 'done' directory
        utils.move(self.pkg.changes_file,
                   os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))

        # Log urgency for sourceful uploads so britney can use it.
        if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
            UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])

        # Send the accepted mail and announce to any per-suite lists.
        self.update_subst()
        self.Subst["__SUITE__"] = ""
        self.Subst["__SUMMARY__"] = summary
        mail_message = utils.TemplateSubst(self.Subst,
                                           os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
        utils.send_mail(mail_message)
        self.announce(short_summary, 1)

        ## Helper stuff for DebBugs Version Tracking
        if cnf.Find("Dir::Queue::BTSVersionTrack"):
            # ??? once queue/* is cleared on *.d.o and/or reprocessed
            # the conditionalization on dsc["bts changelog"] should be
            # dropped.

            # Write out the version history from the changelog
            if self.pkg.changes["architecture"].has_key("source") and \
               self.pkg.dsc.has_key("bts changelog"):

                # Write to a temp file first, then rename into place so the
                # .versions file appears atomically.
                (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
                version_history = os.fdopen(fd, 'w')
                version_history.write(self.pkg.dsc["bts changelog"])
                version_history.close()
                filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
                                      self.pkg.changes_file[:-8]+".versions")
                os.rename(temp_filename, filename)
                os.chmod(filename, 0644)

            # Write out the binary -> source mapping.
            (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
            debinfo = os.fdopen(fd, 'w')
            for name, entry in sorted(self.pkg.files.items()):
                if entry["type"] == "deb":
                    line = " ".join([entry["package"], entry["version"],
                                     entry["architecture"], entry["source package"],
                                     entry["source version"]])
                    debinfo.write(line+"\n")
            debinfo.close()
            filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
                                  self.pkg.changes_file[:-8]+".debinfo")
            os.rename(temp_filename, filename)
            os.chmod(filename, 0644)

        session.commit()

        # Set up our copy queues (e.g. buildd queues)
        for suite_name in self.pkg.changes["distribution"].keys():
            suite = get_suite(suite_name, session)
            for q in suite.copy_queues:
                for f in poolfiles:
                    q.add_file_from_pool(f)

        session.commit()

        # Finally...
        stats.accept_count += 1
1992
1993     def check_override(self):
1994         """
1995         Checks override entries for validity. Mails "Override disparity" warnings,
1996         if that feature is enabled.
1997
1998         Abandons the check if
1999           - override disparity checks are disabled
2000           - mail sending is disabled
2001         """
2002
2003         cnf = Config()
2004
2005         # Abandon the check if override disparity checks have been disabled
2006         if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
2007             return
2008
2009         summary = self.pkg.check_override()
2010
2011         if summary == "":
2012             return
2013
2014         overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2015
2016         self.update_subst()
2017         self.Subst["__SUMMARY__"] = summary
2018         mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2019         utils.send_mail(mail_message)
2020         del self.Subst["__SUMMARY__"]
2021
2022     ###########################################################################
2023
2024     def remove(self, from_dir=None):
2025         """
2026         Used (for instance) in p-u to remove the package from unchecked
2027
2028         Also removes the package from holding area.
2029         """
2030         if from_dir is None:
2031             from_dir = self.pkg.directory
2032         h = Holding()
2033
2034         for f in self.pkg.files.keys():
2035             os.unlink(os.path.join(from_dir, f))
2036             if os.path.exists(os.path.join(h.holding_dir, f)):
2037                 os.unlink(os.path.join(h.holding_dir, f))
2038
2039         os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2040         if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2041             os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2042
2043     ###########################################################################
2044
2045     def move_to_queue (self, queue):
2046         """
2047         Move files to a destination queue using the permissions in the table
2048         """
2049         h = Holding()
2050         utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2051                    queue.path, perms=int(queue.change_perms, 8))
2052         for f in self.pkg.files.keys():
2053             utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
2054
2055     ###########################################################################
2056
    def force_reject(self, reject_files):
        """
        Forcefully move files from the current directory to the
        reject directory.  If any file already exists in the reject
        directory it will be moved to the morgue to make way for
        the new file.

        @type reject_files: iterable
        @param reject_files: names of the files to move into the reject queue

        """

        cnf = Config()

        for file_entry in reject_files:
            # Skip any files which don't exist or which we don't have permission to copy.
            if os.access(file_entry, os.R_OK) == 0:
                continue

            dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)

            try:
                # O_EXCL means we atomically claim the destination name.
                dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
            except OSError, e:
                # File exists?  Let's try and move it to the morgue
                if e.errno == errno.EEXIST:
                    morgue_file = os.path.join(cnf["Dir::Morgue"], cnf["Dir::MorgueReject"], file_entry)
                    try:
                        # Find an unused name (foo, foo.1, foo.2, ...) in the morgue.
                        morgue_file = utils.find_next_free(morgue_file)
                    except NoFreeFilenameError:
                        # Something's either gone badly Pete Tong, or
                        # someone is trying to exploit us.
                        utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file_entry))
                        return
                    utils.move(dest_file, morgue_file, perms=0660)
                    try:
                        # Retry the claim now the old file is out of the way.
                        dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
                    except OSError, e:
                        # Likewise
                        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
                        return
                else:
                    raise
            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file_entry, dest_file, 1, perms=0660)
            os.close(dest_fd)
2104
2105     ###########################################################################
    def do_reject (self, manual=0, reject_message="", notes=""):
        """
        Reject an upload. If C{manual} is true and no reject message was
        given, spawn an editor (pre-filled with any C{notes}) so the user
        can write one.

        @type manual: bool
        @param manual: manual or automated rejection

        @type reject_message: string
        @param reject_message: A reject message

        @type notes: list
        @param notes: note objects (with author/version/notedate/comment
          attributes) used to pre-fill the editor buffer; may be empty

        @return: 0 on success, 1 if the operator abandoned a manual
          rejection; exits the process entirely if the operator chose Quit

        """
        # If we weren't given a manual rejection message, spawn an
        # editor so the user can add one in...
        if manual and not reject_message:
            (fd, temp_filename) = utils.temp_filename()
            temp_file = os.fdopen(fd, 'w')
            if len(notes) > 0:
                for note in notes:
                    temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
                                    % (note.author, note.version, note.notedate, note.comment))
            temp_file.close()
            editor = os.environ.get("EDITOR","vi")
            answer = 'E'
            # Keep re-opening the editor for as long as the operator answers 'E'dit.
            while answer == 'E':
                os.system("%s %s" % (editor, temp_filename))
                temp_fh = utils.open_file(temp_filename)
                reject_message = "".join(temp_fh.readlines())
                temp_fh.close()
                print "Reject message:"
                print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
                prompt = "[R]eject, Edit, Abandon, Quit ?"
                answer = "XXX"
                # Loop until the answer is one of the letters in the prompt;
                # an empty answer takes the [bracketed] default.
                while prompt.find(answer) == -1:
                    answer = utils.our_raw_input(prompt)
                    m = re_default_answer.search(prompt)
                    if answer == "":
                        answer = m.group(1)
                    answer = answer[:1].upper()
            os.unlink(temp_filename)
            if answer == 'A':
                return 1
            elif answer == 'Q':
                sys.exit(0)

        print "Rejecting.\n"

        cnf = Config()

        # <foo>.changes -> <foo>.reason in the reject directory.
        reason_filename = self.pkg.changes_file[:-8] + ".reason"
        reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)

        # Move all the files into the reject directory
        reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
        self.force_reject(reject_files)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reason_filename):
            os.unlink(reason_filename)
        reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)

        rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")

        self.update_subst()
        if not manual:
            self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
            self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
            self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
            os.write(reason_fd, reject_message)
            reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
        else:
            # Build up the rejection email
            user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
            self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
            self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
            self.Subst["__REJECT_MESSAGE__"] = ""
            self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
            reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
            # Write the rejection email out as the <foo>.reason file
            os.write(reason_fd, reject_mail_message)

        # Clean the per-rejection keys back out of the shared Subst dict.
        del self.Subst["__REJECTOR_ADDRESS__"]
        del self.Subst["__MANUAL_REJECT_MESSAGE__"]
        del self.Subst["__CC__"]

        os.close(reason_fd)

        # Send the rejection mail
        utils.send_mail(reject_mail_message)

        if self.logger:
            self.logger.log(["rejected", self.pkg.changes_file])

        return 0
2203
2204     ################################################################################
2205     def in_override_p(self, package, component, suite, binary_type, filename, session):
2206         """
2207         Check if a package already has override entries in the DB
2208
2209         @type package: string
2210         @param package: package name
2211
2212         @type component: string
2213         @param component: database id of the component
2214
2215         @type suite: int
2216         @param suite: database id of the suite
2217
2218         @type binary_type: string
2219         @param binary_type: type of the package
2220
2221         @type filename: string
2222         @param filename: filename we check
2223
2224         @return: the database result. But noone cares anyway.
2225
2226         """
2227
2228         cnf = Config()
2229
2230         if binary_type == "": # must be source
2231             file_type = "dsc"
2232         else:
2233             file_type = binary_type
2234
2235         # Override suite name; used for example with proposed-updates
2236         if cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
2237             suite = cnf["Suite::%s::OverrideSuite" % (suite)]
2238
2239         result = get_override(package, suite, component, file_type, session)
2240
2241         # If checking for a source package fall back on the binary override type
2242         if file_type == "dsc" and len(result) < 1:
2243             result = get_override(package, suite, component, ['deb', 'udeb'], session)
2244
2245         # Remember the section and priority so we can check them later if appropriate
2246         if len(result) > 0:
2247             result = result[0]
2248             self.pkg.files[filename]["override section"] = result.section.section
2249             self.pkg.files[filename]["override priority"] = result.priority.priority
2250             return result
2251
2252         return None
2253
2254     ################################################################################
2255     def get_anyversion(self, sv_list, suite):
2256         """
2257         @type sv_list: list
2258         @param sv_list: list of (suite, version) tuples to check
2259
2260         @type suite: string
2261         @param suite: suite name
2262
2263         Description: TODO
2264         """
2265         Cnf = Config()
2266         anyversion = None
2267         anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
2268         for (s, v) in sv_list:
2269             if s in [ x.lower() for x in anysuite ]:
2270                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2271                     anyversion = v
2272
2273         return anyversion
2274
2275     ################################################################################
2276
    def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
        """
        Ensure versions are newer than existing packages in target
        suites and that cross-suite version checking rules as
        set out in the conf file are satisfied.

        Appends to self.rejects / self.warnings and may mark suites in
        self.pkg.changes["propdistribution"] for propagation; returns nothing.

        @type sv_list: list
        @param sv_list: list of (suite, version) tuples to check

        @type filename: string
        @param filename: name of the upload file, used only in reject/warning messages

        @type new_version: string
        @param new_version: version of the incoming package

        @type sourceful: bool
        @param sourceful: whether the upload includes source; the
          MustBeNewerThan rule is only enforced for sourceful uploads
        """

        cnf = Config()

        # Check versions for each target suite
        for target_suite in self.pkg.changes["distribution"].keys():
            must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
            must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]

            # Enforce "must be newer than target suite" even if conffile omits it
            if target_suite not in must_be_newer_than:
                must_be_newer_than.append(target_suite)

            for (suite, existent_version) in sv_list:
                # vercmp: <0 if new_version older, 0 if equal, >0 if newer.
                vercmp = apt_pkg.VersionCompare(new_version, existent_version)

                if suite in must_be_newer_than and sourceful and vercmp < 1:
                    self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))

                if suite in must_be_older_than and vercmp > -1:
                    # The upload conflicts with a suite it must stay older
                    # than; see whether a distribution-version mapping lets
                    # us save it by propagating to another suite instead.
                    cansave = 0

                    if self.pkg.changes.get('distribution-version', {}).has_key(suite):
                        # we really use the other suite, ignoring the conflicting one ...
                        addsuite = self.pkg.changes["distribution-version"][suite]

                        add_version = self.get_anyversion(sv_list, addsuite)
                        target_version = self.get_anyversion(sv_list, target_suite)

                        if not add_version:
                            # not add_version can only happen if we map to a suite
                            # that doesn't enhance the suite we're propup'ing from.
                            # so "propup-ver x a b c; map a d" is a problem only if
                            # d doesn't enhance a.
                            #
                            # i think we could always propagate in this case, rather
                            # than complaining. either way, this isn't a REJECT issue
                            #
                            # And - we really should complain to the dorks who configured dak
                            self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite))
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            cansave = 1
                        elif not target_version:
                            # not targets_version is true when the package is NEW
                            # we could just stick with the "...old version..." REJECT
                            # for this, I think.
                            self.rejects.append("Won't propogate NEW packages.")
                        elif apt_pkg.VersionCompare(new_version, add_version) < 0:
                            # propogation would be redundant. no need to reject though.
                            self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
                            cansave = 1
                        elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
                             apt_pkg.VersionCompare(add_version, target_version) >= 0:
                            # propogate!!
                            self.warnings.append("Propogating upload to %s" % (addsuite))
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            cansave = 1

                    # No mapping saved us: reject the version conflict.
                    if not cansave:
                        self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2353
2354     ################################################################################
2355     def check_binary_against_db(self, filename, session):
2356         # Ensure version is sane
2357         q = session.query(BinAssociation)
2358         q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[filename]["package"])
2359         q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[filename]["architecture"], 'all']))
2360
2361         self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
2362                                        filename, self.pkg.files[filename]["version"], sourceful=False)
2363
2364         # Check for any existing copies of the file
2365         q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2366         q = q.filter_by(version=self.pkg.files[filename]["version"])
2367         q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2368
2369         if q.count() > 0:
2370             self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
2371
2372     ################################################################################
2373
2374     def check_source_against_db(self, filename, session):
2375         source = self.pkg.dsc.get("source")
2376         version = self.pkg.dsc.get("version")
2377
2378         # Ensure version is sane
2379         q = session.query(SrcAssociation)
2380         q = q.join(DBSource).filter(DBSource.source==source)
2381
2382         self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
2383                                        filename, version, sourceful=True)
2384
2385     ################################################################################
2386     def check_dsc_against_db(self, filename, session):
2387         """
2388
2389         @warning: NB: this function can remove entries from the 'files' index [if
2390          the orig tarball is a duplicate of the one in the archive]; if
2391          you're iterating over 'files' and call this function as part of
2392          the loop, be sure to add a check to the top of the loop to
2393          ensure you haven't just tried to dereference the deleted entry.
2394
2395         """
2396
2397         Cnf = Config()
2398         self.pkg.orig_files = {} # XXX: do we need to clear it?
2399         orig_files = self.pkg.orig_files
2400
2401         # Try and find all files mentioned in the .dsc.  This has
2402         # to work harder to cope with the multiple possible
2403         # locations of an .orig.tar.gz.
2404         # The ordering on the select is needed to pick the newest orig
2405         # when it exists in multiple places.
2406         for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2407             found = None
2408             if self.pkg.files.has_key(dsc_name):
2409                 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2410                 actual_size = int(self.pkg.files[dsc_name]["size"])
2411                 found = "%s in incoming" % (dsc_name)
2412
2413                 # Check the file does not already exist in the archive
2414                 ql = get_poolfile_like_name(dsc_name, session)
2415
2416                 # Strip out anything that isn't '%s' or '/%s$'
2417                 for i in ql:
2418                     if not i.filename.endswith(dsc_name):
2419                         ql.remove(i)
2420
2421                 # "[dak] has not broken them.  [dak] has fixed a
2422                 # brokenness.  Your crappy hack exploited a bug in
2423                 # the old dinstall.
2424                 #
2425                 # "(Come on!  I thought it was always obvious that
2426                 # one just doesn't release different files with
2427                 # the same name and version.)"
2428                 #                        -- ajk@ on d-devel@l.d.o
2429
2430                 if len(ql) > 0:
2431                     # Ignore exact matches for .orig.tar.gz
2432                     match = 0
2433                     if re_is_orig_source.match(dsc_name):
2434                         for i in ql:
2435                             if self.pkg.files.has_key(dsc_name) and \
2436                                int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2437                                self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2438                                 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2439                                 # TODO: Don't delete the entry, just mark it as not needed
2440                                 # This would fix the stupidity of changing something we often iterate over
2441                                 # whilst we're doing it
2442                                 del self.pkg.files[dsc_name]
2443                                 dsc_entry["files id"] = i.file_id
2444                                 if not orig_files.has_key(dsc_name):
2445                                     orig_files[dsc_name] = {}
2446                                 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2447                                 match = 1
2448
2449                                 # Don't bitch that we couldn't find this file later
2450                                 try:
2451                                     self.later_check_files.remove(dsc_name)
2452                                 except ValueError:
2453                                     pass
2454
2455
2456                     if not match:
2457                         self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2458
2459             elif re_is_orig_source.match(dsc_name):
2460                 # Check in the pool
2461                 ql = get_poolfile_like_name(dsc_name, session)
2462
2463                 # Strip out anything that isn't '%s' or '/%s$'
2464                 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2465                 for i in ql:
2466                     if not i.filename.endswith(dsc_name):
2467                         ql.remove(i)
2468
2469                 if len(ql) > 0:
2470                     # Unfortunately, we may get more than one match here if,
2471                     # for example, the package was in potato but had an -sa
2472                     # upload in woody.  So we need to choose the right one.
2473
2474                     # default to something sane in case we don't match any or have only one
2475                     x = ql[0]
2476
2477                     if len(ql) > 1:
2478                         for i in ql:
2479                             old_file = os.path.join(i.location.path, i.filename)
2480                             old_file_fh = utils.open_file(old_file)
2481                             actual_md5 = apt_pkg.md5sum(old_file_fh)
2482                             old_file_fh.close()
2483                             actual_size = os.stat(old_file)[stat.ST_SIZE]
2484                             if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2485                                 x = i
2486
2487                     old_file = os.path.join(i.location.path, i.filename)
2488                     old_file_fh = utils.open_file(old_file)
2489                     actual_md5 = apt_pkg.md5sum(old_file_fh)
2490                     old_file_fh.close()
2491                     actual_size = os.stat(old_file)[stat.ST_SIZE]
2492                     found = old_file
2493                     suite_type = x.location.archive_type
2494                     # need this for updating dsc_files in install()
2495                     dsc_entry["files id"] = x.file_id
2496                     # See install() in process-accepted...
2497                     if not orig_files.has_key(dsc_name):
2498                         orig_files[dsc_name] = {}
2499                     orig_files[dsc_name]["id"] = x.file_id
2500                     orig_files[dsc_name]["path"] = old_file
2501                     orig_files[dsc_name]["location"] = x.location.location_id
2502                 else:
2503                     # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2504                     # Not there? Check the queue directories...
2505                     for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2506                         if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2507                             continue
2508                         in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2509                         if os.path.exists(in_otherdir):
2510                             in_otherdir_fh = utils.open_file(in_otherdir)
2511                             actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2512                             in_otherdir_fh.close()
2513                             actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2514                             found = in_otherdir
2515                             if not orig_files.has_key(dsc_name):
2516                                 orig_files[dsc_name] = {}
2517                             orig_files[dsc_name]["path"] = in_otherdir
2518
2519                     if not found:
2520                         self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2521                         continue
2522             else:
2523                 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2524                 continue
2525             if actual_md5 != dsc_entry["md5sum"]:
2526                 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2527             if actual_size != int(dsc_entry["size"]):
2528                 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2529
2530     ################################################################################
2531     # This is used by process-new and process-holding to recheck a changes file
2532     # at the time we're running.  It mainly wraps various other internal functions
2533     # and is similar to accepted_checks - these should probably be tidied up
2534     # and combined
2535     def recheck(self, session):
2536         cnf = Config()
2537         for f in self.pkg.files.keys():
2538             # The .orig.tar.gz can disappear out from under us is it's a
2539             # duplicate of one in the archive.
2540             if not self.pkg.files.has_key(f):
2541                 continue
2542
2543             entry = self.pkg.files[f]
2544
2545             # Check that the source still exists
2546             if entry["type"] == "deb":
2547                 source_version = entry["source version"]
2548                 source_package = entry["source package"]
2549                 if not self.pkg.changes["architecture"].has_key("source") \
2550                    and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
2551                     source_epochless_version = re_no_epoch.sub('', source_version)
2552                     dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2553                     found = False
2554                     for q in ["Embargoed", "Unembargoed", "Newstage"]:
2555                         if cnf.has_key("Dir::Queue::%s" % (q)):
2556                             if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
2557                                 found = True
2558                     if not found:
2559                         self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2560
2561             # Version and file overwrite checks
2562             if entry["type"] == "deb":
2563                 self.check_binary_against_db(f, session)
2564             elif entry["type"] == "dsc":
2565                 self.check_source_against_db(f, session)
2566                 self.check_dsc_against_db(f, session)
2567
2568     ################################################################################
2569     def accepted_checks(self, overwrite_checks, session):
2570         # Recheck anything that relies on the database; since that's not
2571         # frozen between accept and our run time when called from p-a.
2572
2573         # overwrite_checks is set to False when installing to stable/oldstable
2574
2575         propogate={}
2576         nopropogate={}
2577
2578         # Find the .dsc (again)
2579         dsc_filename = None
2580         for f in self.pkg.files.keys():
2581             if self.pkg.files[f]["type"] == "dsc":
2582                 dsc_filename = f
2583
2584         for checkfile in self.pkg.files.keys():
2585             # The .orig.tar.gz can disappear out from under us is it's a
2586             # duplicate of one in the archive.
2587             if not self.pkg.files.has_key(checkfile):
2588                 continue
2589
2590             entry = self.pkg.files[checkfile]
2591
2592             # Check that the source still exists
2593             if entry["type"] == "deb":
2594                 source_version = entry["source version"]
2595                 source_package = entry["source package"]
2596                 if not self.pkg.changes["architecture"].has_key("source") \
2597                    and not source_exists(source_package, source_version,  self.pkg.changes["distribution"].keys()):
2598                     self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2599
2600             # Version and file overwrite checks
2601             if overwrite_checks:
2602                 if entry["type"] == "deb":
2603                     self.check_binary_against_db(checkfile, session)
2604                 elif entry["type"] == "dsc":
2605                     self.check_source_against_db(checkfile, session)
2606                     self.check_dsc_against_db(dsc_filename, session)
2607
2608             # propogate in the case it is in the override tables:
2609             for suite in self.pkg.changes.get("propdistribution", {}).keys():
2610                 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2611                     propogate[suite] = 1
2612                 else:
2613                     nopropogate[suite] = 1
2614
2615         for suite in propogate.keys():
2616             if suite in nopropogate:
2617                 continue
2618             self.pkg.changes["distribution"][suite] = 1
2619
2620         for checkfile in self.pkg.files.keys():
2621             # Check the package is still in the override tables
2622             for suite in self.pkg.changes["distribution"].keys():
2623                 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2624                     self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2625
2626     ################################################################################
2627     # This is not really a reject, but an unaccept, but since a) the code for
2628     # that is non-trivial (reopen bugs, unannounce etc.), b) this should be
2629     # extremely rare, for now we'll go with whining at our admin folks...
2630
2631     def do_unaccept(self):
2632         cnf = Config()
2633
2634         self.update_subst()
2635         self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2636         self.Subst["__REJECT_MESSAGE__"] = self.package_info()
2637         self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2638         self.Subst["__BCC__"] = "X-DAK: dak process-accepted"
2639         if cnf.has_key("Dinstall::Bcc"):
2640             self.Subst["__BCC__"] += "\nBcc: %s" % (cnf["Dinstall::Bcc"])
2641
2642         template = os.path.join(cnf["Dir::Templates"], "process-accepted.unaccept")
2643
2644         reject_mail_message = utils.TemplateSubst(self.Subst, template)
2645
2646         # Write the rejection email out as the <foo>.reason file
2647         reason_filename = os.path.basename(self.pkg.changes_file[:-8]) + ".reason"
2648         reject_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2649
2650         # If we fail here someone is probably trying to exploit the race
2651         # so let's just raise an exception ...
2652         if os.path.exists(reject_filename):
2653             os.unlink(reject_filename)
2654
2655         fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2656         os.write(fd, reject_mail_message)
2657         os.close(fd)
2658
2659         utils.send_mail(reject_mail_message)
2660
2661         del self.Subst["__REJECTOR_ADDRESS__"]
2662         del self.Subst["__REJECT_MESSAGE__"]
2663         del self.Subst["__CC__"]
2664
2665     ################################################################################
2666     # If any file of an upload has a recent mtime then chances are good
2667     # the file is still being uploaded.
2668
2669     def upload_too_new(self):
2670         cnf = Config()
2671         too_new = False
2672         # Move back to the original directory to get accurate time stamps
2673         cwd = os.getcwd()
2674         os.chdir(self.pkg.directory)
2675         file_list = self.pkg.files.keys()
2676         file_list.extend(self.pkg.dsc_files.keys())
2677         file_list.append(self.pkg.changes_file)
2678         for f in file_list:
2679             try:
2680                 last_modified = time.time()-os.path.getmtime(f)
2681                 if last_modified < int(cnf["Dinstall::SkipTime"]):
2682                     too_new = True
2683                     break
2684             except:
2685                 pass
2686
2687         os.chdir(cwd)
2688         return too_new