#!/usr/bin/env python
# vim:set et sw=4:

"""
Queue utility functions for dak

@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2001 - 2006 James Troup <james@nocrew.org>
@copyright: 2009  Joerg Jaspert <joerg@debian.org>
@license: GNU General Public License version 2 or later
"""

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

###############################################################################

import errno
import os
import stat
import sys
import time
import apt_inst
import apt_pkg
import utils
import commands
import shutil
import textwrap
from types import *
from sqlalchemy.sql.expression import desc
from sqlalchemy.orm.exc import NoResultFound

import yaml

from dak_exceptions import *
from changes import *
from regexes import *
from config import Config
from holding import Holding
from urgencylog import UrgencyLog
from dbconn import *
from summarystats import SummaryStats
from utils import parse_changes, check_dsc_files
from textutils import fix_maintainer
from binary import Binary
from lintian import parse_lintian_output, generate_reject_messages

###############################################################################
def get_type(f, session):
    """
    Get the file type of C{f}

    @type f: dict
    @param f: file entry from Changes object

    @type session: SQLA Session
    @param session: SQL Alchemy session object

    @rtype: string
    @return: filetype

    """
    # Determine the type
    if f.has_key("dbtype"):
        file_type = f["dbtype"]
    elif re_source_ext.match(f["type"]):
        file_type = "dsc"
    else:
        # file_type is unbound on this branch; report the raw type instead.
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (f["type"]))

    # Validate the override type
    type_id = get_override_type(file_type, session)
    if type_id is None:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))

    return file_type

################################################################################

# Determine what parts in a .changes are NEW

def determine_new(changes, files, warn=1):
    """
    Determine what parts in a C{changes} file are NEW.

    @type changes: Upload.Pkg.changes dict
    @param changes: Changes dictionary

    @type files: Upload.Pkg.files dict
    @param files: Files dictionary

    @type warn: bool
    @param warn: Warn if overrides are added for (old)stable

    @rtype: dict
    @return: dictionary of NEW components.

    """
    new = {}

    session = DBConn().session()

    # Build up a list of potentially new things
    for name, f in files.items():
        # Skip byhand elements
#        if f["type"] == "byhand":
#            continue
        pkg = f["package"]
        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f, session)
        component = f["component"]

        if file_type == "dsc":
            priority = "source"

        if not new.has_key(pkg):
            new[pkg] = {}
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        else:
            old_type = new[pkg]["type"]
            if old_type != file_type:
                # source gets trumped by deb or udeb
                if old_type == "dsc":
                    new[pkg]["priority"] = priority
                    new[pkg]["section"] = section
                    new[pkg]["type"] = file_type
                    new[pkg]["component"] = component

        new[pkg]["files"].append(name)

        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]

    # Fix up the list of target suites
    cnf = Config()
    for suite in changes["suite"].keys():
        override = cnf.Find("Suite::%s::OverrideSuite" % (suite))
        if override:
            (olderr, newerr) = (get_suite(suite, session) is None,
                                get_suite(override, session) is None)
            if olderr or newerr:
                (oinv, ninv) = ("", "")
                if olderr: oinv = "invalid "
                if newerr: ninv = "invalid "
                print "warning: overriding %ssuite %s to %ssuite %s" % (
                        oinv, suite, ninv, override)
            del changes["suite"][suite]
            changes["suite"][override] = 1

    for suite in changes["suite"].keys():
        for pkg in new.keys():
            ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
            if len(ql) > 0:
                for file_entry in new[pkg]["files"]:
                    if files[file_entry].has_key("new"):
                        del files[file_entry]["new"]
                del new[pkg]

    if warn:
        for s in ['stable', 'oldstable']:
            if changes["suite"].has_key(s):
                print "WARNING: overrides will be added for %s!" % s
        for pkg in new.keys():
            if new[pkg].has_key("othercomponents"):
                print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])

    session.close()

    return new

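# For reference, each value in the dict returned by determine_new() has the
# shape built up above, e.g. (illustrative only):
#
#   new["foo"] = {
#       "priority": "optional",     # forced to "source" for .dsc entries
#       "section": "utils",
#       "type": "deb",              # "deb", "udeb" or "dsc"
#       "component": "main",
#       "files": ["foo_1.0-1_amd64.deb"],
#       # "othercomponents" is set if the package already exists elsewhere
#   }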
################################################################################

def check_valid(new):
    """
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages can not be udeb
      - source priority can only be assigned to dsc file types

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.

    """
    for pkg in new.keys():
        section_name = new[pkg]["section"]
        priority_name = new[pkg]["priority"]
        file_type = new[pkg]["type"]

        section = get_section(section_name)
        if section is None:
            new[pkg]["section id"] = -1
        else:
            new[pkg]["section id"] = section.section_id

        priority = get_priority(priority_name)
        if priority is None:
            new[pkg]["priority id"] = -1
        else:
            new[pkg]["priority id"] = priority.priority_id

        # Sanity checks
        di = section_name.find("debian-installer") != -1

        # If d-i, we must be udeb and vice-versa
        if     (di and file_type not in ("udeb", "dsc")) or \
           (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1

        # If dsc we need to be source and vice-versa
        # (compare the priority *name*; priority is a database object here)
        if (priority_name == "source" and file_type != "dsc") or \
           (priority_name != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1

###############################################################################

def check_status(files):
    new = byhand = 0
    for f in files.keys():
        if files[f].has_key("byhand"):
            byhand = 1
        elif files[f].has_key("new"):
            new = 1
    return (new, byhand)

###############################################################################

# Used by Upload.check_timestamps
class TarTime(object):
    def __init__(self, future_cutoff, past_cutoff):
        self.reset()
        self.future_cutoff = future_cutoff
        self.past_cutoff = past_cutoff

    def reset(self):
        self.future_files = {}
        self.ancient_files = {}

    def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
        if MTime > self.future_cutoff:
            self.future_files[Name] = MTime
        if MTime < self.past_cutoff:
            self.ancient_files[Name] = MTime

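# TarTime.callback is shaped for the old python-apt per-member extraction
# callback.  A minimal sketch of the intended use, assuming the old
# apt_inst.debExtract interface and hypothetical cutoffs:
#
#   tar = TarTime(future_cutoff=time.time() + 24*3600, past_cutoff=0)
#   apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
#   # afterwards tar.future_files / tar.ancient_files map offending
#   # member names to their mtimes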
###############################################################################

class Upload(object):
    """
    Everything that has to do with processing an upload.

    """
    def __init__(self):
        self.logger = None
        self.pkg = Changes()
        self.reset()

    ###########################################################################

    def reset(self):
        """ Reset a number of internal variables."""

        # Initialize the substitution template map
        cnf = Config()
        self.Subst = {}
        self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
        self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
        self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
        self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]

        self.rejects = []
        self.warnings = []
        self.notes = []

        self.pkg.reset()

    def package_info(self):
        """
        Format various messages from this Upload to send to the maintainer.
        """

        msgs = (
            ('Reject Reasons', self.rejects),
            ('Warnings', self.warnings),
            ('Notes', self.notes),
        )

        msg = ''
        for title, messages in msgs:
            if messages:
                msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
        msg += '\n'

        return msg

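    # package_info() output for an upload with one reject and one note looks
    # roughly like this (illustrative only):
    #
    #   Reject Reasons:
    #   foo_1.0-1.dsc: invalid version number '1.0~'.
    #
    #   Notes:
    #   Mapping stable to proposed-updates.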
    ###########################################################################
    def update_subst(self):
        """ Set up the per-package template substitution mappings """

        cnf = Config()

        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not self.pkg.changes.has_key("architecture") or not \
           isinstance(self.pkg.changes["architecture"], dict):
            self.pkg.changes["architecture"] = { "Unknown" : "" }

        # and maintainer2047 may not exist.
        if not self.pkg.changes.has_key("maintainer2047"):
            self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]

        self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
        self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if self.pkg.changes["architecture"].has_key("source") and \
           self.pkg.changes["changedby822"] != "" and \
           (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):

            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
        else:
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")

        if "sponsoremail" in self.pkg.changes:
            self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]

        if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
            self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if cnf.get("Dinstall::OverrideMaintainer"):
            self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
            self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]

        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
        self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")

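    # The Subst map is consumed by the mail templates elsewhere in dak; a
    # minimal sketch of how a template would be expanded, assuming dak's
    # utils.TemplateSubst helper and a hypothetical template path:
    #
    #   upload.update_subst()
    #   mail_message = utils.TemplateSubst(
    #       upload.Subst, cnf["Dir::Templates"] + "/process-unchecked.new")
    #   utils.send_mail(mail_message)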
    ###########################################################################
    def load_changes(self, filename):
        """
        @rtype: boolean
        @return: whether the changes file was valid or not.  We may want to
                 reject even if this is True (see what gets put in self.rejects).
                 This is simply to prevent us even trying things later which will
                 fail because we couldn't properly parse the file.
        """
        Cnf = Config()
        self.pkg.changes_file = filename

        # Parse the .changes file into a dictionary
        try:
            self.pkg.changes.update(parse_changes(filename))
        except CantOpenError:
            self.rejects.append("%s: can't read file." % (filename))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except ChangesUnicodeError:
            self.rejects.append("%s: changes file not proper utf-8" % (filename))
            return False

        # Parse the Files field from the .changes into another dictionary
        try:
            self.pkg.files.update(utils.build_file_list(self.pkg.changes))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (filename, format))
            return False

        # Check for mandatory fields
        for i in ("distribution", "source", "binary", "architecture",
                  "version", "maintainer", "files", "changes", "description"):
            if not self.pkg.changes.has_key(i):
                # Avoid undefined errors later
                self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
                return False

        # Strip a source version in brackets from the source field
        if re_strip_srcver.search(self.pkg.changes["source"]):
            self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])

        # Ensure the source field is a valid package name.
        if not re_valid_pkg_name.match(self.pkg.changes["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))

        # Split multi-value fields into a lower-level dictionary
        for i in ("architecture", "distribution", "binary", "closes"):
            o = self.pkg.changes.get(i, "")
            if o != "":
                del self.pkg.changes[i]

            self.pkg.changes[i] = {}

            for j in o.split():
                self.pkg.changes[i][j] = 1

        # Fix the Maintainer: field to be RFC822/2047 compatible
        try:
            (self.pkg.changes["maintainer822"],
             self.pkg.changes["maintainer2047"],
             self.pkg.changes["maintainername"],
             self.pkg.changes["maintaineremail"]) = \
                   fix_maintainer(self.pkg.changes["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["maintainer"], msg))

        # ...likewise for the Changed-By: field if it exists.
        try:
            (self.pkg.changes["changedby822"],
             self.pkg.changes["changedby2047"],
             self.pkg.changes["changedbyname"],
             self.pkg.changes["changedbyemail"]) = \
                   fix_maintainer(self.pkg.changes.get("changed-by", ""))
        except ParseMaintError, msg:
            self.pkg.changes["changedby822"] = ""
            self.pkg.changes["changedby2047"] = ""
            self.pkg.changes["changedbyname"] = ""
            self.pkg.changes["changedbyemail"] = ""

            self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["changed-by"], msg))

        # Ensure all the values in Closes: are numbers
        if self.pkg.changes.has_key("closes"):
            for i in self.pkg.changes["closes"].keys():
                if re_isanum.match(i) is None:
                    self.rejects.append("%s: `%s' from Closes field isn't a number." % (filename, i))

        # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
        self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
        self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])
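        # For example (illustrative only): Version "1:1.2-3" gives
        # chopversion "1.2-3" (epoch stripped) and chopversion2 "1.2"
        # (epoch and Debian revision stripped), matching foo_1.2.orig.tar.gz.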

        # Check the .changes is non-empty
        if not self.pkg.files:
            self.rejects.append("%s: nothing to do (Files field is empty)." % (filename))
            return False

        # Changes was syntactically valid even if we'll reject
        return True

    ###########################################################################

    def check_distributions(self):
        "Check and map the Distribution field"

        Cnf = Config()

        # Handle suite mappings
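        # Each entry is a whitespace-separated directive; hypothetical
        # examples of the forms handled below (suite names are illustrative):
        #
        #   map stable proposed-updates
        #   map-unreleased stable unstable
        #   ignore testing
        #   reject experimental-security
        #   propup-version testing-security testing
        #
        # (silent-map behaves like map but records no note.)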
        for m in Cnf.ValueList("SuiteMappings"):
            args = m.split()
            mtype = args[0]
            if mtype == "map" or mtype == "silent-map":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    del self.pkg.changes["distribution"][source]
                    self.pkg.changes["distribution"][dest] = 1
                    if mtype != "silent-map":
                        self.notes.append("Mapping %s to %s." % (source, dest))
                if self.pkg.changes.has_key("distribution-version"):
                    if self.pkg.changes["distribution-version"].has_key(source):
                        self.pkg.changes["distribution-version"][source] = dest
            elif mtype == "map-unreleased":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    for arch in self.pkg.changes["architecture"].keys():
                        if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
                            self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
                            del self.pkg.changes["distribution"][source]
                            self.pkg.changes["distribution"][dest] = 1
                            break
            elif mtype == "ignore":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    del self.pkg.changes["distribution"][suite]
                    self.warnings.append("Ignoring %s as a target suite." % (suite))
            elif mtype == "reject":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    self.rejects.append("Uploads to %s are not accepted." % (suite))
            elif mtype == "propup-version":
                # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
                #
                # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
                if self.pkg.changes["distribution"].has_key(args[1]):
                    self.pkg.changes.setdefault("distribution-version", {})
                    for suite in args[2:]:
                        self.pkg.changes["distribution-version"][suite] = suite

        # Ensure there is (still) a target distribution
        if len(self.pkg.changes["distribution"].keys()) < 1:
            self.rejects.append("No valid distribution remaining.")

        # Ensure target distributions exist
        for suite in self.pkg.changes["distribution"].keys():
            if not Cnf.has_key("Suite::%s" % (suite)):
                self.rejects.append("Unknown distribution `%s'." % (suite))

    ###########################################################################

    def binary_file_checks(self, f, session):
        cnf = Config()
        entry = self.pkg.files[f]

        # Extract package control information
        deb_file = utils.open_file(f)
        try:
            control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
        except:
            self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
            deb_file.close()
            # Can't continue, none of the checks on control would work.
            return

        # Check for mandatory "Description:"
        deb_file.seek(0)
        try:
            # Indexing raises KeyError if the field is missing.
            apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
        except:
            self.rejects.append("%s: Missing Description in binary package" % (f))
            deb_file.close()
            return

        deb_file.close()

        # Check for mandatory fields
        for field in [ "Package", "Architecture", "Version" ]:
            if control.Find(field) is None:
                # Can't continue
                self.rejects.append("%s: No %s field in control." % (f, field))
                return

        # Ensure the package name matches the one given in the .changes
        if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
            self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))

        # Validate the package field
        package = control.Find("Package")
        if not re_valid_pkg_name.match(package):
            self.rejects.append("%s: invalid package name '%s'." % (f, package))

        # Validate the version field
        version = control.Find("Version")
        if not re_valid_version.match(version):
            self.rejects.append("%s: invalid version number '%s'." % (f, version))

        # Ensure the architecture of the .deb is one we know about.
        default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
        architecture = control.Find("Architecture")
        upload_suite = self.pkg.changes["distribution"].keys()[0]

        if      architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session)] \
            and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session)]:
            self.rejects.append("Unknown architecture '%s'." % (architecture))

        # Ensure the architecture of the .deb is one of the ones
        # listed in the .changes.
        if not self.pkg.changes["architecture"].has_key(architecture):
            self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))

        # Sanity-check the Depends field
        depends = control.Find("Depends")
        if depends == '':
            self.rejects.append("%s: Depends field is empty." % (f))

        # Sanity-check the Provides field
        provides = control.Find("Provides")
        if provides:
            provide = re_spacestrip.sub('', provides)
            if provide == '':
                self.rejects.append("%s: Provides field is empty." % (f))
            prov_list = provide.split(",")
            for prov in prov_list:
                if not re_valid_pkg_name.match(prov):
                    self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))

        # Check the section & priority match those given in the .changes (non-fatal)
        if     control.Find("Section") and entry["section"] != "" \
           and entry["section"] != control.Find("Section"):
            self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
                                (f, control.Find("Section", ""), entry["section"]))
        if control.Find("Priority") and entry["priority"] != "" \
           and entry["priority"] != control.Find("Priority"):
            self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
                                (f, control.Find("Priority", ""), entry["priority"]))

        entry["package"] = package
        entry["architecture"] = architecture
        entry["version"] = version
        entry["maintainer"] = control.Find("Maintainer", "")

        if f.endswith(".udeb"):
            self.pkg.files[f]["dbtype"] = "udeb"
        elif f.endswith(".deb"):
            self.pkg.files[f]["dbtype"] = "deb"
        else:
            self.rejects.append("%s is neither a .deb nor a .udeb." % (f))

        entry["source"] = control.Find("Source", entry["package"])

        # Get the source version
        source = entry["source"]
        source_version = ""

        if source.find("(") != -1:
            m = re_extract_src_version.match(source)
            source = m.group(1)
            source_version = m.group(2)

        if not source_version:
            source_version = self.pkg.files[f]["version"]

        entry["source package"] = source
        entry["source version"] = source_version

        # Ensure the filename matches the contents of the .deb
        m = re_isadeb.match(f)

        #  package name
        file_package = m.group(1)
        if entry["package"] != file_package:
            self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
                                (f, file_package, entry["dbtype"], entry["package"]))
        epochless_version = re_no_epoch.sub('', control.Find("Version"))

        #  version
        file_version = m.group(2)
        if epochless_version != file_version:
            self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
                                (f, file_version, entry["dbtype"], epochless_version))

        #  architecture
        file_architecture = m.group(3)
        if entry["architecture"] != file_architecture:
            self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
                                (f, file_architecture, entry["dbtype"], entry["architecture"]))

        # Check for existent source
        source_version = entry["source version"]
        source_package = entry["source package"]
        if self.pkg.changes["architecture"].has_key("source"):
            if source_version != self.pkg.changes["version"]:
                self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
                                    (source_version, f, self.pkg.changes["version"]))
        else:
            # Check in the SQL database
            if not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
                # Check in one of the other directories
                source_epochless_version = re_no_epoch.sub('', source_version)
                dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
                if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
                    entry["byhand"] = 1
                elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
                    entry["new"] = 1
                else:
                    dsc_file_exists = False
                    for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
                        if cnf.has_key("Dir::Queue::%s" % (myq)):
                            if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
                                dsc_file_exists = True
                                break

                    if not dsc_file_exists:
                        self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))

        # Check the version and for file overwrites
        self.check_binary_against_db(f, session)

        # Temporarily disable contents generation until we change the table storage layout
        #b = Binary(f)
        #b.scan_package()
        #if len(b.rejects) > 0:
        #    for j in b.rejects:
        #        self.rejects.append(j)

    def source_file_checks(self, f, session):
        entry = self.pkg.files[f]

        m = re_issource.match(f)
        if not m:
            return

        entry["package"] = m.group(1)
        entry["version"] = m.group(2)
        entry["type"] = m.group(3)

        # Ensure the source package name matches the Source field in the .changes
        if self.pkg.changes["source"] != entry["package"]:
            self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))

        # Ensure the source version matches the version in the .changes file
        if re_is_orig_source.match(f):
            changes_version = self.pkg.changes["chopversion2"]
        else:
            changes_version = self.pkg.changes["chopversion"]

        if changes_version != entry["version"]:
            self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))

        # Ensure the .changes lists source in the Architecture field
        if not self.pkg.changes["architecture"].has_key("source"):
            self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))

        # Check the signature of a .dsc file
        if entry["type"] == "dsc":
            # check_signature returns either:
            #  (None, [list, of, rejects]) or (signature, [])
            (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
            for j in rejects:
                self.rejects.append(j)

        entry["architecture"] = "source"

    def per_suite_file_checks(self, f, suite, session):
        cnf = Config()
        entry = self.pkg.files[f]

        # Skip byhand
        if entry.has_key("byhand"):
            return

        # Check we have fields we need to do these checks
        oktogo = True
        for m in ['component', 'package', 'priority', 'size', 'md5sum']:
            if not entry.has_key(m):
                self.rejects.append("file '%s' does not have field %s set" % (f, m))
                oktogo = False

        if not oktogo:
            return

        # Handle component mappings
        for m in cnf.ValueList("ComponentMappings"):
            (source, dest) = m.split()
            if entry["component"] == source:
                entry["original component"] = source
                entry["component"] = dest

        # Ensure the component is valid for the target suite
        if cnf.has_key("Suite::%s::Components" % (suite)) and \
           entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
            self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
            return

        # Validate the component
        if not get_component(entry["component"], session):
            self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
            return

        # See if the package is NEW
        if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
            entry["new"] = 1

        # Validate the priority
        if entry["priority"].find('/') != -1:
            self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))

        # Determine the location
        location = cnf["Dir::Pool"]
        l = get_location(location, entry["component"], session=session)
        if l is None:
            self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
            entry["location id"] = -1
        else:
            entry["location id"] = l.location_id

        # Check the md5sum & size against existing files (if any)
        entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])

        found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
                                         entry["size"], entry["md5sum"], entry["location id"])

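        # check_poolfile() results, as interpreted below:
        #   found is None                -> multiple matches (internal error)
        #   found is False, poolfile set -> existing copy with different size/md5sum
        #   poolfile is None             -> no existing file; "files id" is None
        #   otherwise                    -> identical file known; reuse its file_id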
        if found is None:
            self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
        elif found is False and poolfile is not None:
            self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
        else:
            if poolfile is None:
                entry["files id"] = None
            else:
                entry["files id"] = poolfile.file_id

        # Check for packages that have moved from one component to another
        entry['suite'] = suite
        res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
        if res.rowcount > 0:
            entry["othercomponents"] = res.fetchone()[0]

    def check_files(self, action=True):
        file_keys = self.pkg.files.keys()
        holding = Holding()
        cnf = Config()

        if action:
            cwd = os.getcwd()
            os.chdir(self.pkg.directory)
            for f in file_keys:
                ret = holding.copy_to_holding(f)
                if ret is not None:
                    # XXX: Should we bail out here or try and continue?
                    self.rejects.append(ret)

            os.chdir(cwd)

        # Check whether we already know the changes file
        # [NB: this check must be done post-suite mapping]
        base_filename = os.path.basename(self.pkg.changes_file)

        session = DBConn().session()

        try:
            dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
            # if in the pool or in a queue other than unchecked, reject
            if (dbc.in_queue is None) \
                   or (dbc.in_queue is not None
                       and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
                self.rejects.append("%s file already known to dak" % base_filename)
        except NoResultFound, e:
            # not known, good
            pass

        has_binaries = False
        has_source = False

        for f, entry in self.pkg.files.items():
            # Ensure the file does not already exist in one of the accepted directories
            for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                if not cnf.has_key("Dir::Queue::%s" % (d)): continue
                if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
                    self.rejects.append("%s file already exists in the %s directory." % (f, d))

            if not re_taint_free.match(f):
                self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))

            # Check the file is readable
            if not os.access(f, os.R_OK):
                # When running in -n, copy_to_holding() won't have
                # generated the reject_message, so we need to.
                if action:
                    if os.path.exists(f):
                        self.rejects.append("Can't read `%s'. [permission denied]" % (f))
                    else:
                        self.rejects.append("Can't read `%s'. [file not found]" % (f))
                entry["type"] = "unreadable"
                continue

            # If it's byhand skip remaining checks
            if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Checks for a binary package...
            elif re_isadeb.match(f):
                has_binaries = True
                entry["type"] = "deb"

                # This routine appends to self.rejects/warnings as appropriate
                self.binary_file_checks(f, session)

            # Checks for a source package...
            elif re_issource.match(f):
                has_source = True

                # This routine appends to self.rejects/warnings as appropriate
                self.source_file_checks(f, session)

            # Not a binary or source package?  Assume byhand...
            else:
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Per-suite file checks
            entry["oldfiles"] = {}
            for suite in self.pkg.changes["distribution"].keys():
                self.per_suite_file_checks(f, suite, session)

        session.close()

        # If the .changes file says it has source, it must have source.
        if self.pkg.changes["architecture"].has_key("source"):
            if not has_source:
                self.rejects.append("no source found although the Architecture field in the changes file mentions source.")

            if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
                self.rejects.append("source only uploads are not supported.")

    ###########################################################################
    def check_dsc(self, action=True, session=None):
        """Returns bool indicating whether or not the source changes are valid"""
        # Ensure there is source to check
        if not self.pkg.changes["architecture"].has_key("source"):
            return True

        # Find the .dsc
        dsc_filename = None
        for f, entry in self.pkg.files.items():
            if entry["type"] == "dsc":
                if dsc_filename:
                    self.rejects.append("cannot process a .changes file with multiple .dsc's.")
                    return False
                else:
                    dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            self.rejects.append("source uploads must contain a dsc file")
            return False

        # Parse the .dsc file
        try:
            self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1))
        except CantOpenError:
            # if not -n copy_to_holding() will have done this for us...
            if not action:
                self.rejects.append("%s: can't read file." % (dsc_filename))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
        except InvalidDscError, line:
            self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
        except ChangesUnicodeError:
            self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))

        # Build up the file list of files mentioned by the .dsc
        try:
            self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
        except NoFilesFieldError:
            self.rejects.append("%s: no Files: field." % (dsc_filename))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
            return False

        # Enforce mandatory fields
        for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
            if not self.pkg.dsc.has_key(i):
                self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
                return False

        # Validate the source and version fields
        if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
        if not re_valid_version.match(self.pkg.dsc["version"]):
            self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))

        # Only a limited list of source formats are allowed in each suite
        for dist in self.pkg.changes["distribution"].keys():
            allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
            if self.pkg.dsc["format"] not in allowed:
                self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))

        # Validate the Maintainer field
        try:
            # We ignore the return value
            fix_maintainer(self.pkg.dsc["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                                 % (dsc_filename, self.pkg.dsc["maintainer"], msg))

        # Validate the build-depends field(s)
        for field_name in [ "build-depends", "build-depends-indep" ]:
            field = self.pkg.dsc.get(field_name)
            if field:
                # Have apt try to parse them...
                try:
                    apt_pkg.ParseSrcDepends(field)
                except:
                    self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))

        # Ensure the version number in the .dsc matches the version number in the .changes
        epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        changes_version = self.pkg.files[dsc_filename]["version"]

        if epochless_dsc_version != changes_version:
            self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))

        # Ensure the Files field contains only what's expected
        self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))

        # Ensure source is newer than existing source in target suites
        session = DBConn().session()
        self.check_source_against_db(dsc_filename, session)
        self.check_dsc_against_db(dsc_filename, session)
        session.close()

        return True

    ###########################################################################

    def get_changelog_versions(self, source_dir):
        """Extracts the source package and (optionally) grabs the
        version history out of debian/changelog for the BTS."""

        cnf = Config()

        # Find the .dsc (again)
        dsc_filename = None
        for f in self.pkg.files.keys():
            if self.pkg.files[f]["type"] == "dsc":
                dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            return

        # Create a symlink mirror of the source files in our temporary directory
        for f in self.pkg.files.keys():
            m = re_issource.match(f)
            if m:
                src = os.path.join(source_dir, f)
                # If a file is missing for whatever reason, give up.
                if not os.path.exists(src):
                    return
                ftype = m.group(3)
                if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
                   self.pkg.orig_files[f].has_key("path"):
                    continue
                dest = os.path.join(os.getcwd(), f)
                os.symlink(src, dest)

        # If the orig files are not a part of the upload, create symlinks to the
        # existing copies.
        for orig_file in self.pkg.orig_files.keys():
            if not self.pkg.orig_files[orig_file].has_key("path"):
                continue
            dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
            os.symlink(self.pkg.orig_files[orig_file]["path"], dest)

        # Extract the source
        cmd = "dpkg-source -sn -x %s" % (dsc_filename)
        (result, output) = commands.getstatusoutput(cmd)
        if (result != 0):
            self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
            self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
            return

        if not cnf.Find("Dir::Queue::BTSVersionTrack"):
            return

        # Get the upstream version
        upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        if re_strip_revision.search(upstr_version):
            upstr_version = re_strip_revision.sub('', upstr_version)

        # Ensure the changelog file exists
        changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
        if not os.path.exists(changelog_filename):
            self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
            return

        # Parse the changelog
        self.pkg.dsc["bts changelog"] = ""
        changelog_file = utils.open_file(changelog_filename)
        for line in changelog_file.readlines():
            m = re_changelog_versions.match(line)
            if m:
                self.pkg.dsc["bts changelog"] += line
        changelog_file.close()

        # Check we found at least one revision in the changelog
        if not self.pkg.dsc["bts changelog"]:
            self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))

    def check_source(self):
        # Bail out if:
        #    a) there's no source
        # or b) the orig files are MIA
        if not self.pkg.changes["architecture"].has_key("source") \
           or len(self.pkg.orig_files) == 0:
            return

        tmpdir = utils.temp_dirname()

        # Move into the temporary directory
        cwd = os.getcwd()
        os.chdir(tmpdir)

        # Get the changelog version history
        self.get_changelog_versions(cwd)

        # Move back and cleanup the temporary tree
        os.chdir(cwd)

        try:
            shutil.rmtree(tmpdir)
        except OSError, e:
            if e.errno != errno.EACCES:
                utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))

            self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
            # We probably have u-r or u-w directories so chmod everything
            # and try again.
            cmd = "chmod -R u+rwx %s" % (tmpdir)
            result = os.system(cmd)
            if result != 0:
                utils.fubar("'%s' failed with result %s." % (cmd, result))
            shutil.rmtree(tmpdir)
        except Exception, e:
            utils.fubar("%s: couldn't remove tmp dir for source tree: %s" % (self.pkg.dsc["source"], e))

    ###########################################################################
    def ensure_hashes(self):
        # Make sure we recognise the format of the Files: field in the .changes
        format = self.pkg.changes.get("format", "0.0").split(".", 1)
        if len(format) == 2:
            format = int(format[0]), int(format[1])
        else:
            format = int(float(format[0])), 0

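        # For example: a Format: field of "1.8" parses to (1, 8), while a
        # bare "1" falls through the float() branch to (1, 0).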
        # We need to deal with the original changes blob, as the fields we need
        # might not be in the changes dict serialised into the .dak anymore.
        orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])

        # Copy the checksums over to the current changes dict.  This will keep
        # the existing modifications to it intact.
        for field in orig_changes:
            if field.startswith('checksums-'):
                self.pkg.changes[field] = orig_changes[field]

        # Check for unsupported hashes
        for j in utils.check_hash_fields(".changes", self.pkg.changes):
            self.rejects.append(j)

        for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
            self.rejects.append(j)

        # If the changes format predates a given hash, we calculate that hash
        # ourselves rather than requiring it to be present in the changes file.
        for hashname, hashfunc, version in utils.known_hashes:
            # TODO: Move _ensure_changes_hash into this class
            for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
                self.rejects.append(j)
            if "source" in self.pkg.changes["architecture"]:
                # TODO: Move _ensure_dsc_hash into this class
                for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
                    self.rejects.append(j)

    def check_hashes(self):
        for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
            self.rejects.append(m)

        for m in utils.check_size(".changes", self.pkg.files):
            self.rejects.append(m)

        for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
            self.rejects.append(m)

        for m in utils.check_size(".dsc", self.pkg.dsc_files):
            self.rejects.append(m)

        self.ensure_hashes()

    ###########################################################################

1182     def ensure_orig(self, target_dir='.', session=None):
1183         """
1184         Ensures that all orig files mentioned in the changes file are present
1185         in target_dir. If they do not exist, they are symlinked into place.
1186
1187         An list containing the symlinks that were created are returned (so they
1188         can be removed).
1189         """
1190
1191         symlinked = []
1192         cnf = Config()
1193
1194         for filename, entry in self.pkg.dsc_files.iteritems():
1195             if not re_is_orig_source.match(filename):
1196                 # File is not an orig; ignore
1197                 continue
1198
1199             if os.path.exists(filename):
1200                 # File exists, no need to continue
1201                 continue
1202
1203             def symlink_if_valid(path):
1204                 f = utils.open_file(path)
1205                 md5sum = apt_pkg.md5sum(f)
1206                 f.close()
1207
1208                 fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
1209                 expected = (int(entry['size']), entry['md5sum'])
1210
1211                 if fingerprint != expected:
1212                     return False
1213
1214                 dest = os.path.join(target_dir, filename)
1215
1216                 os.symlink(path, dest)
1217                 symlinked.append(dest)
1218
1219                 return True
1220
1221             session_ = session
1222             if session is None:
1223                 session_ = DBConn().session()
1224
1225             found = False
1226
1227             # Look in the pool
1228             for poolfile in get_poolfile_like_name('/%s' % filename, session_):
1229                 poolfile_path = os.path.join(
1230                     poolfile.location.path, poolfile.filename
1231                 )
1232
1233                 if symlink_if_valid(poolfile_path):
1234                     found = True
1235                     break
1236
1237             if session is None:
1238                 session_.close()
1239
1240             if found:
1241                 continue
1242
1243             # Look in some other queues for the file
1244             queues = ('New', 'Byhand', 'ProposedUpdates',
1245                 'OldProposedUpdates', 'Embargoed', 'Unembargoed')
1246
1247             for queue in queues:
1248                 if not cnf.get('Dir::Queue::%s' % queue):
1249                     continue
1250
1251                 queuefile_path = os.path.join(
1252                     cnf['Dir::Queue::%s' % queue], filename
1253                 )
1254
1255                 if not os.path.exists(queuefile_path):
1256                     # Does not exist in this queue
1257                     continue
1258
1259                 if symlink_if_valid(queuefile_path):
1260                     break
1261
1262         return symlinked
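    # Usage sketch: because the created links are returned, a caller can
    # always clean up afterwards (this mirrors how check_lintian() below
    # uses it):
    #
    #   symlinked = self.ensure_orig(target_dir='.')
    #   try:
    #       pass    # ... run whatever needs the orig tarballs ...
    #   finally:
    #       for link in symlinked:
    #           os.unlink(link)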
1263
1264     ###########################################################################
1265
1266     def check_lintian(self):
1267         """
1268         Extends self.rejects by checking the output of lintian against tags
1269         specified in Dinstall::LintianTags.
1270         """
1271
1272         cnf = Config()
1273
1274         # Don't reject binary uploads
1275         if not self.pkg.changes['architecture'].has_key('source'):
1276             return
1277
1278         # Only check some distributions
1279         for dist in ('unstable', 'experimental'):
1280             if dist in self.pkg.changes['distribution']:
1281                 break
1282         else:
1283             return
1284
1285         # If we do not have a tagfile, don't do anything
1286         tagfile = cnf.get("Dinstall::LintianTags")
1287         if tagfile is None:
1288             return
1289
1290         # Parse the yaml file
1291         sourcefile = open(tagfile, 'r')
1292         sourcecontent = sourcefile.read()
1293         sourcefile.close()
1294
1295         try:
1296             lintiantags = yaml.load(sourcecontent)['lintian']
1297         except yaml.YAMLError, msg:
1298             utils.fubar("Cannot read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
1299             return
1300
1301         # Try and find all orig mentioned in the .dsc
1302         symlinked = self.ensure_orig()
1303
1304         # Set up the input file for lintian
1305         fd, temp_filename = utils.temp_filename()
1306         temptagfile = os.fdopen(fd, 'w')
1307         for tags in lintiantags.values():
1308             temptagfile.writelines(['%s\n' % x for x in tags])
1309         temptagfile.close()
1310
1311         try:
1312             cmd = "lintian --show-overrides --tags-from-file %s %s" % \
1313                 (temp_filename, self.pkg.changes_file)
1314
1315             result, output = commands.getstatusoutput(cmd)
1316         finally:
1317             # Remove our tempfile and any symlinks we created
1318             os.unlink(temp_filename)
1319
1320             for symlink in symlinked:
1321                 os.unlink(symlink)
1322
1323         if result == 2:
1324             utils.warn("lintian failed for %s [return code: %s]." % \
1325                 (self.pkg.changes_file, result))
1326             utils.warn(utils.prefix_multi_line_string(output, \
1327                 " [possible output:] "))
1328
1329         def log(*txt):
1330             if self.logger:
1331                 self.logger.log(
1332                     [self.pkg.changes_file, "check_lintian"] + list(txt)
1333                 )
1334
1335         # Generate messages
1336         parsed_tags = parse_lintian_output(output)
1337         self.rejects.extend(
1338             generate_reject_messages(parsed_tags, lintiantags, log=log)
1339         )
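    # The file named by Dinstall::LintianTags is YAML with a single
    # top-level 'lintian' key mapping category names to lists of tags.
    # An illustrative (hypothetical) fragment:
    #
    #   lintian:
    #     error:
    #       - statically-linked-binary
    #     warning:
    #       - debian-changelog-file-is-a-symlink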
1340
1341     ###########################################################################
1342     def check_urgency(self):
1343         cnf = Config()
1344         if self.pkg.changes["architecture"].has_key("source"):
1345             if not self.pkg.changes.has_key("urgency"):
1346                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1347             self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1348             if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1349                 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1350                                      (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1351                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
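    # Worked example (hypothetical config): with Urgency::Default "low" and
    # Urgency::Valid "low medium high critical", an upload declaring
    # "Urgency: HIGH" is lowercased to "high" and kept, whereas
    # "Urgency: superhigh" fails the check, a warning is recorded and the
    # urgency falls back to "low".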
1352
1353     ###########################################################################
1354
1355     # Sanity check the time stamps of files inside debs.
1356     # [Files in the near future cause ugly warnings and extreme time
1357     #  travel can cause errors on extraction]
1358
1359     def check_timestamps(self):
1360         Cnf = Config()
1361
1362         future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1363         past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1364         tar = TarTime(future_cutoff, past_cutoff)
1365
1366         for filename, entry in self.pkg.files.items():
1367             if entry["type"] == "deb":
1368                 tar.reset()
1369                 try:
1370                     deb_file = utils.open_file(filename)
1371                     apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
1372                     deb_file.seek(0)
1373                     try:
1374                         apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
1375                     except SystemError, e:
1376                         # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
1377                         if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
1378                             raise
1379                         deb_file.seek(0)
1380                         apt_inst.debExtract(deb_file, tar.callback, "data.tar.bz2")
1381
1382                     deb_file.close()
1383
1384                     future_files = tar.future_files.keys()
1385                     if future_files:
1386                         num_future_files = len(future_files)
1387                         future_file = future_files[0]
1388                         future_date = tar.future_files[future_file]
1389                         self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1390                                % (filename, num_future_files, future_file, time.ctime(future_date)))
1391
1392                     ancient_files = tar.ancient_files.keys()
1393                     if ancient_files:
1394                         num_ancient_files = len(ancient_files)
1395                         ancient_file = ancient_files[0]
1396                         ancient_date = tar.ancient_files[ancient_file]
1397                         self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1398                                % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1399                 except Exception:
1400                     self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_info()[0], sys.exc_info()[1]))
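    # Sketch of the cutoff computation (hypothetical config values): with
    # Dinstall::FutureTimeTravelGrace "86400" and Dinstall::PastCutoffYear
    # "1984", the window outside which timestamps are rejected is:
    #
    #   future_cutoff = time.time() + 86400                     # now + 1 day
    #   past_cutoff = time.mktime(time.strptime("1984", "%Y"))  # 1984-01-01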
1401
1402     def check_if_upload_is_sponsored(self, uid_email, uid_name):
1403         if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1404             sponsored = False
1405         elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1406             sponsored = False
1407             if uid_name == "":
1408                 sponsored = True
1409         else:
1410             sponsored = True
1411             if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1412                 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1413                 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1414                     self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1415                         self.pkg.changes["sponsoremail"] = uid_email
1416
1417         return sponsored
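    # Decision table for the check above: an upload only counts as
    # sponsored if the signing key's uid matches neither the Maintainer:
    # nor the Changed-By: identity.
    #
    #   uid_email matches maintainer/changed-by email -> not sponsored
    #   uid_name matches maintainer/changed-by name   -> not sponsored
    #     (except that an empty uid_name counts as sponsored)
    #   no match at all                               -> sponsored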
1418
1419
1420     ###########################################################################
1421     # check_signed_by_key checks
1422     ###########################################################################
1423
1424     def check_signed_by_key(self):
1425         """Ensure the .changes is signed by an authorized uploader."""
1426         session = DBConn().session()
1427
1428         # First of all we check that the person has proper upload permissions
1429         # and that this upload isn't blocked
1430         fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1431
1432         if fpr is None:
1433             self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1434             return
1435
1436         # TODO: Check that import-keyring adds UIDs properly
1437         if not fpr.uid:
1438             self.rejects.append("Cannot find uid for fingerprint %s.  Please contact ftpmaster@debian.org" % fpr.fingerprint)
1439             return
1440
1441         # Check that the fingerprint which uploaded has permission to do so
1442         self.check_upload_permissions(fpr, session)
1443
1444         # Check that this package is not in a transition
1445         self.check_transition(session)
1446
1447         session.close()
1448
1449
1450     def check_upload_permissions(self, fpr, session):
1451         # Check any one-off upload blocks
1452         self.check_upload_blocks(fpr, session)
1453
1454         # Start with DM as a special case
1455         # DM is a special case unfortunately, so we check it first
1456         # (keys with no source access get more access than DMs in one
1457         #  way; DMs can only upload for their packages whether source
1458         #  or binary, whereas keys with no access might be able to
1459         #  upload some binaries)
1460         if fpr.source_acl.access_level == 'dm':
1461             self.check_dm_upload(fpr, session)
1462         else:
1463             # Check source-based permissions for other types
1464             if self.pkg.changes["architecture"].has_key("source") and \
1465                 fpr.source_acl.access_level is None:
1466                 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1467                 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1468                 self.rejects.append(rej)
1469                 return
1470             # If not a DM, we allow full upload rights
1471             uid_email = "%s@debian.org" % (fpr.uid.uid)
1472             self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1473
1474
1475         # Check binary upload permissions
1476         # By this point we know that DMs can't have got here unless they
1477         # are allowed to deal with the package concerned so just apply
1478         # normal checks
1479         if fpr.binary_acl.access_level == 'full':
1480             return
1481
1482         # Otherwise we're in the map case
1483         tmparches = self.pkg.changes["architecture"].copy()
1484         tmparches.pop('source', None)
1485
1486         for bam in fpr.binary_acl_map:
1487             tmparches.pop(bam.architecture.arch_string, None)
1488
1489         if len(tmparches.keys()) > 0:
1490             if fpr.binary_reject:
1491                 rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1492                 rej += "\narchitectures involved are: %s" % (",".join(tmparches.keys()))
1493                 self.rejects.append(rej)
1494             else:
1495                 # TODO: This is where we'll implement reject vs throw away binaries later
1496                 rej = "Uhm.  I'm meant to throw away the binaries now but that's not implemented yet"
1497                 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1498                 rej += "\nFingerprint: %s" % (fpr.fingerprint)
1499                 self.rejects.append(rej)
1500
1501
1502     def check_upload_blocks(self, fpr, session):
1503         """Check whether any upload blocks apply to this source, source
1504            version, uid / fpr combination"""
1505
1506         def block_rej_template(fb):
1507             rej = 'Manual upload block in place for package %s' % fb.source
1508             if fb.version is not None:
1509                 rej += ', version %s' % fb.version
1510             return rej
1511
1512         for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1513             # version is None if the block applies to all versions
1514             if fb.version is None or fb.version == self.pkg.changes['version']:
1515                 # Check both fpr and uid - either is enough to cause a reject
1516                 if fb.fpr is not None:
1517                     if fb.fpr.fingerprint == fpr.fingerprint:
1518                         self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1519                 if fb.uid is not None:
1520                     if fb.uid == fpr.uid:
1521                         self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1522
1523
1524     def check_dm_upload(self, fpr, session):
1525         # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
1526         ## none of the uploaded packages are NEW
1527         rej = False
1528         for f in self.pkg.files.keys():
1529             if self.pkg.files[f].has_key("byhand"):
1530                 self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
1531                 rej = True
1532             if self.pkg.files[f].has_key("new"):
1533                 self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
1534                 rej = True
1535
1536         if rej:
1537             return
1538
1539         ## the most recent version of the package uploaded to unstable or
1540         ## experimental includes the field "DM-Upload-Allowed: yes" in the source
1541         ## section of its control file
1542         q = session.query(DBSource).filter_by(source=self.pkg.changes["source"])
1543         q = q.join(SrcAssociation)
1544         q = q.join(Suite).filter(Suite.suite_name.in_(['unstable', 'experimental']))
1545         q = q.order_by(desc('source.version')).limit(1)
1546
1547         r = q.all()
1548
1549         if len(r) != 1:
1550             rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
1551             self.rejects.append(rej)
1552             return
1553
1554         r = r[0]
1555         if not r.dm_upload_allowed:
1556             rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
1557             self.rejects.append(rej)
1558             return
1559
1560         ## the Maintainer: field of the uploaded .changes file corresponds with
1561         ## the owner of the key used (ie, non-developer maintainers may not sponsor
1562         ## uploads)
1563         if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
1564             self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
1565
1566         ## the most recent version of the package uploaded to unstable or
1567         ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
1568         ## non-developer maintainers cannot NMU or hijack packages)
1569
1570         # srcuploaders includes the maintainer
1571         accept = False
1572         for sup in r.srcuploaders:
1573             (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
1574             # Eww - I hope we never have two people with the same name in Debian
1575             if email == fpr.uid.uid or name == fpr.uid.name:
1576                 accept = True
1577                 break
1578
1579         if not accept:
1580             self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
1581             return
1582
1583         ## none of the packages are being taken over from other source packages
1584         for b in self.pkg.changes["binary"].keys():
1585             for suite in self.pkg.changes["distribution"].keys():
1586                 q = session.query(DBSource)
1587                 q = q.join(DBBinary).filter_by(package=b)
1588                 q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)
1589
1590                 for s in q.all():
1591                     if s.source != self.pkg.changes["source"]:
1592                         self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s.source, suite))
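    # Condensed restatement of the GR conditions enforced above for a
    # hypothetical DM upload of source package "foo":
    #   1. nothing in the upload is NEW or BYHAND
    #   2. the newest "foo" in unstable/experimental carries
    #      "DM-Upload-Allowed: yes"
    #   3. the key owner is not merely sponsoring someone else's upload
    #   4. the key owner is listed in Maintainer: or Uploaders: of "foo"
    #   5. no uploaded binary is currently built by a different source
    #      package in any target suite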
1593
1594
1595
1596     def check_transition(self, session):
1597         cnf = Config()
1598
1599         sourcepkg = self.pkg.changes["source"]
1600
1601         # No sourceful upload -> nothing to do, return directly.
1602         # We also only deal with uploads to unstable, not experimental or
1603         # anything going to a proposed-updates queue.
1604         if "source" not in self.pkg.changes["architecture"] or \
1605            "unstable" not in self.pkg.changes["distribution"]:
1606             return
1607
1608         # Also, only check if a transitions file is defined (and actually
1609         # exists).
1610         transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1611         if transpath == "" or not os.path.exists(transpath):
1612             return
1613
1614         # Parse the yaml file
1615         sourcefile = open(transpath, 'r')
1616         sourcecontent = sourcefile.read()
1617         try:
1618             transitions = yaml.load(sourcecontent)
1619         except yaml.YAMLError, msg:
1620             # This shouldn't happen, there is a wrapper to edit the file which
1621             # checks it, but we prefer to be safe rather than end up rejecting
1622             # everything.
1623             utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1624             return
1625
1626         # Now look through all defined transitions
1627         for trans in transitions:
1628             t = transitions[trans]
1629             source = t["source"]
1630             expected = t["new"]
1631
1632             # Will be None if nothing is in testing.
1633             current = get_source_in_suite(source, "testing", session)
1634             if current is not None:
1635                 compare = apt_pkg.VersionCompare(current.version, expected)
1636
1637             if current is None or compare < 0:
1638                 # This is still valid, the current version in testing is older than
1639                 # the new version we wait for, or there is none in testing yet
1640
1641                 # Check if the source we look at is affected by this.
1642                 if sourcepkg in t['packages']:
1643                     # The source is affected, let's reject it.
1644
1645                     rejectmsg = "%s: part of the %s transition.\n\n" % (
1646                         sourcepkg, trans)
1647
1648                     if current is not None:
1649                         currentlymsg = "at version %s" % (current.version)
1650                     else:
1651                         currentlymsg = "not present in testing"
1652
1653                     rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1654
1655                     rejectmsg += "\n".join(textwrap.wrap("""Your package
1656 is part of a testing transition designed to get %s migrated (it is
1657 currently %s, we need version %s).  This transition is managed by the
1658 Release Team, and %s is the Release-Team member responsible for it.
1659 Please mail debian-release@lists.debian.org or contact %s directly if you
1660 need further assistance.  You might want to upload to experimental until this
1661 transition is done."""
1662                             % (source, currentlymsg, expected, t["rm"], t["rm"])))
1663
1664                     self.rejects.append(rejectmsg)
1665                     return
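    # The ReleaseTransitions file is YAML mapping each transition name to
    # its details; an illustrative (hypothetical) entry using exactly the
    # keys read above:
    #
    #   libfoo2:
    #     source: libfoo
    #     new: 2.0-1
    #     rm: "Some Releaseteam Member"
    #     reason: "libfoo soname bump to libfoo2"
    #     packages:
    #       - libfoo
    #       - bar-uses-libfoo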
1666
1667     ###########################################################################
1668     # End check_signed_by_key checks
1669     ###########################################################################
1670
1671     def build_summaries(self):
1672         """ Build a summary of changes the upload introduces. """
1673
1674         (byhand, new, summary, override_summary) = self.pkg.file_summary()
1675
1676         short_summary = summary
1677
1678         # This is for direport's benefit...
1679         f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1680
1681         if byhand or new:
1682             summary += "Changes: " + f
1683
1684         summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1685
1686         summary += self.announce(short_summary, 0)
1687
1688         return (summary, short_summary)
1689
1690     ###########################################################################
1691
1692     def close_bugs(self, summary, action):
1693         """
1694         Send mail to close bugs as instructed by the closes field in the changes file.
1695         Also add a line to summary if any work was done.
1696
1697         @type summary: string
1698         @param summary: summary text, as given by L{build_summaries}
1699
1700         @type action: bool
1701         @param action: If set to false, no real action will be done.
1702
1703         @rtype: string
1704         @return: summary. If action was taken, extended by the list of closed bugs.
1705
1706         """
1707
1708         template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1709
1710         bugs = self.pkg.changes["closes"].keys()
1711
1712         if not bugs:
1713             return summary
1714
1715         bugs.sort()
1716         summary += "Closing bugs: "
1717         for bug in bugs:
1718             summary += "%s " % (bug)
1719             if action:
1720                 self.update_subst()
1721                 self.Subst["__BUG_NUMBER__"] = bug
1722                 if self.pkg.changes["distribution"].has_key("stable"):
1723                     self.Subst["__STABLE_WARNING__"] = """
1724 Note that this package is not part of the released stable Debian
1725 distribution.  It may have dependencies on other unreleased software,
1726 or other instabilities.  Please take care if you wish to install it.
1727 The update will eventually make its way into the next released Debian
1728 distribution."""
1729                 else:
1730                     self.Subst["__STABLE_WARNING__"] = ""
1731                 mail_message = utils.TemplateSubst(self.Subst, template)
1732                 utils.send_mail(mail_message)
1733
1734                 # Clear up after ourselves
1735                 del self.Subst["__BUG_NUMBER__"]
1736                 del self.Subst["__STABLE_WARNING__"]
1737
1738         if action and self.logger:
1739             self.logger.log(["closing bugs"] + bugs)
1740
1741         summary += "\n"
1742
1743         return summary
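    # Sketch (hypothetical changes data): a .changes carrying
    # "Closes: 123456 654321" produces one templated mail per bug, with
    # __BUG_NUMBER__ substituted for each, and the returned summary gains
    # a "Closing bugs: 123456 654321" line.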
1744
1745     ###########################################################################
1746
1747     def announce(self, short_summary, action):
1748         """
1749         Send an announce mail about a new upload.
1750
1751         @type short_summary: string
1752         @param short_summary: Short summary text to include in the mail
1753
1754         @type action: bool
1755         @param action: If set to false, no real action will be done.
1756
1757         @rtype: string
1758         @return: Text string describing the action taken.
1759
1760         """
1761
1762         cnf = Config()
1763         announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
1764
1765         # Only do announcements for source uploads with a recent dpkg-dev installed
1766         if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
1767            self.pkg.changes["architecture"].has_key("source"):
1768             return ""
1769
1770         lists_done = {}
1771         summary = ""
1772
1773         self.Subst["__SHORT_SUMMARY__"] = short_summary
1774
1775         for dist in self.pkg.changes["distribution"].keys():
1776             announce_list = cnf.Find("Suite::%s::Announce" % (dist))
1777             if announce_list == "" or lists_done.has_key(announce_list):
1778                 continue
1779
1780             lists_done[announce_list] = 1
1781             summary += "Announcing to %s\n" % (announce_list)
1782
1783             if action:
1784                 self.update_subst()
1785                 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
1786                 if cnf.get("Dinstall::TrackingServer") and \
1787                    self.pkg.changes["architecture"].has_key("source"):
1788                     trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
1789                     self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
1790
1791                 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
1792                 utils.send_mail(mail_message)
1793
1794                 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
1795
1796         if cnf.FindB("Dinstall::CloseBugs"):
1797             summary = self.close_bugs(summary, action)
1798
1799         del self.Subst["__SHORT_SUMMARY__"]
1800
1801         return summary
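    # Illustrative (hypothetical) configuration for the logic above: with
    # Suite::unstable::Announce "debian-devel-changes@lists.debian.org",
    # one announcement goes to that list (each distinct address only once,
    # even if several target suites share it); if Dinstall::TrackingServer
    # is set, e.g. to "packages.qa.debian.org", source uploads are
    # additionally Bcc'd to <source>@packages.qa.debian.org.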
1802
1803     ###########################################################################
1804     @session_wrapper
1805     def accept (self, summary, short_summary, session=None):
1806         """
1807         Accept an upload.
1808
1809         This moves all files referenced from the .changes into the pool,
1810         sends the accepted mail, announces to lists, closes bugs and
1811         also checks for override disparities. If enabled it will write out
1812         the version history for the BTS Version Tracking and will finally call
1813         L{queue_build}.
1814
1815         @type summary: string
1816         @param summary: Summary text
1817
1818         @type short_summary: string
1819         @param short_summary: Short summary
1820         """
1821
1822         cnf = Config()
1823         stats = SummaryStats()
1824
1825         print "Installing."
1826         self.logger.log(["installing changes", self.pkg.changes_file])
1827
1828         poolfiles = []
1829
1830         # Add the .dsc file to the DB first
1831         for newfile, entry in self.pkg.files.items():
1832             if entry["type"] == "dsc":
1833                 dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
1834                 for j in pfs:
1835                     poolfiles.append(j)
1836
1837         # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
1838         for newfile, entry in self.pkg.files.items():
1839             if entry["type"] == "deb":
1840                 poolfiles.append(add_deb_to_db(self, newfile, session))
1841
1842         # If this is a sourceful diff only upload that is moving
1843         # cross-component we need to copy the .orig files into the new
1844         # component too for the same reasons as above.
1845         if self.pkg.changes["architecture"].has_key("source"):
1846             for orig_file in self.pkg.orig_files.keys():
1847                 if not self.pkg.orig_files[orig_file].has_key("id"):
1848                     continue # Skip if it's not in the pool
1849                 orig_file_id = self.pkg.orig_files[orig_file]["id"]
1850                 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
1851                     continue # Skip if the location didn't change
1852
1853                 # Do the move
1854                 oldf = get_poolfile_by_id(orig_file_id, session)
1855                 old_filename = os.path.join(oldf.location.path, oldf.filename)
1856                 old_dat = {'size': oldf.filesize,   'md5sum': oldf.md5sum,
1857                            'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
1858
1859                 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
1860
1861                 # TODO: Care about size/md5sum collisions etc
1862                 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
1863
1864                 if newf is None:
1865                     utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
1866                     newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
1867
1868                     # TODO: Check that there's only 1 here
1869                     source = get_sources_from_name(self.pkg.changes["source"], self.pkg.changes["version"])[0]
1870                     dscf = get_dscfiles(source_id=source.source_id, poolfile_id=orig_file_id, session=session)[0]
1871                     dscf.poolfile_id = newf.file_id
1872                     session.add(dscf)
1873                     session.flush()
1874
1875                     poolfiles.append(newf)
1876
1877         # Install the files into the pool
1878         for newfile, entry in self.pkg.files.items():
1879             destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
1880             utils.move(newfile, destination)
1881             self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
1882             stats.accept_bytes += float(entry["size"])
1883
1884         # Copy the .changes file across for suites which need it.
1885         copy_changes = {}
1886         for suite_name in self.pkg.changes["distribution"].keys():
1887             if cnf.has_key("Suite::%s::CopyChanges" % (suite_name)):
1888                 copy_changes[cnf["Suite::%s::CopyChanges" % (suite_name)]] = ""
1889
1890         for dest in copy_changes.keys():
1891             utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
1892
1893         # We're done - commit the database changes
1894         session.commit()
1895         # Our SQL session will automatically start a new transaction after
1896         # the last commit
1897
1898         # Move the .changes into the 'done' directory
1899         utils.move(self.pkg.changes_file,
1900                    os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))
1901
1902         if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
1903             UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
1904
1905         # Send accept mail, announce to lists, close bugs and check for
1906         # override disparities
1907         if not cnf["Dinstall::Options::No-Mail"]:
1908             self.update_subst()
1909             self.Subst["__SUITE__"] = ""
1910             self.Subst["__SUMMARY__"] = summary
1911             mail_message = utils.TemplateSubst(self.Subst,
1912                                                os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
1913             utils.send_mail(mail_message)
1914             self.announce(short_summary, 1)
1915
1916         ## Helper stuff for DebBugs Version Tracking
1917         if cnf.Find("Dir::Queue::BTSVersionTrack"):
1918             # ??? once queue/* is cleared on *.d.o and/or reprocessed
1919             # the conditionalization on dsc["bts changelog"] should be
1920             # dropped.
1921
1922             # Write out the version history from the changelog
1923             if self.pkg.changes["architecture"].has_key("source") and \
1924                self.pkg.dsc.has_key("bts changelog"):
1925
1926                 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
1927                 version_history = os.fdopen(fd, 'w')
1928                 version_history.write(self.pkg.dsc["bts changelog"])
1929                 version_history.close()
1930                 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
1931                                       self.pkg.changes_file[:-8]+".versions")
1932                 os.rename(temp_filename, filename)
1933                 os.chmod(filename, 0644)
1934
1935             # Write out the binary -> source mapping.
1936             (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
1937             debinfo = os.fdopen(fd, 'w')
1938             for name, entry in sorted(self.pkg.files.items()):
1939                 if entry["type"] == "deb":
1940                     line = " ".join([entry["package"], entry["version"],
1941                                      entry["architecture"], entry["source package"],
1942                                      entry["source version"]])
1943                     debinfo.write(line+"\n")
1944             debinfo.close()
1945             filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
1946                                   self.pkg.changes_file[:-8]+".debinfo")
1947             os.rename(temp_filename, filename)
1948             os.chmod(filename, 0644)
1949
1950         session.commit()
1951
1952         # Set up our copy queues (e.g. buildd queues)
1953         for suite_name in self.pkg.changes["distribution"].keys():
1954             suite = get_suite(suite_name, session)
1955             for q in suite.copy_queues:
1956                 for f in poolfiles:
1957                     q.add_file_from_pool(f)
1958
1959         session.commit()
1960
1961         # Finally...
1962         stats.accept_count += 1
1963
1964     def check_override(self):
1965         """
1966         Checks override entries for validity. Mails "Override disparity" warnings,
1967         if that feature is enabled.
1968
1969         Abandons the check if
1970           - override disparity checks are disabled
1971           - mail sending is disabled
1972         """
1973
1974         cnf = Config()
1975
1976         # Abandon the check if:
1977         #  a) override disparity checks have been disabled
1978         #  b) we're not sending mail
1979         if not cnf.FindB("Dinstall::OverrideDisparityCheck") or \
1980            cnf["Dinstall::Options::No-Mail"]:
1981             return
1982
1983         summary = self.pkg.check_override()
1984
1985         if summary == "":
1986             return
1987
1988         overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
1989
1990         self.update_subst()
1991         self.Subst["__SUMMARY__"] = summary
1992         mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
1993         utils.send_mail(mail_message)
1994         del self.Subst["__SUMMARY__"]
1995
1996     ###########################################################################
1997
1998     def remove(self, from_dir=None):
1999         """
2000         Used (for instance) in p-u to remove the package from unchecked
2001
2002         Also removes the package from the holding area.
2003         """
2004         if from_dir is None:
2005             from_dir = self.pkg.directory
2006         h = Holding()
2007
2008         for f in self.pkg.files.keys():
2009             os.unlink(os.path.join(from_dir, f))
2010             if os.path.exists(os.path.join(h.holding_dir, f)):
2011                 os.unlink(os.path.join(h.holding_dir, f))
2012
2013         os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2014         if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2015             os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2016
2017     ###########################################################################
2018
2019     def move_to_queue (self, queue):
2020         """
2021         Move files to a destination queue using the permissions in the table
2022         """
2023         h = Holding()
2024         utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2025                    queue.path, perms=int(queue.change_perms, 8))
2026         for f in self.pkg.files.keys():
2027             utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
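    # Note: queue.perms / queue.change_perms come out of the database as
    # octal permission strings (e.g. a hypothetical "0664"), hence the
    # int(x, 8) conversions above so utils.move() gets numeric modes.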
2028
2029     ###########################################################################
2030
2031     def force_reject(self, reject_files):
2032         """
2033         Forcefully move files from the current directory to the
2034         reject directory.  If any file already exists in the reject
2035         directory it will be moved to the morgue to make way for
2036         the new file.
2037
2038         @type reject_files: list
2039         @param reject_files: file names to move to the reject directory
2040
2041         """
2042
2043         cnf = Config()
2044
2045         for file_entry in reject_files:
2046             # Skip any files which don't exist or which we don't have permission to copy.
2047             if not os.access(file_entry, os.R_OK):
2048                 continue
2049
2050             dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
2051
2052             try:
2053                 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
2054             except OSError, e:
2055                 # File exists?  Let's try and move it to the morgue
2056                 if e.errno == errno.EEXIST:
2057                     morgue_file = os.path.join(cnf["Dir::Morgue"], cnf["Dir::MorgueReject"], file_entry)
2058                     try:
2059                         morgue_file = utils.find_next_free(morgue_file)
2060                     except NoFreeFilenameError:
2061                         # Something's either gone badly Pete Tong, or
2062                         # someone is trying to exploit us.
2063                         utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file_entry))
2064                         return
2065                     utils.move(dest_file, morgue_file, perms=0660)
2066                     try:
2067                         dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2068                     except OSError, e:
2069                         # Likewise
2070                         utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
2071                         return
2072                 else:
2073                     raise
2074             # If we got here, we own the destination file, so we can
2075             # safely overwrite it.
2076             utils.move(file_entry, dest_file, 1, perms=0660)
2077             os.close(dest_fd)
2078
2079     ###########################################################################
2080     def do_reject (self, manual=0, reject_message="", notes=""):
2081         """
2082         Reject an upload. If C{manual} is true and no reject message was
2083         given, spawn an editor so the user can write one.
2084
2085         @type manual: bool
2086         @param manual: manual or automated rejection
2087
2088         @type reject_message: string
2089         @param reject_message: A reject message
2090
2091         @return: 0 on success, 1 if the rejection was abandoned.
2092
2093         """
2094         # If we weren't given a manual rejection message, spawn an
2095         # editor so the user can add one in...
2096         if manual and not reject_message:
2097             (fd, temp_filename) = utils.temp_filename()
2098             temp_file = os.fdopen(fd, 'w')
2099             if len(notes) > 0:
2100                 for note in notes:
2101                     temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
2102                                     % (note.author, note.version, note.notedate, note.comment))
2103             temp_file.close()
2104             editor = os.environ.get("EDITOR","vi")
2105             answer = 'E'
2106             while answer == 'E':
2107                 os.system("%s %s" % (editor, temp_filename))
2108                 temp_fh = utils.open_file(temp_filename)
2109                 reject_message = "".join(temp_fh.readlines())
2110                 temp_fh.close()
2111                 print "Reject message:"
2112                 print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
2113                 prompt = "[R]eject, Edit, Abandon, Quit ?"
2114                 answer = "XXX"
2115                 while prompt.find(answer) == -1:
2116                     answer = utils.our_raw_input(prompt)
2117                     m = re_default_answer.search(prompt)
2118                     if answer == "":
2119                         answer = m.group(1)
2120                     answer = answer[:1].upper()
2121             os.unlink(temp_filename)
2122             if answer == 'A':
2123                 return 1
2124             elif answer == 'Q':
2125                 sys.exit(0)
2126
2127         print "Rejecting.\n"
2128
2129         cnf = Config()
2130
2131         reason_filename = self.pkg.changes_file[:-8] + ".reason"
2132         reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2133
2134         # Move all the files into the reject directory
2135         reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2136         self.force_reject(reject_files)
2137
2138         # If we fail here someone is probably trying to exploit the race
2139         # so let's just raise an exception ...
2140         if os.path.exists(reason_filename):
2141             os.unlink(reason_filename)
2142         reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2143
2144         rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
2145
2146         self.update_subst()
2147         if not manual:
2148             self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2149             self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2150             self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2151             os.write(reason_fd, reject_message)
2152             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2153         else:
2154             # Build up the rejection email
2155             user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2156             self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2157             self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2158             self.Subst["__REJECT_MESSAGE__"] = ""
2159             self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2160             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2161             # Write the rejection email out as the <foo>.reason file
2162             os.write(reason_fd, reject_mail_message)
2163
2164         del self.Subst["__REJECTOR_ADDRESS__"]
2165         del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2166         del self.Subst["__CC__"]
2167
2168         os.close(reason_fd)
2169
2170         # Send the rejection mail if appropriate
2171         if not cnf["Dinstall::Options::No-Mail"]:
2172             utils.send_mail(reject_mail_message)
2173
2174         if self.logger:
2175             self.logger.log(["rejected", self.pkg.changes_file])
2176
2177         return 0
2178
2179     ################################################################################
2180     def in_override_p(self, package, component, suite, binary_type, filename, session):
2181         """
2182         Check if a package already has override entries in the DB
2183
2184         @type package: string
2185         @param package: package name
2186
2187         @type component: string
2188         @param component: component name
2189
2190         @type suite: string
2191         @param suite: suite name
2192
2193         @type binary_type: string
2194         @param binary_type: type of the package
2195
2196         @type filename: string
2197         @param filename: filename we check
2198
2199         @return: the database result. But no one cares anyway.
2200
2201         """
2202
2203         cnf = Config()
2204
2205         if binary_type == "": # must be source
2206             file_type = "dsc"
2207         else:
2208             file_type = binary_type
2209
2210         # Override suite name; used for example with proposed-updates
2211         if cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
2212             suite = cnf["Suite::%s::OverrideSuite" % (suite)]
2213
2214         result = get_override(package, suite, component, file_type, session)
2215
2216         # If checking for a source package fall back on the binary override type
2217         # If checking for a source package, fall back on the binary override type
2218             result = get_override(package, suite, component, ['deb', 'udeb'], session)
2219
2220         # Remember the section and priority so we can check them later if appropriate
2221         if len(result) > 0:
2222             result = result[0]
2223             self.pkg.files[filename]["override section"] = result.section.section
2224             self.pkg.files[filename]["override priority"] = result.priority.priority
2225             return result
2226
2227         return None
2228
2229     ################################################################################
2230     def get_anyversion(self, sv_list, suite):
2231         """
2232         @type sv_list: list
2233         @param sv_list: list of (suite, version) tuples to check
2234
2235         @type suite: string
2236         @param suite: suite name
2237
2238         Returns the highest version in C{sv_list} found in C{suite} or in any suite it enhances.
2239         """
2240         Cnf = Config()
2241         anyversion = None
2242         anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
2243         for (s, v) in sv_list:
2244             if s in [ x.lower() for x in anysuite ]:
2245                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2246                     anyversion = v
2247
2248         return anyversion
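    # Worked example (hypothetical data): with
    # Suite::testing::VersionChecks::Enhances containing "stable" and
    # sv_list = [("stable", "1.0-1"), ("testing", "1.2-1"),
    #            ("unstable", "2.0-1")], get_anyversion(sv_list, "testing")
    # returns "1.2-1": the highest version in testing or a suite it
    # enhances; the unstable entry is ignored.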
2249
2250     ################################################################################
2251
2252     def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2253         """
2254         @type sv_list: list
2255         @param sv_list: list of (suite, version) tuples to check
2256
2257         @type filename: string
2258         @param filename: file name of the upload being checked (used in messages)
2259
2260         @type new_version: string
2261         @param new_version: version of the upload being checked
2262
2263         Ensure versions are newer than existing packages in target
2264         suites and that cross-suite version checking rules as
2265         set out in the conf file are satisfied.
2266         """
2267
2268         cnf = Config()
2269
2270         # Check versions for each target suite
2271         for target_suite in self.pkg.changes["distribution"].keys():
2272             must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
2273             must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
2274
2275             # Enforce "must be newer than target suite" even if conffile omits it
2276             if target_suite not in must_be_newer_than:
2277                 must_be_newer_than.append(target_suite)
2278
2279             for (suite, existent_version) in sv_list:
2280                 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
2281
2282                 if suite in must_be_newer_than and sourceful and vercmp < 1:
2283                     self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2284
2285                 if suite in must_be_older_than and vercmp > -1:
2286                     cansave = 0
2287
2288                     if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2289                         # we really use the other suite, ignoring the conflicting one ...
2290                         addsuite = self.pkg.changes["distribution-version"][suite]
2291
2292                         add_version = self.get_anyversion(sv_list, addsuite)
2293                         target_version = self.get_anyversion(sv_list, target_suite)
2294
2295                         if not add_version:
2296                             # not add_version can only happen if we map to a suite
2297                             # that doesn't enhance the suite we're propup'ing from.
2298                             # so "propup-ver x a b c; map a d" is a problem only if
2299                             # d doesn't enhance a.
2300                             #
2301                             # i think we could always propagate in this case, rather
2302                             # than complaining. either way, this isn't a REJECT issue
2303                             #
2304                             # And - we really should complain to the dorks who configured dak
2305                             self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyway" % (suite, addsuite))
2306                             self.pkg.changes.setdefault("propdistribution", {})
2307                             self.pkg.changes["propdistribution"][addsuite] = 1
2308                             cansave = 1
2309                         elif not target_version:
2310                             # not target_version is true when the package is NEW
2311                             # we could just stick with the "...old version..." REJECT
2312                             # for this, I think.
2313                             self.rejects.append("Won't propagate NEW packages.")
2314                         elif apt_pkg.VersionCompare(new_version, add_version) < 0:
2315                             # propagation would be redundant. no need to reject though.
2316                             self.warnings.append("ignoring version conflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2317                             cansave = 1
2318                         elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2319                              apt_pkg.VersionCompare(add_version, target_version) >= 0:
2320                             # propagate!!
2321                             self.warnings.append("Propagating upload to %s" % (addsuite))
2322                             self.pkg.changes.setdefault("propdistribution", {})
2323                             self.pkg.changes["propdistribution"][addsuite] = 1
2324                             cansave = 1
2325
2326                     if not cansave:
2327                         self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
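    # Illustrative (hypothetical) conffile fragment for the rules above:
    #
    #   Suite::unstable::VersionChecks {
    #     MustBeNewerThan { "stable"; "testing"; };
    #     MustBeOlderThan { "experimental"; };
    #   };
    #
    # i.e. a sourceful upload to unstable must be strictly newer than what
    # stable, testing and unstable itself carry, and strictly older than
    # what experimental carries, unless a distribution-version mapping
    # lets dak propagate the upload instead of rejecting it.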
2328
2329     ################################################################################
2330     def check_binary_against_db(self, filename, session):
2331         # Ensure version is sane
2332         q = session.query(BinAssociation)
2333         q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[filename]["package"])
2334         q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[filename]["architecture"], 'all']))
2335
2336         self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
2337                                        filename, self.pkg.files[filename]["version"], sourceful=False)
2338
2339         # Check for any existing copies of the file
2340         q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2341         q = q.filter_by(version=self.pkg.files[filename]["version"])
2342         q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2343
2344         if q.count() > 0:
2345             self.rejects.append("%s: cannot overwrite existing copy already in the archive." % filename)
2346
2347     ################################################################################
2348
2349     def check_source_against_db(self, filename, session):
2350         source = self.pkg.dsc.get("source")
2351         version = self.pkg.dsc.get("version")
2352
2353         # Ensure version is sane
2354         q = session.query(SrcAssociation)
2355         q = q.join(DBSource).filter(DBSource.source==source)
2356
2357         self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
2358                                        filename, version, sourceful=True)
2359
2360     ################################################################################
2361     def check_dsc_against_db(self, filename, session):
2362         """
2363
2364         @warning: NB: this function can remove entries from the 'files' index [if
2365          the orig tarball is a duplicate of the one in the archive]; if
2366          you're iterating over 'files' and call this function as part of
2367          the loop, be sure to add a check to the top of the loop to
2368          ensure you haven't just tried to dereference the deleted entry.
2369
2370         """
2371
2372         Cnf = Config()
2373         self.pkg.orig_files = {} # XXX: do we need to clear it?
2374         orig_files = self.pkg.orig_files
2375
2376         # Try and find all files mentioned in the .dsc.  This has
2377         # to work harder to cope with the multiple possible
2378         # locations of an .orig.tar.gz.
2379         # The ordering on the select is needed to pick the newest orig
2380         # when it exists in multiple places.
2381         for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2382             found = None
2383             if self.pkg.files.has_key(dsc_name):
2384                 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2385                 actual_size = int(self.pkg.files[dsc_name]["size"])
2386                 found = "%s in incoming" % (dsc_name)
2387
2388                 # Check the file does not already exist in the archive
2389                 ql = get_poolfile_like_name(dsc_name, session)
2390
2391                 # Strip out anything that isn't '%s' or '/%s$'; filter into
2392                 # a new list rather than mutating ql while iterating over it
2393                 ql = [i for i in ql if i.filename.endswith(dsc_name)]
2395
2396                 # "[dak] has not broken them.  [dak] has fixed a
2397                 # brokenness.  Your crappy hack exploited a bug in
2398                 # the old dinstall."
2399                 #
2400                 # "(Come on!  I thought it was always obvious that
2401                 # one just doesn't release different files with
2402                 # the same name and version.)"
2403                 #                        -- ajk@ on d-devel@l.d.o
2404
2405                 if len(ql) > 0:
2406                     # Ignore exact matches for .orig.tar.gz
2407                     match = 0
2408                     if re_is_orig_source.match(dsc_name):
2409                         for i in ql:
2410                             if self.pkg.files.has_key(dsc_name) and \
2411                                int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2412                                self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2413                                 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2414                                 # TODO: Don't delete the entry, just mark it as not needed
2415                                 # This would fix the stupidity of changing something we often iterate over
2416                                 # whilst we're doing it
2417                                 del self.pkg.files[dsc_name]
2418                                 dsc_entry["files id"] = i.file_id
2419                                 if not orig_files.has_key(dsc_name):
2420                                     orig_files[dsc_name] = {}
2421                                 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2422                                 match = 1
2423
2424                     if not match:
2425                         self.rejects.append("cannot overwrite existing copy of '%s' already in the archive." % (dsc_name))
2426
2427             elif re_is_orig_source.match(dsc_name):
2428                 # Check in the pool
2429                 ql = get_poolfile_like_name(dsc_name, session)
2430
                # Keep only the entries whose filename actually ends in the
                # name we're after.
                # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
                ql = [i for i in ql if i.filename.endswith(dsc_name)]
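                # A possible SQL-side alternative (sketch, untested): assuming
                # the PoolFile mapping exposes its filename column, something
                # like
                #     session.query(PoolFile).filter(PoolFile.filename.endswith(dsc_name))
                # would push the suffix match into the database instead.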
2436
2437                 if len(ql) > 0:
2438                     # Unfortunately, we may get more than one match here if,
2439                     # for example, the package was in potato but had an -sa
2440                     # upload in woody.  So we need to choose the right one.
2441
2442                     # default to something sane in case we don't match any or have only one
2443                     x = ql[0]
2444
2445                     if len(ql) > 1:
2446                         for i in ql:
2447                             old_file = os.path.join(i.location.path, i.filename)
2448                             old_file_fh = utils.open_file(old_file)
2449                             actual_md5 = apt_pkg.md5sum(old_file_fh)
2450                             old_file_fh.close()
2451                             actual_size = os.stat(old_file)[stat.ST_SIZE]
2452                             if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2453                                 x = i
2454
                    # Use the entry we settled on (x), not the loop variable
                    # left over from the scan above.
                    old_file = os.path.join(x.location.path, x.filename)
                    old_file_fh = utils.open_file(old_file)
                    actual_md5 = apt_pkg.md5sum(old_file_fh)
                    old_file_fh.close()
                    actual_size = os.stat(old_file)[stat.ST_SIZE]
2460                     found = old_file
2461                     suite_type = x.location.archive_type
2462                     # need this for updating dsc_files in install()
2463                     dsc_entry["files id"] = x.file_id
2464                     # See install() in process-accepted...
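                    # (The entry built below ends up shaped roughly like
                    #  {'id': ..., 'path': ..., 'location': ...}.)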
2465                     if not orig_files.has_key(dsc_name):
2466                         orig_files[dsc_name] = {}
2467                     orig_files[dsc_name]["id"] = x.file_id
2468                     orig_files[dsc_name]["path"] = old_file
2469                     orig_files[dsc_name]["location"] = x.location.location_id
2470                 else:
2471                     # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2472                     # Not there? Check the queue directories...
2473                     for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2474                         if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2475                             continue
2476                         in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2477                         if os.path.exists(in_otherdir):
2478                             in_otherdir_fh = utils.open_file(in_otherdir)
2479                             actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2480                             in_otherdir_fh.close()
2481                             actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2482                             found = in_otherdir
2483                             if not orig_files.has_key(dsc_name):
2484                                 orig_files[dsc_name] = {}
2485                             orig_files[dsc_name]["path"] = in_otherdir
2486
2487                     if not found:
2488                         self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2489                         continue
2490             else:
2491                 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2492                 continue
2493             if actual_md5 != dsc_entry["md5sum"]:
2494                 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2495             if actual_size != int(dsc_entry["size"]):
2496                 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2497
2498     ################################################################################
    # This is used by process-new and process-holding to recheck a changes file
    # at the time we're running.  It mainly wraps various other internal
    # functions and is similar to accepted_checks; the two should probably be
    # tidied up and combined.
2503     def recheck(self, session):
2504         cnf = Config()
2505         for f in self.pkg.files.keys():
            # The .orig.tar.gz can disappear out from under us if it's a
            # duplicate of one in the archive.
2508             if not self.pkg.files.has_key(f):
2509                 continue
2510
2511             entry = self.pkg.files[f]
2512
2513             # Check that the source still exists
2514             if entry["type"] == "deb":
2515                 source_version = entry["source version"]
2516                 source_package = entry["source package"]
2517                 if not self.pkg.changes["architecture"].has_key("source") \
2518                    and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
2519                     source_epochless_version = re_no_epoch.sub('', source_version)
2520                     dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
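                    # e.g. source "foo" at version "1:2.3-1" gives "foo_2.3-1.dsc"
                    # (the epoch never appears in filenames).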
2521                     found = False
2522                     for q in ["Embargoed", "Unembargoed", "Newstage"]:
2523                         if cnf.has_key("Dir::Queue::%s" % (q)):
2524                             if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
2525                                 found = True
2526                     if not found:
2527                         self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2528
2529             # Version and file overwrite checks
2530             if entry["type"] == "deb":
2531                 self.check_binary_against_db(f, session)
2532             elif entry["type"] == "dsc":
2533                 self.check_source_against_db(f, session)
2534                 self.check_dsc_against_db(f, session)
2535
2536     ################################################################################
2537     def accepted_checks(self, overwrite_checks, session):
        # Recheck anything that relies on the database, since that's not
        # frozen between accept and our run time when called from p-a.
2540
2541         # overwrite_checks is set to False when installing to stable/oldstable
2542
        propagate = {}
        nopropagate = {}
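        # A suite is propagated to only if at least one file is in its
        # override tables and no file is missing from them (see the loops
        # below).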
2545
2546         # Find the .dsc (again)
2547         dsc_filename = None
2548         for f in self.pkg.files.keys():
2549             if self.pkg.files[f]["type"] == "dsc":
2550                 dsc_filename = f
2551
2552         for checkfile in self.pkg.files.keys():
            # The .orig.tar.gz can disappear out from under us if it's a
            # duplicate of one in the archive.
2555             if not self.pkg.files.has_key(checkfile):
2556                 continue
2557
2558             entry = self.pkg.files[checkfile]
2559
2560             # Check that the source still exists
2561             if entry["type"] == "deb":
2562                 source_version = entry["source version"]
2563                 source_package = entry["source package"]
                if not self.pkg.changes["architecture"].has_key("source") \
                   and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
                    self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2567
2568             # Version and file overwrite checks
2569             if overwrite_checks:
2570                 if entry["type"] == "deb":
2571                     self.check_binary_against_db(checkfile, session)
2572                 elif entry["type"] == "dsc":
2573                     self.check_source_against_db(checkfile, session)
2574                     self.check_dsc_against_db(dsc_filename, session)
2575
            # Propagate in the case it is in the override tables:
            for suite in self.pkg.changes.get("propdistribution", {}).keys():
                if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
                    propagate[suite] = 1
                else:
                    nopropagate[suite] = 1
2582
        for suite in propagate.keys():
            if suite in nopropagate:
2585                 continue
2586             self.pkg.changes["distribution"][suite] = 1
2587
        for checkfile in self.pkg.files.keys():
            # Re-fetch the entry for this file; previously this reused a
            # stale 'entry' left over from the loop above.
            entry = self.pkg.files[checkfile]
            # Check the package is still in the override tables
            for suite in self.pkg.changes["distribution"].keys():
                if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2592                     self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2593
2594     ################################################################################
    # This is not really a reject, but an unaccept; however, since (a) the
    # code for that is non-trivial (reopening bugs, unannouncing etc.) and
    # (b) this should be extremely rare, for now we'll go with whining at
    # our admin folks...
2598
2599     def do_unaccept(self):
2600         cnf = Config()
2601
2602         self.update_subst()
2603         self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2604         self.Subst["__REJECT_MESSAGE__"] = self.package_info()
2605         self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2606         self.Subst["__BCC__"] = "X-DAK: dak process-accepted"
2607         if cnf.has_key("Dinstall::Bcc"):
2608             self.Subst["__BCC__"] += "\nBcc: %s" % (cnf["Dinstall::Bcc"])
2609
2610         template = os.path.join(cnf["Dir::Templates"], "process-accepted.unaccept")
2611
2612         reject_mail_message = utils.TemplateSubst(self.Subst, template)
2613
2614         # Write the rejection email out as the <foo>.reason file
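        # (changes_file always ends in ".changes", hence the [:-8] below.)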
2615         reason_filename = os.path.basename(self.pkg.changes_file[:-8]) + ".reason"
2616         reject_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2617
2618         # If we fail here someone is probably trying to exploit the race
2619         # so let's just raise an exception ...
2620         if os.path.exists(reject_filename):
2621             os.unlink(reject_filename)
2622
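        # O_EXCL makes the open fail if the file reappeared in the meantime,
        # rather than silently reusing whatever was planted there.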
2623         fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2624         os.write(fd, reject_mail_message)
2625         os.close(fd)
2626
2627         utils.send_mail(reject_mail_message)
2628
2629         del self.Subst["__REJECTOR_ADDRESS__"]
2630         del self.Subst["__REJECT_MESSAGE__"]
2631         del self.Subst["__CC__"]
2632
2633     ################################################################################
2634     # If any file of an upload has a recent mtime then chances are good
    # the file is still being uploaded.
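    # A typical caller (sketch): if upload.upload_too_new() returns True,
    # skip the upload on this run and pick it up again on the next one.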
2636
2637     def upload_too_new(self):
2638         cnf = Config()
2639         too_new = False
2640         # Move back to the original directory to get accurate time stamps
2641         cwd = os.getcwd()
2642         os.chdir(self.pkg.directory)
2643         file_list = self.pkg.files.keys()
2644         file_list.extend(self.pkg.dsc_files.keys())
2645         file_list.append(self.pkg.changes_file)
        for f in file_list:
            try:
                last_modified = time.time() - os.path.getmtime(f)
                if last_modified < int(cnf["Dinstall::SkipTime"]):
                    too_new = True
                    break
            except OSError:
                # The file may have vanished under us; just skip it.
                pass
2654
2655         os.chdir(cwd)
2656         return too_new