1 #!/usr/bin/env python
2 # vim:set et sw=4:
3
4 """
5 Queue utility functions for dak
6
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009  Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
11 """
12
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
17
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
21 # GNU General Public License for more details.
22
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
26
27 ###############################################################################
28
29 import errno
30 import os
31 import stat
32 import sys
33 import time
34 import apt_inst
35 import apt_pkg
36 import utils
37 import commands
38 import shutil
39 import textwrap
40 from types import *
41 from sqlalchemy.sql.expression import desc
42 from sqlalchemy.orm.exc import NoResultFound
43
44 import yaml
45
46 from dak_exceptions import *
47 from changes import *
48 from regexes import *
49 from config import Config
50 from holding import Holding
51 from urgencylog import UrgencyLog
52 from dbconn import *
53 from summarystats import SummaryStats
54 from utils import parse_changes, check_dsc_files
55 from textutils import fix_maintainer
56 from binary import Binary
57 from lintian import parse_lintian_output
58
59 ###############################################################################
60
61 def get_type(f, session):
62     """
63     Get the file type of C{f}
64
65     @type f: dict
66     @param f: file entry from Changes object
67
68     @type session: SQLA Session
69     @param session: SQL Alchemy session object
70
71     @rtype: string
72     @return: filetype
73
74     """
75     # Determine the type
76     if f.has_key("dbtype"):
77         file_type = f["dbtype"]
78     elif re_source_ext.match(f["type"]):
79         file_type = "dsc"
80     else:
81         utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (f["type"]))
82
83     # Validate the override type
84     type_id = get_override_type(file_type, session)
85     if type_id is None:
86         utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))
87
88     return file_type
89
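# A quick illustration (a sketch, assuming the override types exist in the
# database so that get_override_type() succeeds):
#
#     get_type({"dbtype": "deb", ...}, session)   # -> "deb"
#     get_type({"type": "dsc", ...}, session)     # -> "dsc" (via re_source_ext)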
90 ################################################################################
91
92 # Determine what parts in a .changes are NEW
93
94 def determine_new(changes, files, warn=1):
95     """
96     Determine what parts in a C{changes} file are NEW.
97
98     @type changes: Upload.Pkg.changes dict
99     @param changes: Changes dictionary
100
101     @type files: Upload.Pkg.files dict
102     @param files: Files dictionary
103
104     @type warn: bool
105     @param warn: Warn if overrides are added for (old)stable
106
107     @rtype: dict
108     @return: dictionary of NEW components.
109
110     """
111     new = {}
112
113     session = DBConn().session()
114
115     # Build up a list of potentially new things
116     for name, f in files.items():
117         # Skip byhand elements
118         if f["type"] == "byhand":
119             continue
120         pkg = f["package"]
121         priority = f["priority"]
122         section = f["section"]
123         file_type = get_type(f, session)
124         component = f["component"]
125
126         if file_type == "dsc":
127             priority = "source"
128
129         if not new.has_key(pkg):
130             new[pkg] = {}
131             new[pkg]["priority"] = priority
132             new[pkg]["section"] = section
133             new[pkg]["type"] = file_type
134             new[pkg]["component"] = component
135             new[pkg]["files"] = []
136         else:
137             old_type = new[pkg]["type"]
138             if old_type != file_type:
139                 # source gets trumped by deb or udeb
140                 if old_type == "dsc":
141                     new[pkg]["priority"] = priority
142                     new[pkg]["section"] = section
143                     new[pkg]["type"] = file_type
144                     new[pkg]["component"] = component
145
146         new[pkg]["files"].append(name)
147
148         if f.has_key("othercomponents"):
149             new[pkg]["othercomponents"] = f["othercomponents"]
150
151     for suite in changes["suite"].keys():
152         for pkg in new.keys():
153             ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
154             if len(ql) > 0:
155                 for file_entry in new[pkg]["files"]:
156                     if files[file_entry].has_key("new"):
157                         del files[file_entry]["new"]
158                 del new[pkg]
159
160     if warn:
161         for s in ['stable', 'oldstable']:
162             if changes["suite"].has_key(s):
163                 print "WARNING: overrides will be added for %s!" % s
164         for pkg in new.keys():
165             if new[pkg].has_key("othercomponents"):
166                 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
167
168     session.close()
169
170     return new
171
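# For illustration, an entry in the returned dict has this shape (the values
# here are hypothetical):
#
#     new["foo"] = {
#         "priority": "optional",
#         "section": "utils",
#         "type": "deb",
#         "component": "main",
#         "files": ["foo_1.0-1_i386.deb"],
#     }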
172 ################################################################################
173
174 def check_valid(new):
175     """
176     Check if section and priority for NEW packages exist in database.
177     Additionally does sanity checks:
178       - debian-installer packages have to be udeb (or source)
179       - non debian-installer packages can not be udeb
180       - source priority can only be assigned to dsc file types
181
182     @type new: dict
183     @param new: Dict of new packages with their section, priority and type.
184
185     """
186     for pkg in new.keys():
187         section_name = new[pkg]["section"]
188         priority_name = new[pkg]["priority"]
189         file_type = new[pkg]["type"]
190
191         section = get_section(section_name)
192         if section is None:
193             new[pkg]["section id"] = -1
194         else:
195             new[pkg]["section id"] = section.section_id
196
197         priority = get_priority(priority_name)
198         if priority is None:
199             new[pkg]["priority id"] = -1
200         else:
201             new[pkg]["priority id"] = priority.priority_id
202
203         # Sanity checks
204         di = section_name.find("debian-installer") != -1
205
206         # If d-i, we must be udeb and vice-versa
207         if     (di and file_type not in ("udeb", "dsc")) or \
208            (not di and file_type == "udeb"):
209             new[pkg]["section id"] = -1
210
211         # If dsc we need to be source and vice-versa
212         if (priority_name == "source" and file_type != "dsc") or \
213            (priority_name != "source" and file_type == "dsc"):
214             new[pkg]["priority id"] = -1
215
216 ###############################################################################
217
218 def check_status(files):
219     new = byhand = 0
220     for f in files.keys():
221         if files[f]["type"] == "byhand":
222             byhand = 1
223         elif files[f].has_key("new"):
224             new = 1
225     return (new, byhand)
226
227 ###############################################################################
228
229 # Used by Upload.check_timestamps
230 class TarTime(object):
231     def __init__(self, future_cutoff, past_cutoff):
232         self.reset()
233         self.future_cutoff = future_cutoff
234         self.past_cutoff = past_cutoff
235
236     def reset(self):
237         self.future_files = {}
238         self.ancient_files = {}
239
240     def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
241         if MTime > self.future_cutoff:
242             self.future_files[Name] = MTime
243         if MTime < self.past_cutoff:
244             self.ancient_files[Name] = MTime
245
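# Illustrative driver code (a sketch, assuming the old python-apt extractor
# callback API that this signature matches; Upload.check_timestamps is the
# real caller):
#
#     tar = TarTime(future_cutoff=time.time() + 24 * 3600, past_cutoff=0)
#     deb_file = utils.open_file("foo_1.0-1_i386.deb")
#     apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
#     if tar.future_files or tar.ancient_files:
#         pass  # reject: timestamps outside the allowed window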
246 ###############################################################################
247
248 class Upload(object):
249     """
250     Everything that has to do with processing an upload.
251
252     """
253     def __init__(self):
254         self.logger = None
255         self.pkg = Changes()
256         self.reset()
257
258     ###########################################################################
259
260     def reset (self):
261         """ Reset a number of internal variables."""
262
263         # Initialize the substitution template map
264         cnf = Config()
265         self.Subst = {}
266         self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
267         self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
268         self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
269         self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
270
271         self.rejects = []
272         self.warnings = []
273         self.notes = []
274
275         self.pkg.reset()
276
277     def package_info(self):
278         """
279         Format various messages from this Upload to send to the maintainer.
280         """
281
282         msgs = (
283             ('Reject Reasons', self.rejects),
284             ('Warnings', self.warnings),
285             ('Notes', self.notes),
286         )
287
288         msg = ''
289         for title, messages in msgs:
290             if messages:
291                 msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
292         msg += '\n'
293
294         return msg
295
296     ###########################################################################
297     def update_subst(self):
298         """ Set up the per-package template substitution mappings """
299
300         cnf = Config()
301
302         # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
303         if not self.pkg.changes.has_key("architecture") or not \
304            isinstance(self.pkg.changes["architecture"], dict):
305             self.pkg.changes["architecture"] = { "Unknown" : "" }
306
307         # and maintainer2047 may not exist.
308         if not self.pkg.changes.has_key("maintainer2047"):
309             self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]
310
311         self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
312         self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
313         self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")
314
315         # For source uploads the Changed-By field wins; otherwise Maintainer wins.
316         if self.pkg.changes["architecture"].has_key("source") and \
317            self.pkg.changes["changedby822"] != "" and \
318            (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
319
320             self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
321             self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
322             self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
323         else:
324             self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
325             self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
326             self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")
327
328         if "sponsoremail" in self.pkg.changes:
329             self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
330
331         if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
332             self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
333
334         # Apply any global override of the Maintainer field
335         if cnf.get("Dinstall::OverrideMaintainer"):
336             self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
337             self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]
338
339         self.Subst["__REJECT_MESSAGE__"] = self.package_info()
340         self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
341         self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
342
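    # A minimal sketch of how this map is consumed elsewhere in dak (the
    # template name here is hypothetical):
    #
    #     template = os.path.join(cnf["Dir::Templates"], "process-unchecked.accepted")
    #     mail_message = utils.TemplateSubst(self.Subst, template)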
343     ###########################################################################
344     def load_changes(self, filename):
345         """
346         @rtype: boolean
347         @return: whether the changes file was valid or not.  We may want to
348                  reject even if this is True (see what gets put in self.rejects).
349                  This is simply to prevent us even trying things later which will
350                  fail because we couldn't properly parse the file.
351         """
352         Cnf = Config()
353         self.pkg.changes_file = filename
354
355         # Parse the .changes field into a dictionary
356         try:
357             self.pkg.changes.update(parse_changes(filename))
358         except CantOpenError:
359             self.rejects.append("%s: can't read file." % (filename))
360             return False
361         except ParseChangesError, line:
362             self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
363             return False
364         except ChangesUnicodeError:
365             self.rejects.append("%s: changes file not proper utf-8" % (filename))
366             return False
367
368         # Parse the Files field from the .changes into another dictionary
369         try:
370             self.pkg.files.update(utils.build_file_list(self.pkg.changes))
371         except ParseChangesError, line:
372             self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
373             return False
374         except UnknownFormatError, format:
375             self.rejects.append("%s: unknown format '%s'." % (filename, format))
376             return False
377
378         # Check for mandatory fields
379         for i in ("distribution", "source", "binary", "architecture",
380                   "version", "maintainer", "files", "changes", "description"):
381             if not self.pkg.changes.has_key(i):
382                 # Avoid undefined errors later
383                 self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
384                 return False
385
386         # Strip a source version in brackets from the source field
387         if re_strip_srcver.search(self.pkg.changes["source"]):
388             self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])
389
390         # Ensure the source field is a valid package name.
391         if not re_valid_pkg_name.match(self.pkg.changes["source"]):
392             self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))
393
394         # Split multi-value fields into a lower-level dictionary
395         for i in ("architecture", "distribution", "binary", "closes"):
396             o = self.pkg.changes.get(i, "")
397             if o != "":
398                 del self.pkg.changes[i]
399
400             self.pkg.changes[i] = {}
401
402             for j in o.split():
403                 self.pkg.changes[i][j] = 1
404
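        # e.g. "Architecture: source i386 amd64" becomes
        #     self.pkg.changes["architecture"] == {"source": 1, "i386": 1, "amd64": 1}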
405         # Fix the Maintainer: field to be RFC822/2047 compatible
406         try:
407             (self.pkg.changes["maintainer822"],
408              self.pkg.changes["maintainer2047"],
409              self.pkg.changes["maintainername"],
410              self.pkg.changes["maintaineremail"]) = \
411                    fix_maintainer (self.pkg.changes["maintainer"])
412         except ParseMaintError, msg:
413             self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
414                    % (filename, self.pkg.changes["maintainer"], msg))
415
416         # ...likewise for the Changed-By: field if it exists.
417         try:
418             (self.pkg.changes["changedby822"],
419              self.pkg.changes["changedby2047"],
420              self.pkg.changes["changedbyname"],
421              self.pkg.changes["changedbyemail"]) = \
422                    fix_maintainer (self.pkg.changes.get("changed-by", ""))
423         except ParseMaintError, msg:
424             self.pkg.changes["changedby822"] = ""
425             self.pkg.changes["changedby2047"] = ""
426             self.pkg.changes["changedbyname"] = ""
427             self.pkg.changes["changedbyemail"] = ""
428
429             self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
430                % (filename, self.pkg.changes.get("changed-by", ""), msg))
431
432         # Ensure all the values in Closes: are numbers
433         if self.pkg.changes.has_key("closes"):
434             for i in self.pkg.changes["closes"].keys():
435                 if re_isanum.match(i) is None:
436                     self.rejects.append("%s: `%s' from Closes field isn't a number." % (filename, i))
437
438         # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
439         self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
440         self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])
441
442         # Check the .changes is non-empty
443         if not self.pkg.files:
444             self.rejects.append("%s: nothing to do (Files field is empty)." % (filename))
445             return False
446
447         # Changes was syntactically valid even if we'll reject
448         return True
449
450     ###########################################################################
451
452     def check_distributions(self):
453         "Check and map the Distribution field"
454
455         Cnf = Config()
456
457         # Handle suite mappings
458         for m in Cnf.ValueList("SuiteMappings"):
459             args = m.split()
460             mtype = args[0]
461             if mtype == "map" or mtype == "silent-map":
462                 (source, dest) = args[1:3]
463                 if self.pkg.changes["distribution"].has_key(source):
464                     del self.pkg.changes["distribution"][source]
465                     self.pkg.changes["distribution"][dest] = 1
466                     if mtype != "silent-map":
467                         self.notes.append("Mapping %s to %s." % (source, dest))
468                 if self.pkg.changes.has_key("distribution-version"):
469                     if self.pkg.changes["distribution-version"].has_key(source):
470                         self.pkg.changes["distribution-version"][source]=dest
471             elif mtype == "map-unreleased":
472                 (source, dest) = args[1:3]
473                 if self.pkg.changes["distribution"].has_key(source):
474                     for arch in self.pkg.changes["architecture"].keys():
475                         if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
476                             self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
477                             del self.pkg.changes["distribution"][source]
478                             self.pkg.changes["distribution"][dest] = 1
479                             break
480             elif mtype == "ignore":
481                 suite = args[1]
482                 if self.pkg.changes["distribution"].has_key(suite):
483                     del self.pkg.changes["distribution"][suite]
484                     self.warnings.append("Ignoring %s as a target suite." % (suite))
485             elif mtype == "reject":
486                 suite = args[1]
487                 if self.pkg.changes["distribution"].has_key(suite):
488                     self.rejects.append("Uploads to %s are not accepted." % (suite))
489             elif mtype == "propup-version":
490                 # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
491                 #
492                 # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
493                 if self.pkg.changes["distribution"].has_key(args[1]):
494                     self.pkg.changes.setdefault("distribution-version", {})
495                     for suite in args[2:]:
496                         self.pkg.changes["distribution-version"][suite] = suite
497
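        # For reference, SuiteMappings entries in the configuration look like
        # the following (suite names here are only illustrative):
        #
        #     "map stable proposed-updates";
        #     "silent-map testing-security testing-proposed-updates";
        #     "map-unreleased unstable experimental";
        #     "ignore testing";
        #     "reject stable-security";
        #     "propup-version testing-proposed-updates stable testing";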
498         # Ensure there is (still) a target distribution
499         if len(self.pkg.changes["distribution"].keys()) < 1:
500             self.rejects.append("No valid distribution remaining.")
501
502         # Ensure target distributions exist
503         for suite in self.pkg.changes["distribution"].keys():
504             if not Cnf.has_key("Suite::%s" % (suite)):
505                 self.rejects.append("Unknown distribution `%s'." % (suite))
506
507     ###########################################################################
508
509     def binary_file_checks(self, f, session):
510         cnf = Config()
511         entry = self.pkg.files[f]
512
513         # Extract package control information
514         deb_file = utils.open_file(f)
515         try:
516             control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
517         except:
518             self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
519             deb_file.close()
520             # Can't continue, none of the checks on control would work.
521             return
522
523         # Check for mandatory "Description:"
524         deb_file.seek(0)
525         try:
526             apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
527         except:
528             self.rejects.append("%s: Missing Description in binary package" % (f))
529             return
530
531         deb_file.close()
532
533         # Check for mandatory fields
534         for field in [ "Package", "Architecture", "Version" ]:
535             if control.Find(field) is None:
536                 # Can't continue
537                 self.rejects.append("%s: No %s field in control." % (f, field))
538                 return
539
540         # Ensure the package name matches the one given in the .changes
541         if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
542             self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
543
544         # Validate the package field
545         package = control.Find("Package")
546         if not re_valid_pkg_name.match(package):
547             self.rejects.append("%s: invalid package name '%s'." % (f, package))
548
549         # Validate the version field
550         version = control.Find("Version")
551         if not re_valid_version.match(version):
552             self.rejects.append("%s: invalid version number '%s'." % (f, version))
553
554         # Ensure the architecture of the .deb is one we know about.
555         default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
556         architecture = control.Find("Architecture")
557         upload_suite = self.pkg.changes["distribution"].keys()[0]
558
559         if      architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session)] \
560             and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session)]:
561             self.rejects.append("Unknown architecture '%s'." % (architecture))
562
563         # Ensure the architecture of the .deb is one of the ones
564         # listed in the .changes.
565         if not self.pkg.changes["architecture"].has_key(architecture):
566             self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
567
568         # Sanity-check the Depends field
569         depends = control.Find("Depends")
570         if depends == '':
571             self.rejects.append("%s: Depends field is empty." % (f))
572
573         # Sanity-check the Provides field
574         provides = control.Find("Provides")
575         if provides:
576             provide = re_spacestrip.sub('', provides)
577             if provide == '':
578                 self.rejects.append("%s: Provides field is empty." % (f))
579             prov_list = provide.split(",")
580             for prov in prov_list:
581                 if not re_valid_pkg_name.match(prov):
582                     self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
583
584         # Check the section & priority match those given in the .changes (non-fatal)
585         if     control.Find("Section") and entry["section"] != "" \
586            and entry["section"] != control.Find("Section"):
587             self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
588                                 (f, control.Find("Section", ""), entry["section"]))
589         if control.Find("Priority") and entry["priority"] != "" \
590            and entry["priority"] != control.Find("Priority"):
591             self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
592                                 (f, control.Find("Priority", ""), entry["priority"]))
593
594         entry["package"] = package
595         entry["architecture"] = architecture
596         entry["version"] = version
597         entry["maintainer"] = control.Find("Maintainer", "")
598
599         if f.endswith(".udeb"):
600             self.pkg.files[f]["dbtype"] = "udeb"
601         elif f.endswith(".deb"):
602             self.pkg.files[f]["dbtype"] = "deb"
603         else:
604             self.rejects.append("%s is neither a .deb nor a .udeb." % (f))
605
606         entry["source"] = control.Find("Source", entry["package"])
607
608         # Get the source version
609         source = entry["source"]
610         source_version = ""
611
612         if source.find("(") != -1:
613             m = re_extract_src_version.match(source)
614             source = m.group(1)
615             source_version = m.group(2)
616
617         if not source_version:
618             source_version = self.pkg.files[f]["version"]
619
620         entry["source package"] = source
621         entry["source version"] = source_version
622
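        # e.g. a control file carrying "Source: foo (1:2.3-1)" yields
        # entry["source package"] == "foo" and entry["source version"] == "1:2.3-1";
        # without the parenthesised part, the binary's own version is reused.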
623         # Ensure the filename matches the contents of the .deb
624         m = re_isadeb.match(f)
625
626         #  package name
627         file_package = m.group(1)
628         if entry["package"] != file_package:
629             self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
630                                 (f, file_package, entry["dbtype"], entry["package"]))
631         epochless_version = re_no_epoch.sub('', control.Find("Version"))
632
633         #  version
634         file_version = m.group(2)
635         if epochless_version != file_version:
636             self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
637                                 (f, file_version, entry["dbtype"], epochless_version))
638
639         #  architecture
640         file_architecture = m.group(3)
641         if entry["architecture"] != file_architecture:
642             self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
643                                 (f, file_architecture, entry["dbtype"], entry["architecture"]))
644
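        # e.g. "foo_2.3-1_i386.deb" must decompose into package "foo",
        # epochless version "2.3-1" and architecture "i386", each matching
        # the control file.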
645         # Check for existent source
646         source_version = entry["source version"]
647         source_package = entry["source package"]
648         if self.pkg.changes["architecture"].has_key("source"):
649             if source_version != self.pkg.changes["version"]:
650                 self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
651                                     (source_version, f, self.pkg.changes["version"]))
652         else:
653             # Check in the SQL database
654             if not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
655                 # Check in one of the other directories
656                 source_epochless_version = re_no_epoch.sub('', source_version)
657                 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
658                 if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
659                     entry["byhand"] = 1
660                 elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
661                     entry["new"] = 1
662                 else:
663                     dsc_file_exists = False
664                     for myq in ["Accepted", "Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
665                         if cnf.has_key("Dir::Queue::%s" % (myq)):
666                             if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
667                                 dsc_file_exists = True
668                                 break
669
670                     if not dsc_file_exists:
671                         self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
672
673         # Check the version and for file overwrites
674         self.check_binary_against_db(f, session)
675
676         # Temporarily disable contents generation until we change the table storage layout
677         #b = Binary(f)
678         #b.scan_package()
679         #if len(b.rejects) > 0:
680         #    for j in b.rejects:
681         #        self.rejects.append(j)
682
683     def source_file_checks(self, f, session):
684         entry = self.pkg.files[f]
685
686         m = re_issource.match(f)
687         if not m:
688             return
689
690         entry["package"] = m.group(1)
691         entry["version"] = m.group(2)
692         entry["type"] = m.group(3)
693
694         # Ensure the source package name matches the Source field in the .changes
695         if self.pkg.changes["source"] != entry["package"]:
696             self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
697
698         # Ensure the source version matches the version in the .changes file
699         if re_is_orig_source.match(f):
700             changes_version = self.pkg.changes["chopversion2"]
701         else:
702             changes_version = self.pkg.changes["chopversion"]
703
704         if changes_version != entry["version"]:
705             self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))
706
707         # Ensure the .changes lists source in the Architecture field
708         if not self.pkg.changes["architecture"].has_key("source"):
709             self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))
710
711         # Check the signature of a .dsc file
712         if entry["type"] == "dsc":
713             # check_signature returns either:
714             #  (None, [list, of, rejects]) or (signature, [])
715             (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
716             for j in rejects:
717                 self.rejects.append(j)
718
719         entry["architecture"] = "source"
720
721     def per_suite_file_checks(self, f, suite, session):
722         cnf = Config()
723         entry = self.pkg.files[f]
724
725         # Skip byhand
726         if entry.has_key("byhand"):
727             return
728
729         # Check we have fields we need to do these checks
730         oktogo = True
731         for m in ['component', 'package', 'priority', 'size', 'md5sum']:
732             if not entry.has_key(m):
733                 self.rejects.append("file '%s' does not have field %s set" % (f, m))
734                 oktogo = False
735
736         if not oktogo:
737             return
738
739         # Handle component mappings
740         for m in cnf.ValueList("ComponentMappings"):
741             (source, dest) = m.split()
742             if entry["component"] == source:
743                 entry["original component"] = source
744                 entry["component"] = dest
745
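        # ComponentMappings entries are plain "source dest" pairs, e.g. the
        # historical (illustrative) mapping:
        #
        #     "non-US/main main";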
746         # Ensure the component is valid for the target suite
747         if cnf.has_key("Suite::%s::Components" % (suite)) and \
748            entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
749             self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
750             return
751
752         # Validate the component
753         if not get_component(entry["component"], session):
754             self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
755             return
756
757         # See if the package is NEW
758         if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
759             entry["new"] = 1
760
761         # Validate the priority
762         if entry["priority"].find('/') != -1:
763             self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))
764
765         # Determine the location
766         location = cnf["Dir::Pool"]
767         l = get_location(location, entry["component"], session=session)
768         if l is None:
769             self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
770             entry["location id"] = -1
771         else:
772             entry["location id"] = l.location_id
773
774         # Check the md5sum & size against existing files (if any)
775         entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])
776
777         found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
778                                          entry["size"], entry["md5sum"], entry["location id"])
779
780         if found is None:
781             self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
782         elif found is False and poolfile is not None:
783             self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
784         else:
785             if poolfile is None:
786                 entry["files id"] = None
787             else:
788                 entry["files id"] = poolfile.file_id
789
790         # Check for packages that have moved from one component to another
791         entry['suite'] = suite
792         res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
793         if res.rowcount > 0:
794             entry["othercomponents"] = res.fetchone()[0]
795
796     def check_files(self, action=True):
797         file_keys = self.pkg.files.keys()
798         holding = Holding()
799         cnf = Config()
800
801         if action:
802             cwd = os.getcwd()
803             os.chdir(self.pkg.directory)
804             for f in file_keys:
805                 ret = holding.copy_to_holding(f)
806                 if ret is not None:
807                     # XXX: Should we bail out here or try and continue?
808                     self.rejects.append(ret)
809
810             os.chdir(cwd)
811
812         # check we already know the changes file
813         # [NB: this check must be done post-suite mapping]
814         base_filename = os.path.basename(self.pkg.changes_file)
815
816         session = DBConn().session()
817
818         try:
819             dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
820             # if in the pool or in a queue other than unchecked, reject
821             if (dbc.in_queue is None) \
822                    or (dbc.in_queue is not None
823                        and dbc.in_queue.queue_name != 'unchecked'):
824                 self.rejects.append("%s file already known to dak" % base_filename)
825         except NoResultFound, e:
826             # not known, good
827             pass
828
829         has_binaries = False
830         has_source = False
831
832         for f, entry in self.pkg.files.items():
833             # Ensure the file does not already exist in one of the accepted directories
834             for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
835                 if not cnf.has_key("Dir::Queue::%s" % (d)): continue
836                 if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
837                     self.rejects.append("%s file already exists in the %s directory." % (f, d))
838
839             if not re_taint_free.match(f):
840                 self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))
841
842             # Check the file is readable
843             if os.access(f, os.R_OK) == 0:
844                 # When running in -n, copy_to_holding() won't have
845                 # generated the reject_message, so we need to.
846                 if action:
847                     if os.path.exists(f):
848                         self.rejects.append("Can't read `%s'. [permission denied]" % (f))
849                     else:
850                         self.rejects.append("Can't read `%s'. [file not found]" % (f))
851                 entry["type"] = "unreadable"
852                 continue
853
854             # If it's byhand skip remaining checks
855             if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
856                 entry["byhand"] = 1
857                 entry["type"] = "byhand"
858
859             # Checks for a binary package...
860             elif re_isadeb.match(f):
861                 has_binaries = True
862                 entry["type"] = "deb"
863
864                 # This routine appends to self.rejects/warnings as appropriate
865                 self.binary_file_checks(f, session)
866
867             # Checks for a source package...
868             elif re_issource.match(f):
869                 has_source = True
870
871                 # This routine appends to self.rejects/warnings as appropriate
872                 self.source_file_checks(f, session)
873
874             # Not a binary or source package?  Assume byhand...
875             else:
876                 entry["byhand"] = 1
877                 entry["type"] = "byhand"
878
879             # Per-suite file checks
880             entry["oldfiles"] = {}
881             for suite in self.pkg.changes["distribution"].keys():
882                 self.per_suite_file_checks(f, suite, session)
883
884         session.close()
885
886         # If the .changes file says it has source, it must have source.
887         if self.pkg.changes["architecture"].has_key("source"):
888             if not has_source:
889                 self.rejects.append("no source found although Architecture field in changes mentions source.")
890
891             if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
892                 self.rejects.append("source-only uploads are not supported.")
893
894     ###########################################################################
895     def check_dsc(self, action=True, session=None):
896         """Returns bool indicating whether or not the source changes are valid"""
897         # Ensure there is source to check
898         if not self.pkg.changes["architecture"].has_key("source"):
899             return True
900
901         # Find the .dsc
902         dsc_filename = None
903         for f, entry in self.pkg.files.items():
904             if entry["type"] == "dsc":
905                 if dsc_filename:
906                     self.rejects.append("can not process a .changes file with multiple .dsc's.")
907                     return False
908                 else:
909                     dsc_filename = f
910
911         # If there isn't one, reject: a source upload must include a .dsc.
912         if not dsc_filename:
913             self.rejects.append("source uploads must contain a dsc file")
914             return False
915
916         # Parse the .dsc file
917         try:
918             self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1))
919         except CantOpenError:
920             # if not -n copy_to_holding() will have done this for us...
921             if not action:
922                 self.rejects.append("%s: can't read file." % (dsc_filename))
923         except ParseChangesError, line:
924             self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
925         except InvalidDscError, line:
926             self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
927         except ChangesUnicodeError:
928             self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))
929
930         # Build up the file list of files mentioned by the .dsc
931         try:
932             self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
933         except NoFilesFieldError:
934             self.rejects.append("%s: no Files: field." % (dsc_filename))
935             return False
936         except UnknownFormatError, format:
937             self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
938             return False
939         except ParseChangesError, line:
940             self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
941             return False
942
943         # Enforce mandatory fields
944         for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
945             if not self.pkg.dsc.has_key(i):
946                 self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
947                 return False
948
949         # Validate the source and version fields
950         if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
951             self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
952         if not re_valid_version.match(self.pkg.dsc["version"]):
953             self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))
954
955         # Only a limited list of source formats are allowed in each suite
956         for dist in self.pkg.changes["distribution"].keys():
957             allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
958             if self.pkg.dsc["format"] not in allowed:
959                 self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s)" % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))
960
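        # The allowed list typically holds source format names such as "1.0"
        # or "3.0 (quilt)", matched against the Format: field of the .dsc.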
961         # Validate the Maintainer field
962         try:
963             # We ignore the return value
964             fix_maintainer(self.pkg.dsc["maintainer"])
965         except ParseMaintError, msg:
966             self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
967                                  % (dsc_filename, self.pkg.dsc["maintainer"], msg))
968
969         # Validate the build-depends field(s)
970         for field_name in [ "build-depends", "build-depends-indep" ]:
971             field = self.pkg.dsc.get(field_name)
972             if field:
973                 # Have apt try to parse them...
974                 try:
975                     apt_pkg.ParseSrcDepends(field)
976                 except:
977                     self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))
978
979         # Ensure the version number in the .dsc matches the version number in the .changes
980         epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
981         changes_version = self.pkg.files[dsc_filename]["version"]
982
983         if epochless_dsc_version != changes_version:
984             self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
985
986         # Ensure the Files field contain only what's expected
987         self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))
988
989         # Ensure source is newer than existing source in target suites
990         session = DBConn().session()
991         self.check_source_against_db(dsc_filename, session)
992         self.check_dsc_against_db(dsc_filename, session)
993         session.close()
994
995         return True
996
997     ###########################################################################
998
999     def get_changelog_versions(self, source_dir):
1000         """Extracts the source package and (optionally) grabs the
1001         version history out of debian/changelog for the BTS."""
1002
1003         cnf = Config()
1004
1005         # Find the .dsc (again)
1006         dsc_filename = None
1007         for f in self.pkg.files.keys():
1008             if self.pkg.files[f]["type"] == "dsc":
1009                 dsc_filename = f
1010
1011         # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1012         if not dsc_filename:
1013             return
1014
1015         # Create a symlink mirror of the source files in our temporary directory
1016         for f in self.pkg.files.keys():
1017             m = re_issource.match(f)
1018             if m:
1019                 src = os.path.join(source_dir, f)
1020                 # If a file is missing for whatever reason, give up.
1021                 if not os.path.exists(src):
1022                     return
1023                 ftype = m.group(3)
1024                 if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
1025                    self.pkg.orig_files[f].has_key("path"):
1026                     continue
1027                 dest = os.path.join(os.getcwd(), f)
1028                 os.symlink(src, dest)
1029
1030         # If the orig files are not a part of the upload, create symlinks to the
1031         # existing copies.
1032         for orig_file in self.pkg.orig_files.keys():
1033             if not self.pkg.orig_files[orig_file].has_key("path"):
1034                 continue
1035             dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
1036             os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
1037
1038         # Extract the source
1039         cmd = "dpkg-source -sn -x %s" % (dsc_filename)
1040         (result, output) = commands.getstatusoutput(cmd)
1041         if (result != 0):
1042             self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
1043             self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
1044             return
1045
1046         if not cnf.Find("Dir::Queue::BTSVersionTrack"):
1047             return
1048
1049         # Get the upstream version
1050         upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1051         if re_strip_revision.search(upstr_version):
1052             upstr_version = re_strip_revision.sub('', upstr_version)
1053
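        # e.g. "1:2.3.4-5" -> "2.3.4" (epoch and Debian revision stripped).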
1054         # Ensure the changelog file exists
1055         changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
1056         if not os.path.exists(changelog_filename):
1057             self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
1058             return
1059
1060         # Parse the changelog
1061         self.pkg.dsc["bts changelog"] = ""
1062         changelog_file = utils.open_file(changelog_filename)
1063         for line in changelog_file.readlines():
1064             m = re_changelog_versions.match(line)
1065             if m:
1066                 self.pkg.dsc["bts changelog"] += line
1067         changelog_file.close()
1068
1069         # Check we found at least one revision in the changelog
1070         if not self.pkg.dsc["bts changelog"]:
1071             self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1072
1073     def check_source(self):
1074         # Bail out if:
1075         #    a) there's no source
1076         # or b) the orig files are MIA
1077         if not self.pkg.changes["architecture"].has_key("source") \
1078            or len(self.pkg.orig_files) == 0:
1079             return
1080
1081         tmpdir = utils.temp_dirname()
1082
1083         # Move into the temporary directory
1084         cwd = os.getcwd()
1085         os.chdir(tmpdir)
1086
1087         # Get the changelog version history
1088         self.get_changelog_versions(cwd)
1089
1090         # Move back and cleanup the temporary tree
1091         os.chdir(cwd)
1092
1093         try:
1094             shutil.rmtree(tmpdir)
1095         except OSError, e:
1096             if e.errno != errno.EACCES:
1097                 print "Unexpected OSError removing %s: %s" % (tmpdir, e)
1098                 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1099
1100             self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1101             # We probably have u-r or u-w directories so chmod everything
1102             # and try again.
1103             cmd = "chmod -R u+rwx %s" % (tmpdir)
1104             result = os.system(cmd)
1105             if result != 0:
1106                 utils.fubar("'%s' failed with result %s." % (cmd, result))
1107             shutil.rmtree(tmpdir)
1108         except Exception, e:
1109             print "Unexpected exception removing %s: %s" % (tmpdir, e)
1110             utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1111
1112     ###########################################################################
1113     def ensure_hashes(self):
1114         # Make sure we recognise the format of the Files: field in the .changes
1115         format = self.pkg.changes.get("format", "0.0").split(".", 1)
1116         if len(format) == 2:
1117             format = int(format[0]), int(format[1])
1118         else:
1119             format = int(float(format[0])), 0
1120
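        # e.g. "Format: 1.8" -> (1, 8), while a bare "Format: 2" -> (2, 0).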
1121         # We need to deal with the original changes blob, as the fields we need
1122         # might not be in the changes dict serialised into the .dak anymore.
1123         orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1124
1125         # Copy the checksums over to the current changes dict.  This will keep
1126         # the existing modifications to it intact.
1127         for field in orig_changes:
1128             if field.startswith('checksums-'):
1129                 self.pkg.changes[field] = orig_changes[field]
1130
1131         # Check for unsupported hashes
1132         for j in utils.check_hash_fields(".changes", self.pkg.changes):
1133             self.rejects.append(j)
1134
1135         for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1136             self.rejects.append(j)
1137
1138         # We have to calculate the hash if we have an earlier changes version than
1139         # the hash appears in rather than require it exist in the changes file
1140         for hashname, hashfunc, version in utils.known_hashes:
1141             # TODO: Move _ensure_changes_hash into this class
1142             for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1143                 self.rejects.append(j)
1144             if "source" in self.pkg.changes["architecture"]:
1145                 # TODO: Move _ensure_dsc_hash into this class
1146                 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1147                     self.rejects.append(j)
1148
1149     def check_hashes(self):
1150         for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1151             self.rejects.append(m)
1152
1153         for m in utils.check_size(".changes", self.pkg.files):
1154             self.rejects.append(m)
1155
1156         for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1157             self.rejects.append(m)
1158
1159         for m in utils.check_size(".dsc", self.pkg.dsc_files):
1160             self.rejects.append(m)
1161
1162         self.ensure_hashes()
1163
1164     ###########################################################################
1165
1166     def ensure_orig(self, target_dir='.', session=None):
1167         """
1168         Ensures that all orig files mentioned in the changes file are present
1169         in target_dir. If they do not exist, they are symlinked into place.
1170
1171         A list containing the symlinks that were created is returned (so they
1172         can be removed).
1173         """
1174
1175         symlinked = []
1176         cnf = Config()
1177
1178         for filename, entry in self.pkg.dsc_files.iteritems():
1179             if not re_is_orig_source.match(filename):
1180                 # File is not an orig; ignore
1181                 continue
1182
1183             if os.path.exists(filename):
1184                 # File exists, no need to continue
1185                 continue
1186
1187             def symlink_if_valid(path):
1188                 f = utils.open_file(path)
1189                 md5sum = apt_pkg.md5sum(f)
1190                 f.close()
1191
1192                 fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
1193                 expected = (int(entry['size']), entry['md5sum'])
1194
1195                 if fingerprint != expected:
1196                     return False
1197
1198                 dest = os.path.join(target_dir, filename)
1199
1200                 os.symlink(path, dest)
1201                 symlinked.append(dest)
1202
1203                 return True
1204
1205             session_ = session
1206             if session is None:
1207                 session_ = DBConn().session()
1208
1209             found = False
1210
1211             # Look in the pool
1212             for poolfile in get_poolfile_like_name('/%s' % filename, session_):
1213                 poolfile_path = os.path.join(
1214                     poolfile.location.path, poolfile.filename
1215                 )
1216
1217                 if symlink_if_valid(poolfile_path):
1218                     found = True
1219                     break
1220
1221             if session is None:
1222                 session_.close()
1223
1224             if found:
1225                 continue
1226
1227             # Look in some other queues for the file
1228             queues = ('Accepted', 'New', 'Byhand', 'ProposedUpdates',
1229                 'OldProposedUpdates', 'Embargoed', 'Unembargoed')
1230
1231             for queue in queues:
1232                 if not cnf.get('Dir::Queue::%s' % queue):
1233                     continue
1234
1235                 queuefile_path = os.path.join(
1236                     cnf['Dir::Queue::%s' % queue], filename
1237                 )
1238
1239                 if not os.path.exists(queuefile_path):
1240                     # Does not exist in this queue
1241                     continue
1242
1243                 if symlink_if_valid(queuefile_path):
1244                     break
1245
1246         return symlinked
1247
1248     ###########################################################################
1249
1250     def check_lintian(self):
1251         cnf = Config()
1252
1253         # Don't reject binary uploads
1254         if not self.pkg.changes['architecture'].has_key('source'):
1255             return
1256
1257         # Only check some distributions
1258         valid_dist = False
1259         for dist in ('unstable', 'experimental'):
1260             if dist in self.pkg.changes['distribution']:
1261                 valid_dist = True
1262                 break
1263
1264         if not valid_dist:
1265             return
1266
1267         tagfile = cnf.get("Dinstall::LintianTags")
1268         if tagfile is None:
1269             # We don't have a tagfile, so just don't do anything.
1270             return
1271
1272         # Parse the yaml file
1273         sourcefile = file(tagfile, 'r')
1274         sourcecontent = sourcefile.read()
1275         sourcefile.close()
1276         try:
1277             lintiantags = yaml.load(sourcecontent)['lintian']
1278         except yaml.YAMLError, msg:
1279             utils.fubar("Cannot read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
1280             return
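        # For reference, the tag file parsed above is assumed to look roughly
        # like this (illustrative sketch only - the tag names are invented):
        #
        #     lintian:
        #       fatal:
        #         - some-fatal-tag
        #       nonfatal:
        #         - some-overridable-tag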
1281
1282         # Try and find all orig mentioned in the .dsc
1283         symlinked = self.ensure_orig()
1284
1285         # Now set up the input file for lintian. lintian wants "one tag per line" only,
1286         # so build the file accordingly. We put all types of tags in one file and then
1287         # sort through lintian's output later to see whether a detected tag is fatal or
1288         # not. That way we only run lintian once over all tags, even though we might
1289         # reject on some of them but not on others.
1290         # Additionally build up a set of tags
1291         tags = set()
1292         (fd, temp_filename) = utils.temp_filename()
1293         temptagfile = os.fdopen(fd, 'w')
1294         for tagtype in lintiantags:
1295             for tag in lintiantags[tagtype]:
1296                 temptagfile.write("%s\n" % tag)
1297                 tags.add(tag)
1298         temptagfile.close()
1299
1300         # Now run lintian on the .changes file, capturing its output so that
1301         # we can parse it afterwards.
1302         command = "lintian --show-overrides --tags-from-file %s %s" % (temp_filename, self.pkg.changes_file)
1303         (result, output) = commands.getstatusoutput(command)
1304
1305         # We are done with lintian, remove our tempfile and any symlinks we created
1306         os.unlink(temp_filename)
1307         for symlink in symlinked:
1308             os.unlink(symlink)
1309
1310         if (result == 2):
1311             utils.warn("lintian failed for %s [return code: %s]." % (self.pkg.changes_file, result))
1312             utils.warn(utils.prefix_multi_line_string(output, " [possible output:] "))
1313
1314         if len(output) == 0:
1315             return
1316
1317         def log(*txt):
1318             if self.logger:
1319                 self.logger.log([self.pkg.changes_file, "check_lintian"] + list(txt))
1320
1321         for etype, epackage, etag, etext in parse_lintian_output(output):
1322
1323             # So lets check if we know the tag at all.
1324             if etag not in tags:
1325                 continue
1326
1327             if etype == 'O':
1328                 # We know it and it is overridden. Check that the override is allowed.
1329                 if etag in lintiantags['nonfatal']:
1330                     # The tag is overridden, and it is allowed to be overridden.
1331                     # Don't add a reject message.
1332                     pass
1333                 elif etag in lintiantags['fatal']:
1334                     # The tag is overridden - but it is not allowed to be.
1335                     self.rejects.append("%s: Overridden tag %s found, but this tag may not be overridden." % (epackage, etag))
1336                     log("ftpmaster does not allow tag to be overridable", etag)
1337             else:
1338                 # Tag is known and not overridden, so reject directly.
1339                 self.rejects.append("%s: Found lintian output: '%s %s', automatically rejected package." % (epackage, etag, etext))
1340                 # Now tell if they *might* override it.
1341                 if etag in lintiantags['nonfatal']:
1342                     log("auto rejecting", "overridable", etag)
1343                     self.rejects.append("%s: If you have a good reason, you may override this lintian tag." % (epackage))
1344                 else:
1345                     log("auto rejecting", "not overridable", etag)
1346
1347     ###########################################################################
1348     def check_urgency(self):
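        """
        Ensure sourceful uploads carry a valid urgency, falling back to
        Urgency::Default (with a warning) when the field is missing or not
        listed in Urgency::Valid.
        """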
1349         cnf = Config()
1350         if self.pkg.changes["architecture"].has_key("source"):
1351             if not self.pkg.changes.has_key("urgency"):
1352                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1353             self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1354             if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1355                 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1356                                      (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1357                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1358
1359     ###########################################################################
1360
1361     # Sanity check the time stamps of files inside debs.
1362     # [Files in the near future cause ugly warnings and extreme time
1363     #  travel can cause errors on extraction]
1364
1365     def check_timestamps(self):
1366         Cnf = Config()
1367
1368         future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1369         past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1370         tar = TarTime(future_cutoff, past_cutoff)
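        # TarTime (assumed to be defined elsewhere in this module) provides a
        # per-entry callback for debExtract plus future_files / ancient_files
        # dicts mapping member names to timestamps; reset() clears that state
        # between debs.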
1371
1372         for filename, entry in self.pkg.files.items():
1373             if entry["type"] == "deb":
1374                 tar.reset()
1375                 try:
1376                     deb_file = utils.open_file(filename)
1377                     apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
1378                     deb_file.seek(0)
1379                     try:
1380                         apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
1381                     except SystemError, e:
1382                         # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
1383                         if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
1384                             raise
1385                         deb_file.seek(0)
1386                         apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")
1387
1388                     deb_file.close()
1389
1390                     future_files = tar.future_files.keys()
1391                     if future_files:
1392                         num_future_files = len(future_files)
1393                         future_file = future_files[0]
1394                         future_date = tar.future_files[future_file]
1395                         self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1396                                % (filename, num_future_files, future_file, time.ctime(future_date)))
1397
1398                     ancient_files = tar.ancient_files.keys()
1399                     if ancient_files:
1400                         num_ancient_files = len(ancient_files)
1401                         ancient_file = ancient_files[0]
1402                         ancient_date = tar.ancient_files[ancient_file]
1403                         self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1404                                % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1405                 except:
1406                     self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1407
1408     def check_if_upload_is_sponsored(self, uid_email, uid_name):
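        """
        Return whether the upload appears to be sponsored, i.e. signed by
        someone who is neither the maintainer nor the changed-by person; may
        also record the sponsor address in changes["sponsoremail"].
        """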
1409         if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1410             sponsored = False
1411         elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1412             sponsored = False
1413             if uid_name == "":
1414                 sponsored = True
1415         else:
1416             sponsored = True
1417             if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1418                 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1419                 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1420                     self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1421                         self.pkg.changes["sponsoremail"] = uid_email
1422
1423         return sponsored
1424
1425
1426     ###########################################################################
1427     # check_signed_by_key checks
1428     ###########################################################################
1429
1430     def check_signed_by_key(self):
1431         """Ensure the .changes is signed by an authorized uploader."""
1432         session = DBConn().session()
1433
1434         # First of all we check that the person has proper upload permissions
1435         # and that this upload isn't blocked
1436         fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1437
1438         if fpr is None:
1439             self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1440             return
1441
1442         # TODO: Check that import-keyring adds UIDs properly
1443         if not fpr.uid:
1444             self.rejects.append("Cannot find uid for fingerprint %s.  Please contact ftpmaster@debian.org" % fpr.fingerprint)
1445             return
1446
1447         # Check that the fingerprint which uploaded has permission to do so
1448         self.check_upload_permissions(fpr, session)
1449
1450         # Check that this package is not in a transition
1451         self.check_transition(session)
1452
1453         session.close()
1454
1455
1456     def check_upload_permissions(self, fpr, session):
1457         # Check any one-off upload blocks
1458         self.check_upload_blocks(fpr, session)
1459
1460         # Start with DM as a special case
1461         # DM is a special case unfortunately, so we check it first
1462         # (keys with no source access get more access than DMs in one
1463         #  way; DMs can only upload for their packages whether source
1464         #  or binary, whereas keys with no access might be able to
1465         #  upload some binaries)
1466         if fpr.source_acl.access_level == 'dm':
1467             self.check_dm_upload(fpr, session)
1468         else:
1469             # Check source-based permissions for other types
1470             if self.pkg.changes["architecture"].has_key("source"):
1471                 if fpr.source_acl.access_level is None:
1472                     rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1473                     rej += '\nPlease contact ftpmaster if you think this is incorrect'
1474                     self.rejects.append(rej)
1475                     return
1476             else:
1477                 # If not a DM, we allow full upload rights
1478                 uid_email = "%s@debian.org" % (fpr.uid.uid)
1479                 self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1480
1481
1482         # Check binary upload permissions
1483         # By this point we know that DMs can't have got here unless they
1484         # are allowed to deal with the package concerned so just apply
1485         # normal checks
1486         if fpr.binary_acl.access_level == 'full':
1487             return
1488
1489         # Otherwise we're in the map case
1490         tmparches = self.pkg.changes["architecture"].copy()
1491         tmparches.pop('source', None)
1492
1493         for bam in fpr.binary_acl_map:
1494             tmparches.pop(bam.architecture.arch_string, None)
1495
1496         if len(tmparches.keys()) > 0:
1497             if fpr.binary_reject:
1498                 rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1499                 rej += "\narchitectures involved are: " + ",".join(tmparches.keys())
1500                 self.rejects.append(rej)
1501             else:
1502                 # TODO: This is where we'll implement reject vs throw away binaries later
1503                 rej = "Uhm.  I'm meant to throw away the binaries now but that's not implemented yet"
1504                 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1505                 rej += "\nFingerprint: %s" % (fpr.fingerprint)
1506                 self.rejects.append(rej)
1507
1508
1509     def check_upload_blocks(self, fpr, session):
1510         """Check whether any upload blocks apply to this source, source
1511            version, uid / fpr combination"""
1512
1513         def block_rej_template(fb):
1514             rej = 'Manual upload block in place for package %s' % fb.source
1515             if fb.version is not None:
1516                 rej += ', version %s' % fb.version
1517             return rej
1518
1519         for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1520             # version is None if the block applies to all versions
1521             if fb.version is None or fb.version == self.pkg.changes['version']:
1522                 # Check both fpr and uid - either is enough to cause a reject
1523                 if fb.fpr is not None:
1524                     if fb.fpr.fingerprint == fpr.fingerprint:
1525                         self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1526                 if fb.uid is not None:
1527                     if fb.uid == fpr.uid:
1528                         self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1529
1530
1531     def check_dm_upload(self, fpr, session):
1532         # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
1533         ## none of the uploaded packages are NEW
1534         rej = False
1535         for f in self.pkg.files.keys():
1536             if self.pkg.files[f].has_key("byhand"):
1537                 self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
1538                 rej = True
1539             if self.pkg.files[f].has_key("new"):
1540                 self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
1541                 rej = True
1542
1543         if rej:
1544             return
1545
1546         ## the most recent version of the package uploaded to unstable or
1547         ## experimental includes the field "DM-Upload-Allowed: yes" in the source
1548         ## section of its control file
1549         q = session.query(DBSource).filter_by(source=self.pkg.changes["source"])
1550         q = q.join(SrcAssociation)
1551         q = q.join(Suite).filter(Suite.suite_name.in_(['unstable', 'experimental']))
1552         q = q.order_by(desc('source.version')).limit(1)
1553
1554         r = q.all()
1555
1556         if len(r) != 1:
1557             rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
1558             self.rejects.append(rej)
1559             return
1560
1561         r = r[0]
1562         if not r.dm_upload_allowed:
1563             rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
1564             self.rejects.append(rej)
1565             return
1566
1567         ## the Maintainer: field of the uploaded .changes file corresponds with
1568         ## the owner of the key used (ie, non-developer maintainers may not sponsor
1569         ## uploads)
1570         if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
1571             self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
1572
1573         ## the most recent version of the package uploaded to unstable or
1574         ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
1575         ## non-developer maintainers cannot NMU or hijack packages)
1576
1577         # srcuploaders includes the maintainer
1578         accept = False
1579         for sup in r.srcuploaders:
1580             (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
1581             # Eww - I hope we never have two people with the same name in Debian
1582             if email == fpr.uid.uid or name == fpr.uid.name:
1583                 accept = True
1584                 break
1585
1586         if not accept:
1587             self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
1588             return
1589
1590         ## none of the packages are being taken over from other source packages
1591         for b in self.pkg.changes["binary"].keys():
1592             for suite in self.pkg.changes["distribution"].keys():
1593                 q = session.query(DBSource)
1594                 q = q.join(DBBinary).filter_by(package=b)
1595                 q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)
1596
1597                 for s in q.all():
1598                     if s.source != self.pkg.changes["source"]:
1599                         self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s, suite))
1600
1601
1602
1603     def check_transition(self, session):
1604         cnf = Config()
1605
1606         sourcepkg = self.pkg.changes["source"]
1607
1608         # No sourceful upload -> no need to do anything else, direct return
1609         # We also only check unstable uploads, not experimental or those going to
1610         # some proposed-updates queue
1611         if "source" not in self.pkg.changes["architecture"] or \
1612            "unstable" not in self.pkg.changes["distribution"]:
1613             return
1614
1615         # Also only check if there is a file defined (and existing) with
1616         # checks.
1617         transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1618         if transpath == "" or not os.path.exists(transpath):
1619             return
1620
1621         # Parse the yaml file
1622         sourcefile = file(transpath, 'r')
1623         sourcecontent = sourcefile.read()
1624         try:
1625             transitions = yaml.load(sourcecontent)
1626         except yaml.YAMLError, msg:
1627             # This shouldn't happen, there is a wrapper to edit the file which
1628             # checks it, but we prefer to be safe than ending up rejecting
1629             # everything.
1630             utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1631             return
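        # The transitions file parsed above is assumed to look roughly like
        # this (illustrative sketch only - names and versions are invented):
        #
        #     some_transition:
        #         reason: "libfoo soname bump"
        #         source: libfoo
        #         new: 1.2-1
        #         rm: Some Releaser
        #         packages:
        #             - libfoo
        #             - bar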
1632
1633         # Now look through all defined transitions
1634         for trans in transitions:
1635             t = transitions[trans]
1636             source = t["source"]
1637             expected = t["new"]
1638
1639             # Will be None if nothing is in testing.
1640             current = get_source_in_suite(source, "testing", session)
1641             if current is not None:
1642                 compare = apt_pkg.VersionCompare(current.version, expected)
1643
1644             if current is None or compare < 0:
1645                 # This is still valid, the current version in testing is older than
1646                 # the new version we wait for, or there is none in testing yet
1647
1648                 # Check if the source we look at is affected by this.
1649                 if sourcepkg in t['packages']:
1650                     # The source is affected, lets reject it.
1651
1652                     rejectmsg = "%s: part of the %s transition.\n\n" % (
1653                         sourcepkg, trans)
1654
1655                     if current is not None:
1656                         currentlymsg = "at version %s" % (current.version)
1657                     else:
1658                         currentlymsg = "not present in testing"
1659
1660                     rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1661
1662                     rejectmsg += "\n".join(textwrap.wrap("""Your package
1663 is part of a testing transition designed to get %s migrated (it is
1664 currently %s, we need version %s).  This transition is managed by the
1665 Release Team, and %s is the Release-Team member responsible for it.
1666 Please mail debian-release@lists.debian.org or contact %s directly if you
1667 need further assistance.  You might want to upload to experimental until this
1668 transition is done."""
1669                             % (source, currentlymsg, expected,t["rm"], t["rm"])))
1670
1671                     self.rejects.append(rejectmsg)
1672                     return
1673
1674     ###########################################################################
1675     # End check_signed_by_key checks
1676     ###########################################################################
1677
1678     def build_summaries(self):
1679         """ Build a summary of changes the upload introduces. """
1680
1681         (byhand, new, summary, override_summary) = self.pkg.file_summary()
1682
1683         short_summary = summary
1684
1685         # This is for direport's benefit...
1686         f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1687
1688         if byhand or new:
1689             summary += "Changes: " + f
1690
1691         summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1692
1693         summary += self.announce(short_summary, 0)
1694
1695         return (summary, short_summary)
1696
1697     ###########################################################################
1698
1699     def close_bugs(self, summary, action):
1700         """
1701         Send mail to close bugs as instructed by the closes field in the changes file.
1702         Also add a line to summary if any work was done.
1703
1704         @type summary: string
1705         @param summary: summary text, as given by L{build_summaries}
1706
1707         @type action: bool
1708         @param action: If set to false, no real action will be taken.
1709
1710         @rtype: string
1711         @return: summary. If action was taken, extended by the list of closed bugs.
1712
1713         """
1714
1715         template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1716
1717         bugs = self.pkg.changes["closes"].keys()
1718
1719         if not bugs:
1720             return summary
1721
1722         bugs.sort()
1723         summary += "Closing bugs: "
1724         for bug in bugs:
1725             summary += "%s " % (bug)
1726             if action:
1727                 self.update_subst()
1728                 self.Subst["__BUG_NUMBER__"] = bug
1729                 if self.pkg.changes["distribution"].has_key("stable"):
1730                     self.Subst["__STABLE_WARNING__"] = """
1731 Note that this package is not part of the released stable Debian
1732 distribution.  It may have dependencies on other unreleased software,
1733 or other instabilities.  Please take care if you wish to install it.
1734 The update will eventually make its way into the next released Debian
1735 distribution."""
1736                 else:
1737                     self.Subst["__STABLE_WARNING__"] = ""
1738                 mail_message = utils.TemplateSubst(self.Subst, template)
1739                 utils.send_mail(mail_message)
1740
1741                 # Clear up after ourselves
1742                 del self.Subst["__BUG_NUMBER__"]
1743                 del self.Subst["__STABLE_WARNING__"]
1744
1745         if action and self.logger:
1746             self.logger.log(["closing bugs"] + bugs)
1747
1748         summary += "\n"
1749
1750         return summary
1751
1752     ###########################################################################
1753
1754     def announce(self, short_summary, action):
1755         """
1756         Send an announce mail about a new upload.
1757
1758         @type short_summary: string
1759         @param short_summary: Short summary text to include in the mail
1760
1761         @type action: bool
1762         @param action: If set to false, no real action will be taken.
1763
1764         @rtype: string
1765         @return: text string describing the action taken.
1766
1767         """
1768
1769         cnf = Config()
1770         announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
1771
1772         # Only do announcements for source uploads with a recent dpkg-dev installed
1773         if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
1774            self.pkg.changes["architecture"].has_key("source"):
1775             return ""
1776
1777         lists_done = {}
1778         summary = ""
1779
1780         self.Subst["__SHORT_SUMMARY__"] = short_summary
1781
1782         for dist in self.pkg.changes["distribution"].keys():
1783             announce_list = cnf.Find("Suite::%s::Announce" % (dist))
1784             if announce_list == "" or lists_done.has_key(announce_list):
1785                 continue
1786
1787             lists_done[announce_list] = 1
1788             summary += "Announcing to %s\n" % (announce_list)
1789
1790             if action:
1791                 self.update_subst()
1792                 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
1793                 if cnf.get("Dinstall::TrackingServer") and \
1794                    self.pkg.changes["architecture"].has_key("source"):
1795                     trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
1796                     self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
1797
1798                 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
1799                 utils.send_mail(mail_message)
1800
1801                 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
1802
1803         if cnf.FindB("Dinstall::CloseBugs"):
1804             summary = self.close_bugs(summary, action)
1805
1806         del self.Subst["__SHORT_SUMMARY__"]
1807
1808         return summary
1809
1810     ###########################################################################
1811     @session_wrapper
1812     def accept (self, summary, short_summary, session=None):
1813         """
1814         Accept an upload.
1815
1816         This moves all files referenced from the .changes into the pool,
1817         sends the accepted mail, announces to lists, closes bugs and
1818         also checks for override disparities. If enabled it will write out
1819         the version history for the BTS Version Tracking and will finally call
1820         L{queue_build}.
1821
1822         @type summary: string
1823         @param summary: Summary text
1824
1825         @type short_summary: string
1826         @param short_summary: Short summary
1827         """
1828
1829         cnf = Config()
1830         stats = SummaryStats()
1831
1832         print "Installing."
1833         self.logger.log(["installing changes", self.pkg.changes_file])
1834
1835         poolfiles = []
1836
1837         # Add the .dsc file to the DB first
1838         for newfile, entry in self.pkg.files.items():
1839             if entry["type"] == "dsc":
1840                 dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
1841                 for j in pfs:
1842                     poolfiles.append(j)
1843
1844         # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
1845         for newfile, entry in self.pkg.files.items():
1846             if entry["type"] == "deb":
1847                 poolfiles.append(add_deb_to_db(self, newfile, session))
1848
1849         # If this is a sourceful diff only upload that is moving
1850         # cross-component we need to copy the .orig files into the new
1851         # component too for the same reasons as above.
1852         if self.pkg.changes["architecture"].has_key("source"):
1853             for orig_file in self.pkg.orig_files.keys():
1854                 if not self.pkg.orig_files[orig_file].has_key("id"):
1855                     continue # Skip if it's not in the pool
1856                 orig_file_id = self.pkg.orig_files[orig_file]["id"]
1857                 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
1858                     continue # Skip if the location didn't change
1859
1860                 # Do the move
1861                 oldf = get_poolfile_by_id(orig_file_id, session)
1862                 old_filename = os.path.join(oldf.location.path, oldf.filename)
1863                 old_dat = {'size': oldf.filesize,   'md5sum': oldf.md5sum,
1864                            'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
1865
1866                 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
1867
1868                 # TODO: Care about size/md5sum collisions etc
1869                 (found, newf) = check_poolfile(new_filename, oldf.filesize, oldf.md5sum, dsc_location_id, session)
1870
1871                 if newf is None:
1872                     utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
1873                     newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
1874
1875                     # TODO: Check that there's only 1 here
1876                     source = get_sources_from_name(self.pkg.changes["source"], self.pkg.changes["version"])[0]
1877                     dscf = get_dscfiles(source_id=source.source_id, poolfile_id=orig_file_id, session=session)[0]
1878                     dscf.poolfile_id = newf.file_id
1879                     session.add(dscf)
1880                     session.flush()
1881
1882                     poolfiles.append(newf)
1883
1884         # Install the files into the pool
1885         for newfile, entry in self.pkg.files.items():
1886             destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
1887             utils.move(newfile, destination)
1888             self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
1889             stats.accept_bytes += float(entry["size"])
1890
1891         # Copy the .changes file across for suites which need it.
1892         copy_changes = {}
1893         for suite_name in self.pkg.changes["distribution"].keys():
1894             if cnf.has_key("Suite::%s::CopyChanges" % (suite_name)):
1895                 copy_changes[cnf["Suite::%s::CopyChanges" % (suite_name)]] = ""
1896
1897         for dest in copy_changes.keys():
1898             utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
1899
1900         # We're done - commit the database changes
1901         session.commit()
1902         # Our SQL session will automatically start a new transaction after
1903         # the last commit
1904
1905         # Move the .changes into the 'done' directory
1906         utils.move(self.pkg.changes_file,
1907                    os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))
1908
1909         if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
1910             UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
1911
1912         # Send accept mail, announce to lists, close bugs and check for
1913         # override disparities
1914         if not cnf["Dinstall::Options::No-Mail"]:
1915             self.update_subst()
1916             self.Subst["__SUITE__"] = ""
1917             self.Subst["__SUMMARY__"] = summary
1918             mail_message = utils.TemplateSubst(self.Subst,
1919                                                os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
1920             utils.send_mail(mail_message)
1921             self.announce(short_summary, 1)
1922
1923         ## Helper stuff for DebBugs Version Tracking
1924         if cnf.Find("Dir::Queue::BTSVersionTrack"):
1925             # ??? once queue/* is cleared on *.d.o and/or reprocessed
1926             # the conditionalization on dsc["bts changelog"] should be
1927             # dropped.
1928
1929             # Write out the version history from the changelog
1930             if self.pkg.changes["architecture"].has_key("source") and \
1931                self.pkg.dsc.has_key("bts changelog"):
1932
1933                 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
1934                 version_history = os.fdopen(fd, 'w')
1935                 version_history.write(self.pkg.dsc["bts changelog"])
1936                 version_history.close()
1937                 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
1938                                       self.pkg.changes_file[:-8]+".versions")
1939                 os.rename(temp_filename, filename)
1940                 os.chmod(filename, 0644)
1941
1942             # Write out the binary -> source mapping.
1943             (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
1944             debinfo = os.fdopen(fd, 'w')
1945             for name, entry in sorted(self.pkg.files.items()):
1946                 if entry["type"] == "deb":
1947                     line = " ".join([entry["package"], entry["version"],
1948                                      entry["architecture"], entry["source package"],
1949                                      entry["source version"]])
1950                     debinfo.write(line+"\n")
1951             debinfo.close()
1952             filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
1953                                   self.pkg.changes_file[:-8]+".debinfo")
1954             os.rename(temp_filename, filename)
1955             os.chmod(filename, 0644)
1956
1957         session.commit()
1958
1959         # Set up our copy queues (e.g. buildd queues)
1960         for suite_name in self.pkg.changes["distribution"].keys():
1961             suite = get_suite(suite_name, session)
1962             for q in suite.copy_queues:
1963                 for f in poolfiles:
1964                     q.add_file_from_pool(f)
1965
1966         session.commit()
1967
1968         # Finally...
1969         stats.accept_count += 1
1970
1971     def check_override(self):
1972         """
1973         Checks override entries for validity. Mails "Override disparity" warnings,
1974         if that feature is enabled.
1975
1976         Abandons the check if
1977           - override disparity checks are disabled
1978           - mail sending is disabled
1979         """
1980
1981         cnf = Config()
1982
1983         # Abandon the check if:
1984         #  a) override disparity checks have been disabled
1985         #  b) we're not sending mail
1986         if not cnf.FindB("Dinstall::OverrideDisparityCheck") or \
1987            cnf["Dinstall::Options::No-Mail"]:
1988             return
1989
1990         summary = self.pkg.check_override()
1991
1992         if summary == "":
1993             return
1994
1995         overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
1996
1997         self.update_subst()
1998         self.Subst["__SUMMARY__"] = summary
1999         mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2000         utils.send_mail(mail_message)
2001         del self.Subst["__SUMMARY__"]
2002
2003     ###########################################################################
2004
2005     def remove(self, from_dir=None):
2006         """
2007         Used (for instance) in p-u to remove the package from unchecked
2008
2009         Also removes the package from holding area.
2010         """
2011         if from_dir is None:
2012             from_dir = self.pkg.directory
2013         h = Holding()
2014
2015         for f in self.pkg.files.keys():
2016             os.unlink(os.path.join(from_dir, f))
2017             if os.path.exists(os.path.join(h.holding_dir, f)):
2018                 os.unlink(os.path.join(h.holding_dir, f))
2019
2020         os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2021         if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2022             os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2023
2024     ###########################################################################
2025
2026     def move_to_queue (self, queue):
2027         """
2028         Move files to a destination queue using the permissions in the table
2029         """
2030         h = Holding()
2031         utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2032                    queue.path, perms=int(queue.change_perms, 8))
2033         for f in self.pkg.files.keys():
2034             utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
2035
2036     ###########################################################################
2037
2038     def force_reject(self, reject_files):
2039         """
2040         Forcefully move files from the current directory to the
2041         reject directory.  If any file already exists in the reject
2042         directory it will be moved to the morgue to make way for
2043         the new file.
2044
2045         @type reject_files: list
2046         @param reject_files: list of files to move to the reject directory
2047
2048         """
2049
2050         cnf = Config()
2051
2052         for file_entry in reject_files:
2053             # Skip any files which don't exist or which we don't have permission to copy.
2054             if os.access(file_entry, os.R_OK) == 0:
2055                 continue
2056
2057             dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
2058
2059             try:
2060                 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
2061             except OSError, e:
2062                 # File exists?  Let's try and move it to the morgue
2063                 if e.errno == errno.EEXIST:
2064                     morgue_file = os.path.join(cnf["Dir::Morgue"], cnf["Dir::MorgueReject"], file_entry)
2065                     try:
2066                         morgue_file = utils.find_next_free(morgue_file)
2067                     except NoFreeFilenameError:
2068                         # Something's either gone badly Pete Tong, or
2069                         # someone is trying to exploit us.
2070                         utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file_entry))
2071                         return
2072                     utils.move(dest_file, morgue_file, perms=0660)
2073                     try:
2074                         dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2075                     except OSError, e:
2076                         # Likewise
2077                         utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
2078                         return
2079                 else:
2080                     raise
2081             # If we got here, we own the destination file, so we can
2082             # safely overwrite it.
2083             utils.move(file_entry, dest_file, 1, perms=0660)
2084             os.close(dest_fd)
2085
2086     ###########################################################################
2087     def do_reject (self, manual=0, reject_message="", note=""):
2088         """
2089         Reject an upload. If C{manual} is true and no reject message was
2090         given, spawn an editor so the user can write one.
2091
2092         @type manual: bool
2093         @param manual: manual or automated rejection
2094
2095         @type reject_message: string
2096         @param reject_message: A reject message
2097
2098         @return: 0 on success; 1 if the rejection was abandoned
2099
2100         """
2101         # If we weren't given a manual rejection message, spawn an
2102         # editor so the user can add one in...
2103         if manual and not reject_message:
2104             (fd, temp_filename) = utils.temp_filename()
2105             temp_file = os.fdopen(fd, 'w')
2106             if len(note) > 0:
2107                 for line in note:
2108                     temp_file.write(line)
2109             temp_file.close()
2110             editor = os.environ.get("EDITOR","vi")
2111             answer = 'E'
2112             while answer == 'E':
2113                 os.system("%s %s" % (editor, temp_filename))
2114                 temp_fh = utils.open_file(temp_filename)
2115                 reject_message = "".join(temp_fh.readlines())
2116                 temp_fh.close()
2117                 print "Reject message:"
2118                 print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
2119                 prompt = "[R]eject, Edit, Abandon, Quit ?"
2120                 answer = "XXX"
2121                 while prompt.find(answer) == -1:
2122                     answer = utils.our_raw_input(prompt)
2123                     m = re_default_answer.search(prompt)
2124                     if answer == "":
2125                         answer = m.group(1)
2126                     answer = answer[:1].upper()
2127             os.unlink(temp_filename)
2128             if answer == 'A':
2129                 return 1
2130             elif answer == 'Q':
2131                 sys.exit(0)
2132
2133         print "Rejecting.\n"
2134
2135         cnf = Config()
2136
2137         reason_filename = self.pkg.changes_file[:-8] + ".reason"
2138         reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2139
2140         # Move all the files into the reject directory
2141         reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2142         self.force_reject(reject_files)
2143
2144         # If we fail here someone is probably trying to exploit the race
2145         # so let's just raise an exception ...
2146         if os.path.exists(reason_filename):
2147             os.unlink(reason_filename)
2148         reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2149
2150         rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
2151
2152         self.update_subst()
2153         if not manual:
2154             self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2155             self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2156             self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2157             os.write(reason_fd, reject_message)
2158             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2159         else:
2160             # Build up the rejection email
2161             user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2162             self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2163             self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2164             self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2165             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2166             # Write the rejection email out as the <foo>.reason file
2167             os.write(reason_fd, reject_mail_message)
2168
2169         del self.Subst["__REJECTOR_ADDRESS__"]
2170         del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2171         del self.Subst["__CC__"]
2172
2173         os.close(reason_fd)
2174
2175         # Send the rejection mail if appropriate
2176         if not cnf["Dinstall::Options::No-Mail"]:
2177             utils.send_mail(reject_mail_message)
2178
2179         if self.logger:
2180             self.logger.log(["rejected", self.pkg.changes_file])
2181
2182         return 0
2183
2184     ################################################################################
2185     def in_override_p(self, package, component, suite, binary_type, filename, session):
2186         """
2187         Check if a package already has override entries in the DB
2188
2189         @type package: string
2190         @param package: package name
2191
2192         @type component: string
2193         @param component: component name
2194
2195         @type suite: string
2196         @param suite: suite name
2197
2198         @type binary_type: string
2199         @param binary_type: type of the package
2200
2201         @type filename: string
2202         @param filename: filename we check
2203
2204         @return: the database result. But no one cares anyway.
2205
2206         """
2207
2208         cnf = Config()
2209
2210         if binary_type == "": # must be source
2211             file_type = "dsc"
2212         else:
2213             file_type = binary_type
2214
2215         # Override suite name; used for example with proposed-updates
2216         if cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
2217             suite = cnf["Suite::%s::OverrideSuite" % (suite)]
2218
2219         result = get_override(package, suite, component, file_type, session)
2220
2221         # If checking for a source package fall back on the binary override type
2222         if file_type == "dsc" and len(result) < 1:
2223             result = get_override(package, suite, component, ['deb', 'udeb'], session)
2224
2225         # Remember the section and priority so we can check them later if appropriate
2226         if len(result) > 0:
2227             result = result[0]
2228             self.pkg.files[filename]["override section"] = result.section.section
2229             self.pkg.files[filename]["override priority"] = result.priority.priority
2230             return result
2231
2232         return None
2233
2234     ################################################################################
2235     def get_anyversion(self, sv_list, suite):
2236         """
2237         @type sv_list: list
2238         @param sv_list: list of (suite, version) tuples to check
2239
2240         @type suite: string
2241         @param suite: suite name
2242
2243         Returns the highest version in sv_list for the given suite or any suite it enhances (or None).
2244         """
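        # Worked example (hypothetical data): with
        #     sv_list = [("stable", "1.0-1"), ("unstable", "1.2-1")]
        # and suite "unstable" enhancing nothing, this returns "1.2-1",
        # the highest version seen across the considered suites.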
2245         Cnf = Config()
2246         anyversion = None
2247         anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
2248         for (s, v) in sv_list:
2249             if s in [ x.lower() for x in anysuite ]:
2250                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2251                     anyversion = v
2252
2253         return anyversion
2254
2255     ################################################################################
2256
2257     def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2258         """
2259         @type sv_list: list
2260         @param sv_list: list of (suite, version) tuples to check
2261
2262         @type filename: string
2263         @param filename: name of the file being checked (used in messages)
2264
2265         @type new_version: string
2266         @param new_version: version of the upload being checked
2267
2268         Ensure versions are newer than existing packages in target
2269         suites and that cross-suite version checking rules as
2270         set out in the conf file are satisfied.
2271         """
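        # The per-suite rules come out of dak.conf; an illustrative stanza
        # (suite names are examples only) might read:
        #
        #     Suite::unstable::VersionChecks
        #     {
        #       MustBeNewerThan { Stable; Testing; };
        #     };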
2272
2273         cnf = Config()
2274
2275         # Check versions for each target suite
2276         for target_suite in self.pkg.changes["distribution"].keys():
2277             must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
2278             must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
2279
2280             # Enforce "must be newer than target suite" even if conffile omits it
2281             if target_suite not in must_be_newer_than:
2282                 must_be_newer_than.append(target_suite)
2283
2284             for (suite, existent_version) in sv_list:
2285                 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
2286
2287                 if suite in must_be_newer_than and sourceful and vercmp < 1:
2288                     self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2289
2290                 if suite in must_be_older_than and vercmp > -1:
2291                     cansave = 0
2292
2293                     if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2294                         # we really use the other suite, ignoring the conflicting one ...
2295                         addsuite = self.pkg.changes["distribution-version"][suite]
2296
2297                         add_version = self.get_anyversion(sv_list, addsuite)
2298                         target_version = self.get_anyversion(sv_list, target_suite)
2299
2300                         if not add_version:
2301                             # not add_version can only happen if we map to a suite
2302                             # that doesn't enhance the suite we're propup'ing from.
2303                             # so "propup-ver x a b c; map a d" is a problem only if
2304                             # d doesn't enhance a.
2305                             #
2306                             # i think we could always propagate in this case, rather
2307                             # than complaining. either way, this isn't a REJECT issue
2308                             #
2309                             # And - we really should complain to the dorks who configured dak
2310                             self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyway" % (suite, addsuite))
2311                             self.pkg.changes.setdefault("propdistribution", {})
2312                             self.pkg.changes["propdistribution"][addsuite] = 1
2313                             cansave = 1
2314                         elif not target_version:
2315                             # not target_version is true when the package is NEW
2316                             # we could just stick with the "...old version..." REJECT
2317                             # for this, I think.
2318                             self.rejects.append("Won't propagate NEW packages.")
2319                         elif apt_pkg.VersionCompare(new_version, add_version) < 0:
2320                             # propagation would be redundant. no need to reject though.
2321                             self.warnings.append("ignoring version conflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2322                             cansave = 1
2323                         elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2324                              apt_pkg.VersionCompare(add_version, target_version) >= 0:
2325                             # propagate!!
2326                             self.warnings.append("Propagating upload to %s" % (addsuite))
2327                             self.pkg.changes.setdefault("propdistribution", {})
2328                             self.pkg.changes["propdistribution"][addsuite] = 1
2329                             cansave = 1
2330
2331                     if not cansave:
2332                         self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2333
2334     ################################################################################
2335     def check_binary_against_db(self, filename, session):
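        """
        Run cross-suite version checks for a binary and reject the upload if
        an identical package/version/architecture is already in the archive.
        """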
2336         # Ensure version is sane
2337         q = session.query(BinAssociation)
2338         q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[filename]["package"])
2339         q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[filename]["architecture"], 'all']))
2340
2341         self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
2342                                        filename, self.pkg.files[filename]["version"], sourceful=False)
2343
2344         # Check for any existing copies of the file
2345         q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2346         q = q.filter_by(version=self.pkg.files[filename]["version"])
2347         q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2348
2349         if q.count() > 0:
2350             self.rejects.append("%s: cannot overwrite existing copy already in the archive." % filename)
2351
2352     ################################################################################
2353
2354     def check_source_against_db(self, filename, session):
2355         """Run cross-suite version checks for a source upload against
2356         the versions already present in the archive."""
2357         source = self.pkg.dsc.get("source")
2358         version = self.pkg.dsc.get("version")
2359
2360         # Ensure version is sane
2361         q = session.query(SrcAssociation)
2362         q = q.join(DBSource).filter(DBSource.source==source)
2363
2364         self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
2365                                        filename, version, sourceful=True)
2366
2367     ################################################################################
2368     def check_dsc_against_db(self, filename, session):
2369         """
2370
2371         @warning: NB: this function can remove entries from the 'files' index [if
2372          the orig tarball is a duplicate of the one in the archive]; if
2373          you're iterating over 'files' and call this function as part of
2374          the loop, be sure to add a check to the top of the loop to
2375          ensure you haven't just tried to dereference the deleted entry.
2376
2377         """
2378
2379         Cnf = Config()
2380         self.pkg.orig_files = {} # XXX: do we need to clear it?
2381         orig_files = self.pkg.orig_files
2382
2383         # Try and find all files mentioned in the .dsc.  This has
2384         # to work harder to cope with the multiple possible
2385         # locations of an .orig.tar.gz.
2386         # The ordering on the select is needed to pick the newest orig
2387         # when it exists in multiple places.
2388         for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2389             found = None
2390             if self.pkg.files.has_key(dsc_name):
2391                 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2392                 actual_size = int(self.pkg.files[dsc_name]["size"])
2393                 found = "%s in incoming" % (dsc_name)
2394
2395                 # Check the file does not already exist in the archive
2396                 ql = get_poolfile_like_name(dsc_name, session)
2397
2398                 # Strip out anything that isn't '%s' or '/%s$'
2399                 # (build a new list; calling remove() while iterating skips entries)
2400                 ql = [ i for i in ql if i.filename.endswith(dsc_name) ]
2402
2403                 # "[dak] has not broken them.  [dak] has fixed a
2404                 # brokenness.  Your crappy hack exploited a bug in
2405                 # the old dinstall."
2406                 #
2407                 # "(Come on!  I thought it was always obvious that
2408                 # one just doesn't release different files with
2409                 # the same name and version.)"
2410                 #                        -- ajk@ on d-devel@l.d.o
2411
2412                 if len(ql) > 0:
2413                     # Ignore exact matches for .orig.tar.gz
2414                     match = 0
2415                     if re_is_orig_source.match(dsc_name):
2416                         for i in ql:
2417                             if self.pkg.files.has_key(dsc_name) and \
2418                                int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2419                                self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2420                                 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2421                                 # TODO: Don't delete the entry, just mark it as not needed
2422                                 # This would fix the stupidity of changing something we often iterate over
2423                                 # whilst we're doing it
2424                                 del self.pkg.files[dsc_name]
2425                                 dsc_entry["files id"] = i.file_id
2426                                 if not orig_files.has_key(dsc_name):
2427                                     orig_files[dsc_name] = {}
2428                                 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2429                                 match = 1
2430
                    if not match:
                        self.rejects.append("cannot overwrite existing copy of '%s' already in the archive." % (dsc_name))
2433
2434             elif re_is_orig_source.match(dsc_name):
2435                 # Check in the pool
2436                 ql = get_poolfile_like_name(dsc_name, session)
2437
                # Keep only entries whose filename ends with the .dsc entry's
                # name, as above.
                # TODO: Shouldn't we just search for things which end with
                # our string explicitly in the SQL?
                ql = [ i for i in ql if i.filename.endswith(dsc_name) ]
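                # A SQL-level alternative (sketch only, per the TODO): let
                # the database do the suffix match instead of post-filtering
                # in Python, e.g. with SQLAlchemy's endswith() operator:
                #
                #   ql = session.query(PoolFile).filter(
                #            PoolFile.filename.endswith(dsc_name)).all()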
2443
2444                 if len(ql) > 0:
2445                     # Unfortunately, we may get more than one match here if,
2446                     # for example, the package was in potato but had an -sa
2447                     # upload in woody.  So we need to choose the right one.
2448
                    # Default to the first match; it's what we use when there
                    # is only one, or when none of the candidates below match
                    # on md5sum and size.
                    x = ql[0]
2451
2452                     if len(ql) > 1:
2453                         for i in ql:
2454                             old_file = os.path.join(i.location.path, i.filename)
2455                             old_file_fh = utils.open_file(old_file)
2456                             actual_md5 = apt_pkg.md5sum(old_file_fh)
2457                             old_file_fh.close()
2458                             actual_size = os.stat(old_file)[stat.ST_SIZE]
2459                             if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2460                                 x = i
2461
                    # Use the chosen candidate 'x' here, not the loop
                    # variable 'i' left over from above.
                    old_file = os.path.join(x.location.path, x.filename)
2463                     old_file_fh = utils.open_file(old_file)
2464                     actual_md5 = apt_pkg.md5sum(old_file_fh)
2465                     old_file_fh.close()
2466                     actual_size = os.stat(old_file)[stat.ST_SIZE]
2467                     found = old_file
2468                     suite_type = x.location.archive_type
2469                     # need this for updating dsc_files in install()
2470                     dsc_entry["files id"] = x.file_id
2471                     # See install() in process-accepted...
2472                     if not orig_files.has_key(dsc_name):
2473                         orig_files[dsc_name] = {}
2474                     orig_files[dsc_name]["id"] = x.file_id
2475                     orig_files[dsc_name]["path"] = old_file
2476                     orig_files[dsc_name]["location"] = x.location.location_id
2477                 else:
2478                     # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2479                     # Not there? Check the queue directories...
2480                     for directory in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2481                         if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2482                             continue
2483                         in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2484                         if os.path.exists(in_otherdir):
2485                             in_otherdir_fh = utils.open_file(in_otherdir)
2486                             actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2487                             in_otherdir_fh.close()
2488                             actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2489                             found = in_otherdir
2490                             if not orig_files.has_key(dsc_name):
2491                                 orig_files[dsc_name] = {}
2492                             orig_files[dsc_name]["path"] = in_otherdir
2493
2494                     if not found:
2495                         self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2496                         continue
2497             else:
2498                 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2499                 continue
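            # Finally, compare whichever copy we located against the
            # checksums the .dsc claims for it.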
2500             if actual_md5 != dsc_entry["md5sum"]:
2501                 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2502             if actual_size != int(dsc_entry["size"]):
2503                 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2504
2505     ################################################################################
    # This is used by process-new and process-holding to recheck a changes
    # file at the time we're running.  It mainly wraps various other internal
    # functions and is similar to accepted_checks; the two should probably be
    # tidied up and combined.
2510     def recheck(self, session):
2511         cnf = Config()
2512         for f in self.pkg.files.keys():
            # The .orig.tar.gz can disappear out from under us if it's a
            # duplicate of one in the archive.
2515             if not self.pkg.files.has_key(f):
2516                 continue
2517
2518             entry = self.pkg.files[f]
2519
2520             # Check that the source still exists
2521             if entry["type"] == "deb":
2522                 source_version = entry["source version"]
2523                 source_package = entry["source package"]
2524                 if not self.pkg.changes["architecture"].has_key("source") \
2525                    and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
2526                     source_epochless_version = re_no_epoch.sub('', source_version)
2527                     dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
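                    # e.g. a source version of "1:1.2-3" becomes "1.2-3",
                    # giving a filename like "foo_1.2-3.dsc"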
2528                     found = False
2529                     for q in ["Accepted", "Embargoed", "Unembargoed", "Newstage"]:
                        if cnf.has_key("Dir::Queue::%s" % (q)):
                            if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (q)], dsc_filename)):
2532                                 found = True
2533                     if not found:
2534                         self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2535
2536             # Version and file overwrite checks
2537             if entry["type"] == "deb":
2538                 self.check_binary_against_db(f, session)
2539             elif entry["type"] == "dsc":
2540                 self.check_source_against_db(f, session)
2541                 self.check_dsc_against_db(f, session)
2542
2543     ################################################################################
2544     def accepted_checks(self, overwrite_checks, session):
        # Recheck anything that relies on the database, since the database
        # isn't frozen between accept time and our run time when called
        # from process-accepted.
2547
2548         # overwrite_checks is set to False when installing to stable/oldstable
2549
        propagate = {}
        nopropagate = {}
2552
2553         # Find the .dsc (again)
2554         dsc_filename = None
2555         for f in self.pkg.files.keys():
2556             if self.pkg.files[f]["type"] == "dsc":
2557                 dsc_filename = f
2558
2559         for checkfile in self.pkg.files.keys():
            # The .orig.tar.gz can disappear out from under us if it's a
            # duplicate of one in the archive.
2562             if not self.pkg.files.has_key(checkfile):
2563                 continue
2564
2565             entry = self.pkg.files[checkfile]
2566
2567             # Check that the source still exists
2568             if entry["type"] == "deb":
2569                 source_version = entry["source version"]
2570                 source_package = entry["source package"]
                if not self.pkg.changes["architecture"].has_key("source") \
                   and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
2573                     self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2574
2575             # Version and file overwrite checks
2576             if overwrite_checks:
2577                 if entry["type"] == "deb":
2578                     self.check_binary_against_db(checkfile, session)
2579                 elif entry["type"] == "dsc":
2580                     self.check_source_against_db(checkfile, session)
2581                     self.check_dsc_against_db(dsc_filename, session)
2582
            # Propagate to a suite listed in propdistribution when the entry
            # is already in that suite's override tables:
            for suite in self.pkg.changes.get("propdistribution", {}).keys():
                if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
                    propagate[suite] = 1
                else:
                    nopropagate[suite] = 1

        for suite in propagate.keys():
            if suite in nopropagate:
                continue
            self.pkg.changes["distribution"][suite] = 1
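        # Illustrative effect (sketch): if the changes file asked for
        # propagation to "testing" and every file is already in testing's
        # override tables, "testing" joins the distribution set here; a
        # single miss and the suite is left out.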
2594
        for checkfile in self.pkg.files.keys():
            # Re-fetch the entry: the 'entry' left over from the loop above
            # refers to the last file only.
            entry = self.pkg.files[checkfile]
            # Check the package is still in the override tables
            for suite in self.pkg.changes["distribution"].keys():
                if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
                    self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2600
2601     ################################################################################
    # This is not really a reject, but an unaccept; since a) the code for
    # that is non-trivial (reopen bugs, unannounce etc.) and b) this should
    # be extremely rare, for now we'll go with whining at our admin folks...
2605
2606     def do_unaccept(self):
2607         cnf = Config()
2608
2609         self.update_subst()
2610         self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2611         self.Subst["__REJECT_MESSAGE__"] = self.package_info()
2612         self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2613         self.Subst["__BCC__"] = "X-DAK: dak process-accepted"
2614         if cnf.has_key("Dinstall::Bcc"):
2615             self.Subst["__BCC__"] += "\nBcc: %s" % (cnf["Dinstall::Bcc"])
2616
2617         template = os.path.join(cnf["Dir::Templates"], "process-accepted.unaccept")
2618
2619         reject_mail_message = utils.TemplateSubst(self.Subst, template)
2620
        # Write the rejection email out as the <foo>.reason file
        reason_filename = os.path.basename(self.pkg.changes_file[:-8]) + ".reason"  # strip ".changes"
2623         reject_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2624
2625         # If we fail here someone is probably trying to exploit the race
2626         # so let's just raise an exception ...
2627         if os.path.exists(reject_filename):
2628             os.unlink(reject_filename)
2629
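        # O_EXCL makes this open fail with EEXIST if the file reappears
        # between the unlink above and this call, so a racing writer raises
        # OSError rather than being silently overwritten.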
2630         fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2631         os.write(fd, reject_mail_message)
2632         os.close(fd)
2633
2634         utils.send_mail(reject_mail_message)
2635
2636         del self.Subst["__REJECTOR_ADDRESS__"]
2637         del self.Subst["__REJECT_MESSAGE__"]
2638         del self.Subst["__CC__"]
2639
2640     ################################################################################
2641     # If any file of an upload has a recent mtime then chances are good
2642     # the file is still being uploaded.
2643
2644     def upload_too_new(self):
2645         cnf = Config()
2646         too_new = False
2647         # Move back to the original directory to get accurate time stamps
2648         cwd = os.getcwd()
2649         os.chdir(self.pkg.directory)
2650         file_list = self.pkg.files.keys()
2651         file_list.extend(self.pkg.dsc_files.keys())
2652         file_list.append(self.pkg.changes_file)
        for f in file_list:
            try:
                last_modified = time.time() - os.path.getmtime(f)
                if last_modified < int(cnf["Dinstall::SkipTime"]):
                    too_new = True
                    break
            except OSError:
                # The file may have been moved or deleted already; a file
                # we can't stat can't be "too new".
                pass
2661
2662         os.chdir(cwd)
2663         return too_new
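    # Illustrative use (a sketch; exact call sites vary): a queue processor
    # would typically defer an upload whose files are still arriving, e.g.:
    #
    #   if upload.upload_too_new():
    #       utils.warn("skipping %s: still being uploaded" % upload.pkg.changes_file)
    #   else:
    #       upload.recheck(session)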