#!/usr/bin/env python
# vim:set et sw=4:

"""
Queue utility functions for dak

@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2001 - 2006 James Troup <james@nocrew.org>
@copyright: 2009, 2010  Joerg Jaspert <joerg@debian.org>
@license: GNU General Public License version 2 or later
"""

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

###############################################################################

import errno
import os
import stat
import sys
import time
import apt_inst
import apt_pkg
import utils
import commands
import shutil
import textwrap
from types import *
from sqlalchemy.sql.expression import desc
from sqlalchemy.orm.exc import NoResultFound

import yaml

from dak_exceptions import *
from changes import *
from regexes import *
from config import Config
from holding import Holding
from urgencylog import UrgencyLog
from dbconn import *
from summarystats import SummaryStats
from utils import parse_changes, check_dsc_files
from textutils import fix_maintainer
from binary import Binary
from lintian import parse_lintian_output, generate_reject_messages

###############################################################################
def get_type(f, session):
    """
    Get the file type of C{f}

    @type f: dict
    @param f: file entry from Changes object

    @type session: SQLA Session
    @param session: SQL Alchemy session object

    @rtype: string
    @return: filetype

    """
    # Determine the type
    if f.has_key("dbtype"):
        file_type = f["dbtype"]
    elif re_source_ext.match(f["type"]):
        file_type = "dsc"
    else:
        # file_type is not yet bound on this branch, so report the raw type
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (f["type"]))

    # Validate the override type
    type_id = get_override_type(file_type, session)
    if type_id is None:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))

    return file_type

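# Illustrative sketch (not part of the original module): how get_type() is
# typically driven over the files dict of an upload.  The files/session
# plumbing here is assumed, not taken from this file.
def _example_file_types(files, session):
    """Hypothetical helper: map each file entry to its override type."""
    types = {}
    for name, f in files.items():
        types[name] = get_type(f, session)   # "deb", "udeb" or "dsc"
    return types
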
################################################################################

# Determine what parts in a .changes are NEW

def determine_new(changes, files, warn=1, session=None):
    """
    Determine what parts in a C{changes} file are NEW.

    @type changes: Upload.Pkg.changes dict
    @param changes: Changes dictionary

    @type files: Upload.Pkg.files dict
    @param files: Files dictionary

    @type warn: bool
    @param warn: Warn if overrides are added for (old)stable

    @rtype: dict
    @return: dictionary of NEW components.

    """
    new = {}

    # Build up a list of potentially new things
    for name, f in files.items():
        # Skip byhand elements
#        if f["type"] == "byhand":
#            continue
        pkg = f["package"]
        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f, session)
        component = f["component"]

        if file_type == "dsc":
            priority = "source"

        if not new.has_key(pkg):
            new[pkg] = {}
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        else:
            old_type = new[pkg]["type"]
            if old_type != file_type:
                # source gets trumped by deb or udeb
                if old_type == "dsc":
                    new[pkg]["priority"] = priority
                    new[pkg]["section"] = section
                    new[pkg]["type"] = file_type
                    new[pkg]["component"] = component

        new[pkg]["files"].append(name)

        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]

    # Fix up the list of target suites
    cnf = Config()
    for suite in changes["suite"].keys():
        override = cnf.Find("Suite::%s::OverrideSuite" % (suite))
        if override:
            (olderr, newerr) = (get_suite(suite, session) == None,
                                get_suite(override, session) == None)
            if olderr or newerr:
                (oinv, ninv) = ("", "")
                if olderr: oinv = "invalid "
                if newerr: ninv = "invalid "
                print "warning: overriding %ssuite %s to %ssuite %s" % (
                        oinv, suite, ninv, override)
            del changes["suite"][suite]
            changes["suite"][override] = 1

    for suite in changes["suite"].keys():
        for pkg in new.keys():
            ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
            if len(ql) > 0:
                for file_entry in new[pkg]["files"]:
                    if files[file_entry].has_key("new"):
                        del files[file_entry]["new"]
                del new[pkg]

    if warn:
        for s in ['stable', 'oldstable']:
            if changes["suite"].has_key(s):
                print "WARNING: overrides will be added for %s!" % s
        for pkg in new.keys():
            if new[pkg].has_key("othercomponents"):
                print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])

    return new

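# Illustrative sketch (not in the original module): determine_new() is the
# sort of entry point a 'dak process-new' style tool would use.  The driver
# below and its names are hypothetical.
def _example_list_new_packages(upload, session):
    """Hypothetical helper: names of packages that lack overrides (NEW)."""
    new = determine_new(upload.pkg.changes, upload.pkg.files, warn=0,
                        session=session)
    return new.keys()
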
################################################################################

def check_valid(new, session=None):
    """
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages can not be udeb
      - source priority can only be assigned to dsc file types

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.

    """
    for pkg in new.keys():
        section_name = new[pkg]["section"]
        priority_name = new[pkg]["priority"]
        file_type = new[pkg]["type"]

        section = get_section(section_name, session)
        if section is None:
            new[pkg]["section id"] = -1
        else:
            new[pkg]["section id"] = section.section_id

        priority = get_priority(priority_name, session)
        if priority is None:
            new[pkg]["priority id"] = -1
        else:
            new[pkg]["priority id"] = priority.priority_id

        # Sanity checks
        di = section_name.find("debian-installer") != -1

        # If d-i, we must be udeb and vice-versa
        if (di and file_type not in ("udeb", "dsc")) or \
           (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1

        # If dsc, the priority must be source and vice-versa.  Compare the
        # name: 'priority' here is a database object, not a string.
        if (priority_name == "source" and file_type != "dsc") or \
           (priority_name != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1

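# Illustrative sketch (not in the original module): after check_valid() has
# run, a "section id" or "priority id" of -1 marks an entry that needs human
# attention.  This driver is hypothetical.
def _example_invalid_new_entries(new, session=None):
    """Hypothetical helper: names of NEW entries with invalid metadata."""
    check_valid(new, session=session)
    return [pkg for pkg in new.keys()
            if new[pkg]["section id"] == -1 or new[pkg]["priority id"] == -1]
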
###############################################################################

# Used by Upload.check_timestamps
class TarTime(object):
    def __init__(self, future_cutoff, past_cutoff):
        self.reset()
        self.future_cutoff = future_cutoff
        self.past_cutoff = past_cutoff

    def reset(self):
        self.future_files = {}
        self.ancient_files = {}

    def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
        if MTime > self.future_cutoff:
            self.future_files[Name] = MTime
        if MTime < self.past_cutoff:
            self.ancient_files[Name] = MTime

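# Illustrative sketch (not in the original module): TarTime is written as a
# callback for the old python-apt extraction API.  Upload.check_timestamps
# (outside this excerpt) is assumed to drive it roughly like this; the
# apt_inst.debExtract call reflects that assumption.
def _example_check_timestamps(deb_filename, future_cutoff, past_cutoff):
    tar = TarTime(future_cutoff, past_cutoff)
    deb_file = utils.open_file(deb_filename)
    apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
    deb_file.close()
    return (tar.future_files, tar.ancient_files)
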
###############################################################################

class Upload(object):
    """
    Everything that has to do with processing an upload.

    """
    def __init__(self):
        self.logger = None
        self.pkg = Changes()
        self.reset()

    ###########################################################################

    def reset(self):
        """ Reset a number of internal variables."""

        # Initialize the substitution template map
        cnf = Config()
        self.Subst = {}
        self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
        self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
        self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
        self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]

        self.rejects = []
        self.warnings = []
        self.notes = []

        self.later_check_files = []

        self.pkg.reset()

    def package_info(self):
        """
        Format various messages from this Upload to send to the maintainer.
        """

        msgs = (
            ('Reject Reasons', self.rejects),
            ('Warnings', self.warnings),
            ('Notes', self.notes),
        )

        msg = ''
        for title, messages in msgs:
            if messages:
                msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
        msg += '\n\n'

        return msg

    ###########################################################################
    def update_subst(self):
        """ Set up the per-package template substitution mappings """

        cnf = Config()

        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not self.pkg.changes.has_key("architecture") or not \
           isinstance(self.pkg.changes["architecture"], dict):
            self.pkg.changes["architecture"] = { "Unknown" : "" }

        # and maintainer2047 may not exist.
        if not self.pkg.changes.has_key("maintainer2047"):
            self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]

        self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
        self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if self.pkg.changes["architecture"].has_key("source") and \
           self.pkg.changes["changedby822"] != "" and \
           (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):

            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
        else:
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")

        if "sponsoremail" in self.pkg.changes:
            self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]

        if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
            self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if cnf.get("Dinstall::OverrideMaintainer"):
            self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
            self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]

        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
        self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")

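    # Illustrative usage (not in the original module): the Subst map built
    # above is consumed by the mail templates, conventionally through
    # utils.TemplateSubst (assumed signature: substitution map, template
    # filename).  For example:
    #
    #   upload.update_subst()
    #   mail_message = utils.TemplateSubst(
    #       upload.Subst, cnf["Dir::Templates"] + "/process-unchecked.accepted")
    #   utils.send_mail(mail_message)
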
    ###########################################################################
    def load_changes(self, filename):
        """
        Load a changes file and set up a dictionary around it. Also checks for
        mandatory fields within.

        @type filename: string
        @param filename: Changes filename, full path.

        @rtype: boolean
        @return: whether the changes file was valid or not.  We may want to
                 reject even if this is True (see what gets put in self.rejects).
                 This is simply to prevent us even trying things later which will
                 fail because we couldn't properly parse the file.
        """
        Cnf = Config()
        self.pkg.changes_file = filename

        # Parse the .changes field into a dictionary
        try:
            self.pkg.changes.update(parse_changes(filename))
        except CantOpenError:
            self.rejects.append("%s: can't read file." % (filename))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except ChangesUnicodeError:
            self.rejects.append("%s: changes file not proper utf-8" % (filename))
            return False

        # Parse the Files field from the .changes into another dictionary
        try:
            self.pkg.files.update(utils.build_file_list(self.pkg.changes))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (filename, format))
            return False

        # Check for mandatory fields
        for i in ("distribution", "source", "binary", "architecture",
                  "version", "maintainer", "files", "changes", "description"):
            if not self.pkg.changes.has_key(i):
                # Avoid undefined errors later
                self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
                return False

        # Strip a source version in brackets from the source field
        if re_strip_srcver.search(self.pkg.changes["source"]):
            self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])

        # Ensure the source field is a valid package name.
        if not re_valid_pkg_name.match(self.pkg.changes["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))

        # Split multi-value fields into a lower-level dictionary
        for i in ("architecture", "distribution", "binary", "closes"):
            o = self.pkg.changes.get(i, "")
            if o != "":
                del self.pkg.changes[i]

            self.pkg.changes[i] = {}

            for j in o.split():
                self.pkg.changes[i][j] = 1

        # Fix the Maintainer: field to be RFC822/2047 compatible
        try:
            (self.pkg.changes["maintainer822"],
             self.pkg.changes["maintainer2047"],
             self.pkg.changes["maintainername"],
             self.pkg.changes["maintaineremail"]) = \
                   fix_maintainer(self.pkg.changes["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["maintainer"], msg))

        # ...likewise for the Changed-By: field if it exists.
        try:
            (self.pkg.changes["changedby822"],
             self.pkg.changes["changedby2047"],
             self.pkg.changes["changedbyname"],
             self.pkg.changes["changedbyemail"]) = \
                   fix_maintainer(self.pkg.changes.get("changed-by", ""))
        except ParseMaintError, msg:
            self.pkg.changes["changedby822"] = ""
            self.pkg.changes["changedby2047"] = ""
            self.pkg.changes["changedbyname"] = ""
            self.pkg.changes["changedbyemail"] = ""

            self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["changed-by"], msg))

        # Ensure all the values in Closes: are numbers
        if self.pkg.changes.has_key("closes"):
            for i in self.pkg.changes["closes"].keys():
                if re_isanum.match(i) is None:
                    self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))

        # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
        self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
        self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])

        # Check the .changes is non-empty
        if not self.pkg.files:
            self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
            return False

        # Changes was syntactically valid even if we'll reject
        return True

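    # Illustrative usage (not in the original module), with a hypothetical
    # path:
    #
    #   u = Upload()
    #   if u.load_changes("/srv/queue/foo_1.0-1_amd64.changes"):
    #       pass  # run the remaining checks
    #   for r in u.rejects:
    #       print r
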
    ###########################################################################

    def check_distributions(self):
        "Check and map the Distribution field"

        Cnf = Config()

        # Handle suite mappings
        for m in Cnf.ValueList("SuiteMappings"):
            args = m.split()
            mtype = args[0]
            if mtype == "map" or mtype == "silent-map":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    del self.pkg.changes["distribution"][source]
                    self.pkg.changes["distribution"][dest] = 1
                    if mtype != "silent-map":
                        self.notes.append("Mapping %s to %s." % (source, dest))
                if self.pkg.changes.has_key("distribution-version"):
                    if self.pkg.changes["distribution-version"].has_key(source):
                        self.pkg.changes["distribution-version"][source] = dest
            elif mtype == "map-unreleased":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    for arch in self.pkg.changes["architecture"].keys():
                        if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
                            self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
                            del self.pkg.changes["distribution"][source]
                            self.pkg.changes["distribution"][dest] = 1
                            break
            elif mtype == "ignore":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    del self.pkg.changes["distribution"][suite]
                    self.warnings.append("Ignoring %s as a target suite." % (suite))
            elif mtype == "reject":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    self.rejects.append("Uploads to %s are not accepted." % (suite))
            elif mtype == "propup-version":
                # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
                #
                # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
                if self.pkg.changes["distribution"].has_key(args[1]):
                    self.pkg.changes.setdefault("distribution-version", {})
                    for suite in args[2:]:
                        self.pkg.changes["distribution-version"][suite] = suite

        # Ensure there is (still) a target distribution
        if len(self.pkg.changes["distribution"].keys()) < 1:
            self.rejects.append("No valid distribution remaining.")

        # Ensure target distributions exist
        for suite in self.pkg.changes["distribution"].keys():
            if not Cnf.has_key("Suite::%s" % (suite)):
                self.rejects.append("Unknown distribution `%s'." % (suite))

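    # For reference, SuiteMappings entries have these shapes (the suite names
    # below are hypothetical; the keywords match the parser above):
    #
    #   "map stable proposed-updates"
    #   "silent-map stable-security proposed-updates"
    #   "map-unreleased testing unstable"
    #   "ignore oldstable"
    #   "reject experimental-security"
    #   "propup-version testing-security testing testing-proposed-updates"
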
    ###########################################################################

    def binary_file_checks(self, f, session):
        cnf = Config()
        entry = self.pkg.files[f]

        # Extract package control information
        deb_file = utils.open_file(f)
        try:
            control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
        except:
            self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
            deb_file.close()
            # Can't continue, none of the checks on control would work.
            return

        # Check for mandatory "Description:"
        deb_file.seek(0)
        try:
            # The subscript raises KeyError if Description is missing
            apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
        except:
            self.rejects.append("%s: Missing Description in binary package" % (f))
            return

        deb_file.close()

        # Check for mandatory fields
        for field in [ "Package", "Architecture", "Version" ]:
            if control.Find(field) is None:
                # Can't continue
                self.rejects.append("%s: No %s field in control." % (f, field))
                return

        # Ensure the package name matches the one given in the .changes
        if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
            self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))

        # Validate the package field
        package = control.Find("Package")
        if not re_valid_pkg_name.match(package):
            self.rejects.append("%s: invalid package name '%s'." % (f, package))

        # Validate the version field
        version = control.Find("Version")
        if not re_valid_version.match(version):
            self.rejects.append("%s: invalid version number '%s'." % (f, version))

        # Ensure the architecture of the .deb is one we know about.
        default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
        architecture = control.Find("Architecture")
        upload_suite = self.pkg.changes["distribution"].keys()[0]

        if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session=session)] \
           and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session=session)]:
            self.rejects.append("Unknown architecture '%s'." % (architecture))

        # Ensure the architecture of the .deb is one of the ones
        # listed in the .changes.
        if not self.pkg.changes["architecture"].has_key(architecture):
            self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))

        # Sanity-check the Depends field
        depends = control.Find("Depends")
        if depends == '':
            self.rejects.append("%s: Depends field is empty." % (f))

        # Sanity-check the Provides field
        provides = control.Find("Provides")
        if provides:
            provide = re_spacestrip.sub('', provides)
            if provide == '':
                self.rejects.append("%s: Provides field is empty." % (f))
            prov_list = provide.split(",")
            for prov in prov_list:
                if not re_valid_pkg_name.match(prov):
                    self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))

        # Check the section & priority match those given in the .changes (non-fatal)
        if control.Find("Section") and entry["section"] != "" \
           and entry["section"] != control.Find("Section"):
            self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
                                (f, control.Find("Section", ""), entry["section"]))
        if control.Find("Priority") and entry["priority"] != "" \
           and entry["priority"] != control.Find("Priority"):
            self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
                                (f, control.Find("Priority", ""), entry["priority"]))

        entry["package"] = package
        entry["architecture"] = architecture
        entry["version"] = version
        entry["maintainer"] = control.Find("Maintainer", "")

        if f.endswith(".udeb"):
            self.pkg.files[f]["dbtype"] = "udeb"
        elif f.endswith(".deb"):
            self.pkg.files[f]["dbtype"] = "deb"
        else:
            self.rejects.append("%s is neither a .deb nor a .udeb." % (f))

        entry["source"] = control.Find("Source", entry["package"])

        # Get the source version
        source = entry["source"]
        source_version = ""

        if source.find("(") != -1:
            m = re_extract_src_version.match(source)
            source = m.group(1)
            source_version = m.group(2)

        if not source_version:
            source_version = self.pkg.files[f]["version"]

        entry["source package"] = source
        entry["source version"] = source_version

        # Ensure the filename matches the contents of the .deb
        m = re_isadeb.match(f)

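        # For reference: re_isadeb (daklib/regexes.py) splits a filename such
        # as "foo_1.2-3_amd64.deb" into package ("foo"), epochless version
        # ("1.2-3") and architecture ("amd64"), the three groups used below.
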
        #  package name
        file_package = m.group(1)
        if entry["package"] != file_package:
            self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
                                (f, file_package, entry["dbtype"], entry["package"]))
        epochless_version = re_no_epoch.sub('', control.Find("Version"))

        #  version
        file_version = m.group(2)
        if epochless_version != file_version:
            self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
                                (f, file_version, entry["dbtype"], epochless_version))

        #  architecture
        file_architecture = m.group(3)
        if entry["architecture"] != file_architecture:
            self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
                                (f, file_architecture, entry["dbtype"], entry["architecture"]))

        # Check that a matching source exists
        source_version = entry["source version"]
        source_package = entry["source package"]
        if self.pkg.changes["architecture"].has_key("source"):
            if source_version != self.pkg.changes["version"]:
                self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
                                    (source_version, f, self.pkg.changes["version"]))
        else:
            # Check in the SQL database
            if not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
                # Check in one of the other directories
                source_epochless_version = re_no_epoch.sub('', source_version)
                dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
                if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
                    entry["byhand"] = 1
                elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
                    entry["new"] = 1
                else:
                    dsc_file_exists = False
                    for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
                        if cnf.has_key("Dir::Queue::%s" % (myq)):
                            if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
                                dsc_file_exists = True
                                break

                    if not dsc_file_exists:
                        self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))

        # Check the version and for file overwrites
        self.check_binary_against_db(f, session)

        # Temporarily disable contents generation until we change the table storage layout
        #b = Binary(f)
        #b.scan_package()
        #if len(b.rejects) > 0:
        #    for j in b.rejects:
        #        self.rejects.append(j)

    def source_file_checks(self, f, session):
        entry = self.pkg.files[f]

        m = re_issource.match(f)
        if not m:
            return

        entry["package"] = m.group(1)
        entry["version"] = m.group(2)
        entry["type"] = m.group(3)

        # Ensure the source package name matches the Source field in the .changes
        if self.pkg.changes["source"] != entry["package"]:
            self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))

        # Ensure the source version matches the version in the .changes file
        if re_is_orig_source.match(f):
            changes_version = self.pkg.changes["chopversion2"]
        else:
            changes_version = self.pkg.changes["chopversion"]

        if changes_version != entry["version"]:
            self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))

        # Ensure the .changes lists source in the Architecture field
        if not self.pkg.changes["architecture"].has_key("source"):
            self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))

        # Check the signature of a .dsc file
        if entry["type"] == "dsc":
            # check_signature returns either:
            #  (None, [list, of, rejects]) or (signature, [])
            (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
            for j in rejects:
                self.rejects.append(j)

        entry["architecture"] = "source"

    def per_suite_file_checks(self, f, suite, session):
        cnf = Config()
        entry = self.pkg.files[f]

        # Skip byhand
        if entry.has_key("byhand"):
            return

        # Check we have fields we need to do these checks
        oktogo = True
        for m in ['component', 'package', 'priority', 'size', 'md5sum']:
            if not entry.has_key(m):
                self.rejects.append("file '%s' does not have field %s set" % (f, m))
                oktogo = False

        if not oktogo:
            return

        # Handle component mappings
        for m in cnf.ValueList("ComponentMappings"):
            (source, dest) = m.split()
            if entry["component"] == source:
                entry["original component"] = source
                entry["component"] = dest

        # Ensure the component is valid for the target suite
        if cnf.has_key("Suite::%s::Components" % (suite)) and \
           entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
            self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
            return

        # Validate the component
        if not get_component(entry["component"], session):
            self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
            return

        # See if the package is NEW
        if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
            entry["new"] = 1

        # Validate the priority
        if entry["priority"].find('/') != -1:
            self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))

        # Determine the location
        location = cnf["Dir::Pool"]
        l = get_location(location, entry["component"], session=session)
        if l is None:
            self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
            entry["location id"] = -1
        else:
            entry["location id"] = l.location_id

        # Check the md5sum & size against existing files (if any)
        entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])

        found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
                                         entry["size"], entry["md5sum"], entry["location id"])

        if found is None:
            self.rejects.append("INTERNAL ERROR, check_poolfile() returned multiple matches for %s." % (f))
        elif found is False and poolfile is not None:
            self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
        else:
            if poolfile is None:
                entry["files id"] = None
            else:
                entry["files id"] = poolfile.file_id

        # Check for packages that have moved from one component to another
        entry['suite'] = suite
        res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
        if res.rowcount > 0:
            entry["othercomponents"] = res.fetchone()[0]

    def check_files(self, action=True):
        file_keys = self.pkg.files.keys()
        holding = Holding()
        cnf = Config()

        if action:
            cwd = os.getcwd()
            os.chdir(self.pkg.directory)
            for f in file_keys:
                ret = holding.copy_to_holding(f)
                if ret is not None:
                    self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)

            os.chdir(cwd)

        # Check whether we already know the changes file
        # [NB: this check must be done post-suite mapping]
        base_filename = os.path.basename(self.pkg.changes_file)

        session = DBConn().session()

        try:
            dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
            # if in the pool or in a queue other than unchecked, reject
            if (dbc.in_queue is None) \
                   or (dbc.in_queue is not None
                       and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
                self.rejects.append("%s file already known to dak" % base_filename)
        except NoResultFound, e:
            # not known, good
            pass

        has_binaries = False
        has_source = False

        for f, entry in self.pkg.files.items():
            # Ensure the file does not already exist in one of the accepted directories
            for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                if not cnf.has_key("Dir::Queue::%s" % (d)): continue
                if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d)], f)):
                    self.rejects.append("%s file already exists in the %s directory." % (f, d))

            if not re_taint_free.match(f):
                self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))

            # Check the file is readable
            if os.access(f, os.R_OK) == 0:
                # When running in -n, copy_to_holding() won't have
                # generated the reject_message, so we need to.
                if action:
                    if os.path.exists(f):
                        self.rejects.append("Can't read `%s'. [permission denied]" % (f))
                    else:
                        # Don't reject directly; mark the file for a later
                        # check, to deal with orig tarballs we can find in the pool
                        self.later_check_files.append(f)
                entry["type"] = "unreadable"
                continue

            # If it's byhand skip remaining checks
            if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Checks for a binary package...
            elif re_isadeb.match(f):
                has_binaries = True
                entry["type"] = "deb"

                # This routine appends to self.rejects/warnings as appropriate
                self.binary_file_checks(f, session)

            # Checks for a source package...
            elif re_issource.match(f):
                has_source = True

                # This routine appends to self.rejects/warnings as appropriate
                self.source_file_checks(f, session)

            # Not a binary or source package?  Assume byhand...
            else:
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Per-suite file checks
            entry["oldfiles"] = {}
            for suite in self.pkg.changes["distribution"].keys():
                self.per_suite_file_checks(f, suite, session)

        session.close()

        # If the .changes file says it has source, it must have source.
        if self.pkg.changes["architecture"].has_key("source"):
            if not has_source:
                self.rejects.append("no source found although the Architecture line in the changes file mentions source.")

            if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
                self.rejects.append("source only uploads are not supported.")

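    # Illustrative sketch (not in the original module): the rough order in
    # which a 'dak process-unchecked' style driver is assumed to run these
    # checks.  Queue moves and error handling are omitted.
    #
    #   u.load_changes(changes_path)
    #   u.check_distributions()      # must precede check_files (suite mapping)
    #   u.check_files(action=False)  # action=False: no copy into holding
    #   u.check_dsc(action=False)
    #   u.check_hashes()
    #   u.check_lintian()
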
    ###########################################################################
    def check_dsc(self, action=True, session=None):
        """Returns bool indicating whether or not the source changes are valid"""
        # Ensure there is source to check
        if not self.pkg.changes["architecture"].has_key("source"):
            return True

        # Find the .dsc
        dsc_filename = None
        for f, entry in self.pkg.files.items():
            if entry["type"] == "dsc":
                if dsc_filename:
                    self.rejects.append("cannot process a .changes file with multiple .dsc's.")
                    return False
                else:
                    dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            self.rejects.append("source uploads must contain a dsc file")
            return False

        # Parse the .dsc file
        try:
            self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1, dsc_file=1))
        except CantOpenError:
            # if not -n copy_to_holding() will have done this for us...
            if not action:
                self.rejects.append("%s: can't read file." % (dsc_filename))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
        except InvalidDscError, line:
            self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
        except ChangesUnicodeError:
            self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))

        # Build up the file list of files mentioned by the .dsc
        try:
            self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
        except NoFilesFieldError:
            self.rejects.append("%s: no Files: field." % (dsc_filename))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
            return False

        # Enforce mandatory fields
        for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
            if not self.pkg.dsc.has_key(i):
                self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
                return False

        # Validate the source and version fields
        if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
        if not re_valid_version.match(self.pkg.dsc["version"]):
            self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))

        # Only a limited list of source formats are allowed in each suite
        for dist in self.pkg.changes["distribution"].keys():
            allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
            if self.pkg.dsc["format"] not in allowed:
                self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s)" % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))

        # Validate the Maintainer field
        try:
            # We ignore the return value
            fix_maintainer(self.pkg.dsc["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                                 % (dsc_filename, self.pkg.dsc["maintainer"], msg))

        # Validate the build-depends field(s)
        for field_name in [ "build-depends", "build-depends-indep" ]:
            field = self.pkg.dsc.get(field_name)
            if field:
                # Have apt try to parse them...
                try:
                    apt_pkg.ParseSrcDepends(field)
                except:
                    self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))

        # Ensure the version number in the .dsc matches the version number in the .changes
        epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        changes_version = self.pkg.files[dsc_filename]["version"]

        if epochless_dsc_version != changes_version:
            self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))

        # Ensure the Files field contains only what's expected
        self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))

        # Ensure source is newer than existing source in target suites
        session = DBConn().session()
        self.check_source_against_db(dsc_filename, session)
        self.check_dsc_against_db(dsc_filename, session)
        session.close()

        # Finally, check if we're missing any files
        for f in self.later_check_files:
            self.rejects.append("Could not find file %s referenced in changes" % f)

        return True

    ###########################################################################

    def get_changelog_versions(self, source_dir):
        """Extracts the source package and (optionally) grabs the
        version history out of debian/changelog for the BTS."""

        cnf = Config()

        # Find the .dsc (again)
        dsc_filename = None
        for f in self.pkg.files.keys():
            if self.pkg.files[f]["type"] == "dsc":
                dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            return

        # Create a symlink mirror of the source files in our temporary directory
        for f in self.pkg.files.keys():
            m = re_issource.match(f)
            if m:
                src = os.path.join(source_dir, f)
                # If a file is missing for whatever reason, give up.
                if not os.path.exists(src):
                    return
                ftype = m.group(3)
                if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
                   self.pkg.orig_files[f].has_key("path"):
                    continue
                dest = os.path.join(os.getcwd(), f)
                os.symlink(src, dest)

        # If the orig files are not a part of the upload, create symlinks to the
        # existing copies.
        for orig_file in self.pkg.orig_files.keys():
            if not self.pkg.orig_files[orig_file].has_key("path"):
                continue
            dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
            os.symlink(self.pkg.orig_files[orig_file]["path"], dest)

        # Extract the source
        cmd = "dpkg-source -sn -x %s" % (dsc_filename)
        (result, output) = commands.getstatusoutput(cmd)
        if (result != 0):
            self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
            self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
            return

        if not cnf.Find("Dir::Queue::BTSVersionTrack"):
            return

        # Get the upstream version
        upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        if re_strip_revision.search(upstr_version):
            upstr_version = re_strip_revision.sub('', upstr_version)

        # Ensure the changelog file exists
        changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
        if not os.path.exists(changelog_filename):
            self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
            return

        # Parse the changelog
        self.pkg.dsc["bts changelog"] = ""
        changelog_file = utils.open_file(changelog_filename)
        for line in changelog_file.readlines():
            m = re_changelog_versions.match(line)
            if m:
                self.pkg.dsc["bts changelog"] += line
        changelog_file.close()

        # Check we found at least one revision in the changelog
        if not self.pkg.dsc["bts changelog"]:
            self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))

    def check_source(self):
        # Bail out if:
        #    a) there's no source
        if not self.pkg.changes["architecture"].has_key("source"):
            return

        tmpdir = utils.temp_dirname()

        # Move into the temporary directory
        cwd = os.getcwd()
        os.chdir(tmpdir)

        # Get the changelog version history
        self.get_changelog_versions(cwd)

        # Move back and cleanup the temporary tree
        os.chdir(cwd)

        try:
            shutil.rmtree(tmpdir)
        except OSError, e:
            if e.errno != errno.EACCES:
                print "E: couldn't remove temporary source tree %s: %s" % (tmpdir, e)
                utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))

            self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
            # We probably have u-r or u-w directories so chmod everything
            # and try again.
            cmd = "chmod -R u+rwx %s" % (tmpdir)
            result = os.system(cmd)
            if result != 0:
                utils.fubar("'%s' failed with result %s." % (cmd, result))
            shutil.rmtree(tmpdir)
        except Exception, e:
            print "E: unexpected error removing temporary source tree %s: %s" % (tmpdir, e)
            utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))

    ###########################################################################
    def ensure_hashes(self):
        # Make sure we recognise the format of the Files: field in the .changes
        format = self.pkg.changes.get("format", "0.0").split(".", 1)
        if len(format) == 2:
            format = int(format[0]), int(format[1])
        else:
            format = int(float(format[0])), 0

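        # e.g. a Format of "1.8" parses to (1, 8) and a bare "1" to (1, 0);
        # the per-hash version gating below is assumed to key off this tuple,
        # since the extra Checksums-* fields only appear in newer formats.
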
        # We need to deal with the original changes blob, as the fields we need
        # might not be in the changes dict serialised into the .dak anymore.
        orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])

        # Copy the checksums over to the current changes dict.  This will keep
        # the existing modifications to it intact.
        for field in orig_changes:
            if field.startswith('checksums-'):
                self.pkg.changes[field] = orig_changes[field]

        # Check for unsupported hashes
        for j in utils.check_hash_fields(".changes", self.pkg.changes):
            self.rejects.append(j)

        for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
            self.rejects.append(j)

        # If the changes format predates a given hash, calculate the hash
        # ourselves rather than requiring it to be present in the changes file
        for hashname, hashfunc, version in utils.known_hashes:
            # TODO: Move _ensure_changes_hash into this class
            for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
                self.rejects.append(j)
            if "source" in self.pkg.changes["architecture"]:
                # TODO: Move _ensure_dsc_hash into this class
                for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
                    self.rejects.append(j)

    def check_hashes(self):
        for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
            self.rejects.append(m)

        for m in utils.check_size(".changes", self.pkg.files):
            self.rejects.append(m)

        for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
            self.rejects.append(m)

        for m in utils.check_size(".dsc", self.pkg.dsc_files):
            self.rejects.append(m)

        self.ensure_hashes()

    ###########################################################################

    def ensure_orig(self, target_dir='.', session=None):
        """
        Ensures that all orig files mentioned in the changes file are present
        in target_dir. If they do not exist, they are symlinked into place.

        A list of the symlinks that were created is returned (so they
        can be removed).
        """

1187         symlinked = []
1188         cnf = Config()
1189
1190         for filename, entry in self.pkg.dsc_files.iteritems():
1191             if not re_is_orig_source.match(filename):
1192                 # File is not an orig; ignore
1193                 continue
1194
1195             if os.path.exists(filename):
1196                 # File exists, no need to continue
1197                 continue
1198
1199             def symlink_if_valid(path):
1200                 f = utils.open_file(path)
1201                 md5sum = apt_pkg.md5sum(f)
1202                 f.close()
1203
1204                 fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
1205                 expected = (int(entry['size']), entry['md5sum'])
1206
1207                 if fingerprint != expected:
1208                     return False
1209
1210                 dest = os.path.join(target_dir, filename)
1211
1212                 os.symlink(path, dest)
1213                 symlinked.append(dest)
1214
1215                 return True
1216
1217             session_ = session
1218             if session is None:
1219                 session_ = DBConn().session()
1220
1221             found = False
1222
1223             # Look in the pool
1224             for poolfile in get_poolfile_like_name('%s' % filename, session_):
1225                 poolfile_path = os.path.join(
1226                     poolfile.location.path, poolfile.filename
1227                 )
1228
1229                 if symlink_if_valid(poolfile_path):
1230                     found = True
1231                     break
1232
1233             if session is None:
1234                 session_.close()
1235
1236             if found:
1237                 continue
1238
1239             # Look in some other queues for the file
1240             queues = ('New', 'Byhand', 'ProposedUpdates',
1241                 'OldProposedUpdates', 'Embargoed', 'Unembargoed')
1242
1243             for queue in queues:
1244                 if not cnf.get('Dir::Queue::%s' % queue):
1245                     continue
1246
1247                 queuefile_path = os.path.join(
1248                     cnf['Dir::Queue::%s' % queue], filename
1249                 )
1250
1251                 if not os.path.exists(queuefile_path):
1252                     # Does not exist in this queue
1253                     continue
1254
1255                 if symlink_if_valid(queuefile_path):
1256                     break
1257
1258         return symlinked
1259
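    # Illustrative usage only (the names here are hypothetical):
    # check_lintian() below relies on ensure_orig() returning the symlinks
    # it created so they can be cleaned up afterwards, e.g.
    #
    #   symlinked = upload.ensure_orig(target_dir='.')
    #   try:
    #       run_checks_needing_origs()      # hypothetical caller
    #   finally:
    #       for link in symlinked:
    #           os.unlink(link)
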
    ###########################################################################

    def check_lintian(self):
        """
        Extends self.rejects by checking the output of lintian against tags
        specified in Dinstall::LintianTags.
        """

        cnf = Config()

        # Don't reject binary uploads
        if not self.pkg.changes['architecture'].has_key('source'):
            return

        # Only check some distributions
        for dist in ('unstable', 'experimental'):
            if dist in self.pkg.changes['distribution']:
                break
        else:
            return

        # If we do not have a tagfile, don't do anything
        tagfile = cnf.get("Dinstall::LintianTags")
        if tagfile is None:
            return

        # Parse the yaml file
        sourcefile = file(tagfile, 'r')
        sourcecontent = sourcefile.read()
        sourcefile.close()

        try:
            lintiantags = yaml.load(sourcecontent)['lintian']
        except yaml.YAMLError, msg:
            utils.fubar("Cannot read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
            return

        # Try and find all origs mentioned in the .dsc
        symlinked = self.ensure_orig()

        # Set up the input file for lintian
        fd, temp_filename = utils.temp_filename()
        temptagfile = os.fdopen(fd, 'w')
        for tags in lintiantags.values():
            temptagfile.writelines(['%s\n' % x for x in tags])
        temptagfile.close()

        try:
            cmd = "lintian --show-overrides --tags-from-file %s %s" % \
                (temp_filename, self.pkg.changes_file)

            result, output = commands.getstatusoutput(cmd)
        finally:
            # Remove our tempfile and any symlinks we created
            os.unlink(temp_filename)

            for symlink in symlinked:
                os.unlink(symlink)

        if result == 2:
            utils.warn("lintian failed for %s [return code: %s]." % \
                (self.pkg.changes_file, result))
            utils.warn(utils.prefix_multi_line_string(output, \
                " [possible output:] "))

        def log(*txt):
            if self.logger:
                self.logger.log(
                    [self.pkg.changes_file, "check_lintian"] + list(txt)
                )

        # Generate messages
        parsed_tags = parse_lintian_output(output)
        self.rejects.extend(
            generate_reject_messages(parsed_tags, lintiantags, log=log)
        )

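    # A sketch of the Dinstall::LintianTags file this method expects: a YAML
    # mapping with a top-level 'lintian' key whose values are lists of tag
    # names (the code above only depends on that shape).  The category names
    # and tags below are illustrative assumptions, not the live configuration:
    #
    #   lintian:
    #     error:
    #       - binary-with-bad-dynamic-table
    #     warning:
    #       - debian-changelog-file-missing
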
    ###########################################################################
    def check_urgency(self):
        cnf = Config()
        if self.pkg.changes["architecture"].has_key("source"):
            if not self.pkg.changes.has_key("urgency"):
                self.pkg.changes["urgency"] = cnf["Urgency::Default"]
            self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
            if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
                self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
                                     (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
                self.pkg.changes["urgency"] = cnf["Urgency::Default"]

    ###########################################################################

    # Sanity check the timestamps of files inside debs.
    # [Files in the near future cause ugly warnings and extreme time
    #  travel can cause errors on extraction]

    def check_timestamps(self):
        Cnf = Config()

        future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
        past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"], "%Y"))
        tar = TarTime(future_cutoff, past_cutoff)

        for filename, entry in self.pkg.files.items():
            if entry["type"] == "deb":
                tar.reset()
                try:
                    deb_file = utils.open_file(filename)
                    apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
                    deb_file.seek(0)
                    try:
                        apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
                    except SystemError, e:
                        # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
                        if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
                            raise
                        deb_file.seek(0)
                        apt_inst.debExtract(deb_file, tar.callback, "data.tar.bz2")

                    deb_file.close()

                    future_files = tar.future_files.keys()
                    if future_files:
                        num_future_files = len(future_files)
                        future_file = future_files[0]
                        future_date = tar.future_files[future_file]
                        self.rejects.append("%s: has %s file(s) with a timestamp too far in the future (e.g. %s [%s])."
                               % (filename, num_future_files, future_file, time.ctime(future_date)))

                    ancient_files = tar.ancient_files.keys()
                    if ancient_files:
                        num_ancient_files = len(ancient_files)
                        ancient_file = ancient_files[0]
                        ancient_date = tar.ancient_files[ancient_file]
                        self.rejects.append("%s: has %s file(s) with a timestamp too ancient (e.g. %s [%s])."
                               % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
                except:
                    self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))

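    # For illustration, with hypothetical config values
    # Dinstall::FutureTimeTravelGrace = "86400" (one day of clock skew) and
    # Dinstall::PastCutoffYear = "1975", the cutoffs above work out to
    #
    #   future_cutoff = time.time() + 86400
    #   past_cutoff   = time.mktime(time.strptime("1975", "%Y"))
    #
    # and TarTime collects members outside [past_cutoff, future_cutoff] into
    # tar.future_files / tar.ancient_files, which trigger the rejects above.
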
    def check_if_upload_is_sponsored(self, uid_email, uid_name):
        if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
            sponsored = False
        elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
            sponsored = False
            if uid_name == "":
                sponsored = True
        else:
            sponsored = True
            if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
                sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
                if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
                    self.pkg.changes["changedbyemail"] not in sponsor_addresses):
                    self.pkg.changes["sponsoremail"] = uid_email

        return sponsored
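    # In short: an upload counts as sponsored when the signing key's identity
    # matches neither Maintainer: nor Changed-By: (an empty uid_name is also
    # treated as sponsored).  For sourceful uploads signed with an email
    # alias, the sponsor's address is recorded in changes["sponsoremail"].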

    ###########################################################################
    # check_signed_by_key checks
    ###########################################################################

    def check_signed_by_key(self):
        """Ensure the .changes is signed by an authorized uploader."""
        session = DBConn().session()

        # First of all we check that the person has proper upload permissions
        # and that this upload isn't blocked
        fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)

        if fpr is None:
            self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
            return

        # TODO: Check that import-keyring adds UIDs properly
        if not fpr.uid:
            self.rejects.append("Cannot find uid for fingerprint %s.  Please contact ftpmaster@debian.org" % fpr.fingerprint)
            return

        # Check that the fingerprint which uploaded has permission to do so
        self.check_upload_permissions(fpr, session)

        # Check that this package is not in a transition
        self.check_transition(session)

        session.close()


    def check_upload_permissions(self, fpr, session):
        # Check any one-off upload blocks
        self.check_upload_blocks(fpr, session)

        # DM is a special case unfortunately, so we check it first
        # (keys with no source access get more access than DMs in one
        #  way; DMs can only upload for their packages whether source
        #  or binary, whereas keys with no access might be able to
        #  upload some binaries)
        if fpr.source_acl.access_level == 'dm':
            self.check_dm_upload(fpr, session)
        else:
            # Check source-based permissions for other types
            if self.pkg.changes["architecture"].has_key("source") and \
                fpr.source_acl.access_level is None:
                rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
                rej += '\nPlease contact ftpmaster if you think this is incorrect'
                self.rejects.append(rej)
                return
            # If not a DM, we allow full upload rights
            uid_email = "%s@debian.org" % (fpr.uid.uid)
            self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)


        # Check binary upload permissions
        # By this point we know that DMs can't have got here unless they
        # are allowed to deal with the package concerned so just apply
        # normal checks
        if fpr.binary_acl.access_level == 'full':
            return

        # Otherwise we're in the map case
        tmparches = self.pkg.changes["architecture"].copy()
        tmparches.pop('source', None)

        for bam in fpr.binary_acl_map:
            tmparches.pop(bam.architecture.arch_string, None)

        if len(tmparches.keys()) > 0:
            if fpr.binary_reject:
                rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
                rej += "\narchitectures involved are: %s" % ",".join(tmparches.keys())
                self.rejects.append(rej)
            else:
                # TODO: This is where we'll implement reject vs throw away binaries later
                rej = "Uhm.  I'm meant to throw away the binaries now but that's not implemented yet"
                rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
                rej += "\nFingerprint: %s" % (fpr.fingerprint)
                self.rejects.append(rej)


    def check_upload_blocks(self, fpr, session):
        """Check whether any upload blocks apply to this source, source
           version, uid / fpr combination"""

        def block_rej_template(fb):
            rej = 'Manual upload block in place for package %s' % fb.source
            if fb.version is not None:
                rej += ', version %s' % fb.version
            return rej

        for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
            # version is None if the block applies to all versions
            if fb.version is None or fb.version == self.pkg.changes['version']:
                # Check both fpr and uid - either is enough to cause a reject
                if fb.fpr is not None:
                    if fb.fpr.fingerprint == fpr.fingerprint:
                        self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
                if fb.uid is not None:
                    if fb.uid == fpr.uid:
                        self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))


    def check_dm_upload(self, fpr, session):
        # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
        ## none of the uploaded packages are NEW
        rej = False
        for f in self.pkg.files.keys():
            if self.pkg.files[f].has_key("byhand"):
                self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
                rej = True
            if self.pkg.files[f].has_key("new"):
                self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
                rej = True

        if rej:
            return

        ## the most recent version of the package uploaded to unstable or
        ## experimental includes the field "DM-Upload-Allowed: yes" in the source
        ## section of its control file
        q = session.query(DBSource).filter_by(source=self.pkg.changes["source"])
        q = q.join(SrcAssociation)
        q = q.join(Suite).filter(Suite.suite_name.in_(['unstable', 'experimental']))
        q = q.order_by(desc('source.version')).limit(1)

        r = q.all()

        if len(r) != 1:
            rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
            self.rejects.append(rej)
            return

        r = r[0]
        if not r.dm_upload_allowed:
            rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
            self.rejects.append(rej)
            return

        ## the Maintainer: field of the uploaded .changes file corresponds with
        ## the owner of the key used (ie, non-developer maintainers may not sponsor
        ## uploads)
        if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
            self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))

        ## the most recent version of the package uploaded to unstable or
        ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
        ## non-developer maintainers cannot NMU or hijack packages)

        # srcuploaders includes the maintainer
        accept = False
        for sup in r.srcuploaders:
            (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
            # Eww - I hope we never have two people with the same name in Debian
            if email == fpr.uid.uid or name == fpr.uid.name:
                accept = True
                break

        if not accept:
            self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
            return

        ## none of the packages are being taken over from other source packages
        for b in self.pkg.changes["binary"].keys():
            for suite in self.pkg.changes["distribution"].keys():
                q = session.query(DBSource)
                q = q.join(DBBinary).filter_by(package=b)
                q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)

                for s in q.all():
                    if s.source != self.pkg.changes["source"]:
                        self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s.source, suite))


    def check_transition(self, session):
        cnf = Config()

        sourcepkg = self.pkg.changes["source"]

        # No sourceful upload -> no need to do anything else, direct return
        # We also work with unstable uploads, not experimental or those going to some
        # proposed-updates queue
        if "source" not in self.pkg.changes["architecture"] or \
           "unstable" not in self.pkg.changes["distribution"]:
            return

        # Also only check if there is a file defined (and existent) with
        # checks.
        transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
        if transpath == "" or not os.path.exists(transpath):
            return

        # Parse the yaml file
        sourcefile = file(transpath, 'r')
        sourcecontent = sourcefile.read()
        sourcefile.close()
        try:
            transitions = yaml.load(sourcecontent)
        except yaml.YAMLError, msg:
            # This shouldn't happen, as there is a wrapper to edit the file
            # which checks it, but we prefer to be safe rather than end up
            # rejecting everything.
            utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
            return

        # Now look through all defined transitions
        for trans in transitions:
            t = transitions[trans]
            source = t["source"]
            expected = t["new"]

            # Will be None if nothing is in testing.
            current = get_source_in_suite(source, "testing", session)
            if current is not None:
                compare = apt_pkg.VersionCompare(current.version, expected)

            if current is None or compare < 0:
                # This is still valid, the current version in testing is older than
                # the new version we wait for, or there is none in testing yet

                # Check if the source we look at is affected by this.
                if sourcepkg in t['packages']:
                    # The source is affected, let's reject it.

                    rejectmsg = "%s: part of the %s transition.\n\n" % (
                        sourcepkg, trans)

                    if current is not None:
                        currentlymsg = "at version %s" % (current.version)
                    else:
                        currentlymsg = "not present in testing"

                    rejectmsg += "Transition description: %s\n\n" % (t["reason"])

                    rejectmsg += "\n".join(textwrap.wrap("""Your package
is part of a testing transition designed to get %s migrated (it is
currently %s, we need version %s).  This transition is managed by the
Release Team, and %s is the Release-Team member responsible for it.
Please mail debian-release@lists.debian.org or contact %s directly if you
need further assistance.  You might want to upload to experimental until this
transition is done."""
                            % (source, currentlymsg, expected, t["rm"], t["rm"])))

                    self.rejects.append(rejectmsg)
                    return

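    # For reference, a transitions file entry this loop can consume might
    # look like the sketch below.  The field names are the ones read above;
    # the transition name and values are made up:
    #
    #   glibc_transition:
    #     source: glibc
    #     new: 2.7-1
    #     rm: Joe Releaseteam
    #     reason: "Rebuild everything against the new glibc"
    #     packages:
    #       - glibc
    #       - base-installer
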
    ###########################################################################
    # End check_signed_by_key checks
    ###########################################################################

    def build_summaries(self):
        """ Build a summary of changes the upload introduces. """

        (byhand, new, summary, override_summary) = self.pkg.file_summary()

        short_summary = summary

        # This is for direport's benefit...
        f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))

        if byhand or new:
            summary += "Changes: " + f

        summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"

        summary += self.announce(short_summary, 0)

        return (summary, short_summary)

    ###########################################################################

    def close_bugs(self, summary, action):
        """
        Send mail to close bugs as instructed by the closes field in the changes file.
        Also add a line to summary if any work was done.

        @type summary: string
        @param summary: summary text, as given by L{build_summaries}

        @type action: bool
        @param action: if False, no real action will be taken

        @rtype: string
        @return: summary. If action was taken, extended by the list of closed bugs.

        """

        template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')

        bugs = self.pkg.changes["closes"].keys()

        if not bugs:
            return summary

        bugs.sort()
        summary += "Closing bugs: "
        for bug in bugs:
            summary += "%s " % (bug)
            if action:
                self.update_subst()
                self.Subst["__BUG_NUMBER__"] = bug
                if self.pkg.changes["distribution"].has_key("stable"):
                    self.Subst["__STABLE_WARNING__"] = """
Note that this package is not part of the released stable Debian
distribution.  It may have dependencies on other unreleased software,
or other instabilities.  Please take care if you wish to install it.
The update will eventually make its way into the next released Debian
distribution."""
                else:
                    self.Subst["__STABLE_WARNING__"] = ""
                mail_message = utils.TemplateSubst(self.Subst, template)
                utils.send_mail(mail_message)

                # Clear up after ourselves
                del self.Subst["__BUG_NUMBER__"]
                del self.Subst["__STABLE_WARNING__"]

        if action and self.logger:
            self.logger.log(["closing bugs"] + bugs)

        summary += "\n"

        return summary

    ###########################################################################

    def announce(self, short_summary, action):
        """
        Send an announce mail about a new upload.

        @type short_summary: string
        @param short_summary: Short summary text to include in the mail

        @type action: bool
        @param action: if False, no real action will be taken

        @rtype: string
        @return: text describing the action taken

        """

        cnf = Config()
        announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')

        # Only do announcements for source uploads with a recent dpkg-dev installed
        if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
           self.pkg.changes["architecture"].has_key("source"):
            return ""

        lists_done = {}
        summary = ""

        self.Subst["__SHORT_SUMMARY__"] = short_summary

        for dist in self.pkg.changes["distribution"].keys():
            announce_list = cnf.Find("Suite::%s::Announce" % (dist))
            if announce_list == "" or lists_done.has_key(announce_list):
                continue

            lists_done[announce_list] = 1
            summary += "Announcing to %s\n" % (announce_list)

            if action:
                self.update_subst()
                self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
                if cnf.get("Dinstall::TrackingServer") and \
                   self.pkg.changes["architecture"].has_key("source"):
                    trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
                    self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto

                mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
                utils.send_mail(mail_message)

                del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]

        if cnf.FindB("Dinstall::CloseBugs"):
            summary = self.close_bugs(summary, action)

        del self.Subst["__SHORT_SUMMARY__"]

        return summary

    ###########################################################################
    @session_wrapper
    def accept(self, summary, short_summary, session=None):
        """
        Accept an upload.

        This moves all files referenced from the .changes into the pool,
        sends the accepted mail, announces to lists, closes bugs and
        also checks for override disparities. If enabled it will write out
        the version history for the BTS Version Tracking and will finally call
        L{queue_build}.

        @type summary: string
        @param summary: Summary text

        @type short_summary: string
        @param short_summary: Short summary
        """

        cnf = Config()
        stats = SummaryStats()

        print "Installing."
        self.logger.log(["installing changes", self.pkg.changes_file])

        poolfiles = []

        # Add the .dsc file to the DB first
        for newfile, entry in self.pkg.files.items():
            if entry["type"] == "dsc":
                source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
                for j in pfs:
                    poolfiles.append(j)

        # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
        for newfile, entry in self.pkg.files.items():
            if entry["type"] == "deb":
                poolfiles.append(add_deb_to_db(self, newfile, session))

        # If this is a sourceful diff only upload that is moving
        # cross-component we need to copy the .orig files into the new
        # component too for the same reasons as above.
        # XXX: mhy: I think this should be in add_dsc_to_db
        if self.pkg.changes["architecture"].has_key("source"):
            for orig_file in self.pkg.orig_files.keys():
                if not self.pkg.orig_files[orig_file].has_key("id"):
                    continue # Skip if it's not in the pool
                orig_file_id = self.pkg.orig_files[orig_file]["id"]
                if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
                    continue # Skip if the location didn't change

                # Do the move
                oldf = get_poolfile_by_id(orig_file_id, session)
                old_filename = os.path.join(oldf.location.path, oldf.filename)
                old_dat = {'size': oldf.filesize,   'md5sum': oldf.md5sum,
                           'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}

                new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))

                # TODO: Care about size/md5sum collisions etc
                (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)

                # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
                if newf is None:
                    utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
                    newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)

                    session.flush()

                    # Don't reference the old file from this changes
                    # (filter rather than removing while iterating)
                    poolfiles = [ p for p in poolfiles if p.file_id != oldf.file_id ]

                    poolfiles.append(newf)

                    # Fix up the DSC references
                    toremove = []

                    for df in source.srcfiles:
                        if df.poolfile.file_id == oldf.file_id:
                            # Add a new DSC entry and mark the old one for deletion
                            # Don't do it in the loop so we don't change the thing we're iterating over
                            newdscf = DSCFile()
                            newdscf.source_id = source.source_id
                            newdscf.poolfile_id = newf.file_id
                            session.add(newdscf)

                            toremove.append(df)

                    for df in toremove:
                        session.delete(df)

                    # Flush our changes
                    session.flush()

                    # Make sure that our source object is up-to-date
                    session.expire(source)

        # Add changelog information to the database
        self.store_changelog()

        # Install the files into the pool
        for newfile, entry in self.pkg.files.items():
            destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
            utils.move(newfile, destination)
            self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
            stats.accept_bytes += float(entry["size"])

        # Copy the .changes file across for suites which need it.
        copy_changes = {}
        for suite_name in self.pkg.changes["distribution"].keys():
            if cnf.has_key("Suite::%s::CopyChanges" % (suite_name)):
                copy_changes[cnf["Suite::%s::CopyChanges" % (suite_name)]] = ""

        for dest in copy_changes.keys():
            utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))

        # We're done - commit the database changes
        session.commit()
        # Our SQL session will automatically start a new transaction after
        # the last commit

        # Move the .changes into the 'done' directory
        utils.move(self.pkg.changes_file,
                   os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))

        if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
            UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])

        self.update_subst()
        self.Subst["__SUITE__"] = ""
        self.Subst["__SUMMARY__"] = summary
        mail_message = utils.TemplateSubst(self.Subst,
                                           os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
        utils.send_mail(mail_message)
        self.announce(short_summary, 1)

        ## Helper stuff for DebBugs Version Tracking
        if cnf.Find("Dir::Queue::BTSVersionTrack"):
            if self.pkg.changes["architecture"].has_key("source"):
                (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
                version_history = os.fdopen(fd, 'w')
                version_history.write(self.pkg.dsc["bts changelog"])
                version_history.close()
                filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
                                      self.pkg.changes_file[:-8]+".versions")
                os.rename(temp_filename, filename)
                os.chmod(filename, 0644)

            # Write out the binary -> source mapping.
            (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
            debinfo = os.fdopen(fd, 'w')
            for name, entry in sorted(self.pkg.files.items()):
                if entry["type"] == "deb":
                    line = " ".join([entry["package"], entry["version"],
                                     entry["architecture"], entry["source package"],
                                     entry["source version"]])
                    debinfo.write(line+"\n")
            debinfo.close()
            filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
                                  self.pkg.changes_file[:-8]+".debinfo")
            os.rename(temp_filename, filename)
            os.chmod(filename, 0644)

        session.commit()

        # Set up our copy queues (e.g. buildd queues)
        for suite_name in self.pkg.changes["distribution"].keys():
            suite = get_suite(suite_name, session)
            for q in suite.copy_queues:
                for f in poolfiles:
                    q.add_file_from_pool(f)

        session.commit()

        # Finally...
        stats.accept_count += 1

    def check_override(self):
        """
        Checks override entries for validity. Mails "Override disparity" warnings,
        if that feature is enabled.

        Abandons the check if
          - override disparity checks are disabled
          - mail sending is disabled
        """

        cnf = Config()

        # Abandon the check if override disparity checks have been disabled
        if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
            return

        summary = self.pkg.check_override()

        if summary == "":
            return

        overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')

        self.update_subst()
        self.Subst["__SUMMARY__"] = summary
        mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
        utils.send_mail(mail_message)
        del self.Subst["__SUMMARY__"]

    ###########################################################################

    def remove(self, from_dir=None):
        """
        Used (for instance) in p-u to remove the package from unchecked

        Also removes the package from holding area.
        """
        if from_dir is None:
            from_dir = self.pkg.directory
        h = Holding()

        for f in self.pkg.files.keys():
            os.unlink(os.path.join(from_dir, f))
            if os.path.exists(os.path.join(h.holding_dir, f)):
                os.unlink(os.path.join(h.holding_dir, f))

        os.unlink(os.path.join(from_dir, self.pkg.changes_file))
        if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
            os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))

    ###########################################################################

    def move_to_queue(self, queue):
        """
        Move files to a destination queue using the permissions in the table
        """
        h = Holding()
        utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
                   queue.path, perms=int(queue.change_perms, 8))
        for f in self.pkg.files.keys():
            utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))

    ###########################################################################

    def force_reject(self, reject_files):
        """
        Forcefully move files from the current directory to the
        reject directory.  If any file already exists in the reject
        directory it will be moved to the morgue to make way for
        the new file.

        @type reject_files: dict
        @param reject_files: file dictionary

        """

        cnf = Config()

        for file_entry in reject_files:
            # Skip any files which don't exist or which we don't have permission to copy.
            if not os.access(file_entry, os.R_OK):
                continue

            dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)

            try:
                dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
            except OSError, e:
                # File exists?  Let's find a new name by adding a number
                if e.errno == errno.EEXIST:
                    try:
                        dest_file = utils.find_next_free(dest_file, 255)
                    except NoFreeFilenameError:
                        # Something's either gone badly Pete Tong, or
                        # someone is trying to exploit us.
                        utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))
                        return

                    # Make sure we really got it
                    try:
                        dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
                    except OSError, e:
                        # Likewise
                        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
                        return
                else:
                    raise
            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file_entry, dest_file, 1, perms=0660)
            os.close(dest_fd)

    ###########################################################################
    def do_reject(self, manual=0, reject_message="", notes=""):
        """
        Reject an upload. If C{manual} is true and no reject message was
        given, spawn an editor so the user can write one.

        @type manual: bool
        @param manual: manual or automated rejection

        @type reject_message: string
        @param reject_message: A reject message

        @return: 0

        """
        # If we weren't given a manual rejection message, spawn an
        # editor so the user can add one in...
        if manual and not reject_message:
            (fd, temp_filename) = utils.temp_filename()
            temp_file = os.fdopen(fd, 'w')
            if len(notes) > 0:
                for note in notes:
                    temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
                                    % (note.author, note.version, note.notedate, note.comment))
            temp_file.close()
            editor = os.environ.get("EDITOR", "vi")
            answer = 'E'
            while answer == 'E':
                os.system("%s %s" % (editor, temp_filename))
                temp_fh = utils.open_file(temp_filename)
                reject_message = "".join(temp_fh.readlines())
                temp_fh.close()
                print "Reject message:"
                print utils.prefix_multi_line_string(reject_message, "  ", include_blank_lines=1)
                prompt = "[R]eject, Edit, Abandon, Quit ?"
                answer = "XXX"
                while prompt.find(answer) == -1:
                    answer = utils.our_raw_input(prompt)
                    m = re_default_answer.search(prompt)
                    if answer == "":
                        answer = m.group(1)
                    answer = answer[:1].upper()
            os.unlink(temp_filename)
            if answer == 'A':
                return 1
            elif answer == 'Q':
                sys.exit(0)

        print "Rejecting.\n"

        cnf = Config()

        reason_filename = self.pkg.changes_file[:-8] + ".reason"
        reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)

        # Move all the files into the reject directory
        reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
        self.force_reject(reject_files)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reason_filename):
            os.unlink(reason_filename)
        reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)

        rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")

        self.update_subst()
        if not manual:
            self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
            self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
            self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
            os.write(reason_fd, reject_message)
            reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
        else:
            # Build up the rejection email
            user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
            self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
            self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
            self.Subst["__REJECT_MESSAGE__"] = ""
            self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
            reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
            # Write the rejection email out as the <foo>.reason file
            os.write(reason_fd, reject_mail_message)

        del self.Subst["__REJECTOR_ADDRESS__"]
        del self.Subst["__MANUAL_REJECT_MESSAGE__"]
        del self.Subst["__CC__"]

        os.close(reason_fd)

        # Send the rejection mail
        utils.send_mail(reject_mail_message)

        if self.logger:
            self.logger.log(["rejected", self.pkg.changes_file])

        return 0

    ################################################################################
    def in_override_p(self, package, component, suite, binary_type, filename, session):
        """
        Check if a package already has override entries in the DB

        @type package: string
        @param package: package name

        @type component: string
        @param component: component name

        @type suite: string
        @param suite: suite name

        @type binary_type: string
        @param binary_type: type of the package

        @type filename: string
        @param filename: filename we check

        @return: the database result. But no one cares anyway.

        """

        cnf = Config()

        if binary_type == "": # must be source
            file_type = "dsc"
        else:
            file_type = binary_type

        # Override suite name; used for example with proposed-updates
        if cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
            suite = cnf["Suite::%s::OverrideSuite" % (suite)]

        result = get_override(package, suite, component, file_type, session)

        # If checking for a source package fall back on the binary override type
        if file_type == "dsc" and len(result) < 1:
            result = get_override(package, suite, component, ['deb', 'udeb'], session)

        # Remember the section and priority so we can check them later if appropriate
        if len(result) > 0:
            result = result[0]
            self.pkg.files[filename]["override section"] = result.section.section
            self.pkg.files[filename]["override priority"] = result.priority.priority
            return result

        return None

    ################################################################################
    def get_anyversion(self, sv_list, suite):
        """
        @type sv_list: list
        @param sv_list: list of (suite, version) tuples to check

        @type suite: string
        @param suite: suite name

        Returns the highest version in sv_list seen in the given suite or in
        any suite that enhances it (Suite::<suite>::VersionChecks::Enhances).
        """
        Cnf = Config()
        anyversion = None
        anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
        for (s, v) in sv_list:
            if s in [ x.lower() for x in anysuite ]:
                if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
                    anyversion = v

        return anyversion

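    # For example (values and configuration made up): with sv_list of
    # [('unstable', '1.0-1'), ('experimental', '1.1-1')] and a hypothetical
    # Suite::unstable::VersionChecks::Enhances listing experimental,
    # get_anyversion(sv_list, 'unstable') would return '1.1-1'.
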
    ################################################################################

    def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
        """
        @type sv_list: list
        @param sv_list: list of (suite, version) tuples to check

        @type filename: string
        @param filename: XXX

        @type new_version: string
        @param new_version: XXX

        Ensure versions are newer than existing packages in target
        suites and that cross-suite version checking rules as
        set out in the conf file are satisfied.
        """

        cnf = Config()

        # Check versions for each target suite
        for target_suite in self.pkg.changes["distribution"].keys():
            must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
            must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]

            # Enforce "must be newer than target suite" even if conffile omits it
            if target_suite not in must_be_newer_than:
                must_be_newer_than.append(target_suite)

            for (suite, existent_version) in sv_list:
                vercmp = apt_pkg.VersionCompare(new_version, existent_version)

                if suite in must_be_newer_than and sourceful and vercmp < 1:
                    self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))

                if suite in must_be_older_than and vercmp > -1:
                    cansave = 0

                    if self.pkg.changes.get('distribution-version', {}).has_key(suite):
                        # we really use the other suite, ignoring the conflicting one ...
                        addsuite = self.pkg.changes["distribution-version"][suite]

                        add_version = self.get_anyversion(sv_list, addsuite)
                        target_version = self.get_anyversion(sv_list, target_suite)

                        if not add_version:
                            # not add_version can only happen if we map to a suite
                            # that doesn't enhance the suite we're propup'ing from.
                            # so "propup-ver x a b c; map a d" is a problem only if
                            # d doesn't enhance a.
                            #
                            # i think we could always propagate in this case, rather
                            # than complaining. either way, this isn't a REJECT issue
                            #
                            # And - we really should complain to the dorks who configured dak
                            self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyway" % (suite, addsuite))
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            cansave = 1
                        elif not target_version:
                            # not target_version is true when the package is NEW
                            # we could just stick with the "...old version..." REJECT
                            # for this, I think.
                            self.rejects.append("Won't propagate NEW packages.")
                        elif apt_pkg.VersionCompare(new_version, add_version) < 0:
                            # propagation would be redundant. no need to reject though.
                            self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
                            cansave = 1
                        elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
                             apt_pkg.VersionCompare(add_version, target_version) >= 0:
                            # propagate!!
                            self.warnings.append("Propagating upload to %s" % (addsuite))
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            cansave = 1

                    if not cansave:
                        self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))

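    # A sketch of the apt-style configuration stanza this check reads; the
    # suite names are illustrative, not the live setup:
    #
    #   Suite::unstable::VersionChecks {
    #     MustBeNewerThan { stable; testing; };
    #     MustBeOlderThan { experimental; };
    #   };
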
    ################################################################################
    def check_binary_against_db(self, filename, session):
        # Ensure version is sane
        q = session.query(BinAssociation)
        q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[filename]["package"])
        q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[filename]["architecture"], 'all']))

        self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
                                       filename, self.pkg.files[filename]["version"], sourceful=False)

        # Check for any existing copies of the file
        q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
        q = q.filter_by(version=self.pkg.files[filename]["version"])
        q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])

        if q.count() > 0:
            self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)

    ################################################################################

    def check_source_against_db(self, filename, session):
        source = self.pkg.dsc.get("source")
        version = self.pkg.dsc.get("version")

        # Ensure version is sane
        q = session.query(SrcAssociation)
        q = q.join(DBSource).filter(DBSource.source==source)

        self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
                                       filename, version, sourceful=True)

2370     ################################################################################
2371     def check_dsc_against_db(self, filename, session):
2372         """
2373
2374         @warning: NB: this function can remove entries from the 'files' index [if
2375          the orig tarball is a duplicate of the one in the archive]; if
2376          you're iterating over 'files' and call this function as part of
2377          the loop, be sure to add a check to the top of the loop to
2378          ensure you haven't just tried to dereference the deleted entry.
2379
2380         """
2381
2382         Cnf = Config()
2383         self.pkg.orig_files = {} # XXX: do we need to clear it?
2384         orig_files = self.pkg.orig_files
2385
2386         # Try and find all files mentioned in the .dsc.  This has
2387         # to work harder to cope with the multiple possible
2388         # locations of an .orig.tar.gz.
2389         # The ordering on the select is needed to pick the newest orig
2390         # when it exists in multiple places.
2391         for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2392             found = None
2393             if self.pkg.files.has_key(dsc_name):
2394                 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2395                 actual_size = int(self.pkg.files[dsc_name]["size"])
2396                 found = "%s in incoming" % (dsc_name)
2397
2398                 # Check the file does not already exist in the archive
2399                 ql = get_poolfile_like_name(dsc_name, session)
2400
2401                 # Strip out anything that isn't '%s' or '/%s$'
2402                 for i in ql:
2403                     if not i.filename.endswith(dsc_name):
2404                         ql.remove(i)
2405
2406                 # "[dak] has not broken them.  [dak] has fixed a
2407                 # brokenness.  Your crappy hack exploited a bug in
2408                 # the old dinstall.
2409                 #
2410                 # "(Come on!  I thought it was always obvious that
2411                 # one just doesn't release different files with
2412                 # the same name and version.)"
2413                 #                        -- ajk@ on d-devel@l.d.o
2414
2415                 if len(ql) > 0:
2416                     # Ignore exact matches for .orig.tar.gz
2417                     match = 0
2418                     if re_is_orig_source.match(dsc_name):
2419                         for i in ql:
2420                             if self.pkg.files.has_key(dsc_name) and \
2421                                int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2422                                self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2423                                 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2424                                 # TODO: Don't delete the entry, just mark it as not needed
2425                                 # This would fix the stupidity of changing something we often iterate over
2426                                 # whilst we're doing it
2427                                 del self.pkg.files[dsc_name]
2428                                 dsc_entry["files id"] = i.file_id
2429                                 if not orig_files.has_key(dsc_name):
2430                                     orig_files[dsc_name] = {}
2431                                 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2432                                 match = 1
2433
2434                                 # Don't bitch that we couldn't find this file later
2435                                 try:
2436                                     self.later_check_files.remove(dsc_name)
2437                                 except ValueError:
2438                                     pass
2439
2440
2441                     if not match:
2442                         self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2443
2444             elif re_is_orig_source.match(dsc_name):
2445                 # Check in the pool
2446                 ql = get_poolfile_like_name(dsc_name, session)
2447
                # Strip out anything that isn't '%s' or '/%s$'; rebuild the
                # list rather than removing entries while iterating over it
                # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
                ql = [ i for i in ql if i.filename.endswith(dsc_name) ]
2453
2454                 if len(ql) > 0:
2455                     # Unfortunately, we may get more than one match here if,
2456                     # for example, the package was in potato but had an -sa
2457                     # upload in woody.  So we need to choose the right one.
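                    # e.g. (hypothetical) the pool could hold two copies of
                    # foo_1.0.orig.tar.gz under different locations, one from
                    # potato and one from the -sa upload in woody; the
                    # md5sum/size comparison below picks the copy the .dsc
                    # actually refers to.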
2458
2459                     # default to something sane in case we don't match any or have only one
2460                     x = ql[0]
2461
2462                     if len(ql) > 1:
2463                         for i in ql:
2464                             old_file = os.path.join(i.location.path, i.filename)
2465                             old_file_fh = utils.open_file(old_file)
2466                             actual_md5 = apt_pkg.md5sum(old_file_fh)
2467                             old_file_fh.close()
2468                             actual_size = os.stat(old_file)[stat.ST_SIZE]
2469                             if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2470                                 x = i
2471
                    old_file = os.path.join(x.location.path, x.filename)
2473                     old_file_fh = utils.open_file(old_file)
2474                     actual_md5 = apt_pkg.md5sum(old_file_fh)
2475                     old_file_fh.close()
2476                     actual_size = os.stat(old_file)[stat.ST_SIZE]
2477                     found = old_file
2478                     suite_type = x.location.archive_type
2479                     # need this for updating dsc_files in install()
2480                     dsc_entry["files id"] = x.file_id
2481                     # See install() in process-accepted...
2482                     if not orig_files.has_key(dsc_name):
2483                         orig_files[dsc_name] = {}
2484                     orig_files[dsc_name]["id"] = x.file_id
2485                     orig_files[dsc_name]["path"] = old_file
2486                     orig_files[dsc_name]["location"] = x.location.location_id
2487                 else:
2488                     # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2489                     # Not there? Check the queue directories...
2490                     for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2491                         if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2492                             continue
2493                         in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2494                         if os.path.exists(in_otherdir):
2495                             in_otherdir_fh = utils.open_file(in_otherdir)
2496                             actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2497                             in_otherdir_fh.close()
2498                             actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2499                             found = in_otherdir
2500                             if not orig_files.has_key(dsc_name):
2501                                 orig_files[dsc_name] = {}
2502                             orig_files[dsc_name]["path"] = in_otherdir
2503
2504                     if not found:
2505                         self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2506                         continue
2507             else:
2508                 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2509                 continue
2510             if actual_md5 != dsc_entry["md5sum"]:
2511                 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2512             if actual_size != int(dsc_entry["size"]):
2513                 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2514
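    # A minimal caller-side sketch (illustrative only): because
    # check_dsc_against_db() may delete entries from self.pkg.files,
    # loops that call it should re-check membership at the top, e.g.:
    #
    #   for f in self.pkg.files.keys():
    #       if not self.pkg.files.has_key(f):
    #           continue            # entry was removed behind our back
    #       ...run the per-file checks...
    #
    # recheck() below follows exactly this pattern.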
2515     ################################################################################
    # This is used by process-new and process-holding to recheck a changes file
    # at the time we're running.  It mainly wraps various other internal functions
    # and is similar to accepted_checks - the two should probably be tidied up
    # and combined.
2520     def recheck(self, session):
2521         cnf = Config()
2522         for f in self.pkg.files.keys():
            # The .orig.tar.gz can disappear out from under us if it's a
            # duplicate of one in the archive.
2525             if not self.pkg.files.has_key(f):
2526                 continue
2527
2528             entry = self.pkg.files[f]
2529
2530             # Check that the source still exists
2531             if entry["type"] == "deb":
2532                 source_version = entry["source version"]
2533                 source_package = entry["source package"]
2534                 if not self.pkg.changes["architecture"].has_key("source") \
2535                    and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
2536                     source_epochless_version = re_no_epoch.sub('', source_version)
2537                     dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
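                    # (the epoch is stripped because Debian filenames never
                    # carry it: e.g. foo at version 1:1.2-3 -> foo_1.2-3.dsc)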
2538                     found = False
2539                     for q in ["Embargoed", "Unembargoed", "Newstage"]:
2540                         if cnf.has_key("Dir::Queue::%s" % (q)):
                            if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (q)], dsc_filename)):
2542                                 found = True
2543                     if not found:
2544                         self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2545
2546             # Version and file overwrite checks
2547             if entry["type"] == "deb":
2548                 self.check_binary_against_db(f, session)
2549             elif entry["type"] == "dsc":
2550                 self.check_source_against_db(f, session)
2551                 self.check_dsc_against_db(f, session)
2552
2553     ################################################################################
2554     def accepted_checks(self, overwrite_checks, session):
        # Recheck anything that relies on the database, since that's not
        # frozen between accept and our run time when called from p-a.
2557
2558         # overwrite_checks is set to False when installing to stable/oldstable
2559
        propagate={}
        nopropagate={}
2562
2563         # Find the .dsc (again)
2564         dsc_filename = None
2565         for f in self.pkg.files.keys():
2566             if self.pkg.files[f]["type"] == "dsc":
2567                 dsc_filename = f
2568
2569         for checkfile in self.pkg.files.keys():
            # The .orig.tar.gz can disappear out from under us if it's a
            # duplicate of one in the archive.
2572             if not self.pkg.files.has_key(checkfile):
2573                 continue
2574
2575             entry = self.pkg.files[checkfile]
2576
2577             # Check that the source still exists
2578             if entry["type"] == "deb":
2579                 source_version = entry["source version"]
2580                 source_package = entry["source package"]
2581                 if not self.pkg.changes["architecture"].has_key("source") \
                   and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
2583                     self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2584
2585             # Version and file overwrite checks
2586             if overwrite_checks:
2587                 if entry["type"] == "deb":
2588                     self.check_binary_against_db(checkfile, session)
2589                 elif entry["type"] == "dsc":
2590                     self.check_source_against_db(checkfile, session)
2591                     self.check_dsc_against_db(dsc_filename, session)
2592
            # propagate in the case it is in the override tables:
            for suite in self.pkg.changes.get("propdistribution", {}).keys():
                if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
                    propagate[suite] = 1
                else:
                    nopropagate[suite] = 1
2599
        for suite in propagate.keys():
            if suite in nopropagate:
2602                 continue
2603             self.pkg.changes["distribution"][suite] = 1
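        # Net effect: a suite listed in the propdistribution field is added
        # to the target distributions only if every file in the upload
        # already has an override entry there; a single missing override
        # vetoes the propagation.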
2604
        for checkfile in self.pkg.files.keys():
            entry = self.pkg.files[checkfile]
            # Check the package is still in the override tables
            for suite in self.pkg.changes["distribution"].keys():
                if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
                    self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2610
2611     ################################################################################
    # This is not really a reject, but an unaccept; since a) the code for
    # that is non-trivial (reopen bugs, unannounce etc.) and b) it should be
    # extremely rare, for now we'll go with whining at our admin folks...
2615
2616     def do_unaccept(self):
2617         cnf = Config()
2618
2619         self.update_subst()
2620         self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2621         self.Subst["__REJECT_MESSAGE__"] = self.package_info()
2622         self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2623         self.Subst["__BCC__"] = "X-DAK: dak process-accepted"
2624         if cnf.has_key("Dinstall::Bcc"):
2625             self.Subst["__BCC__"] += "\nBcc: %s" % (cnf["Dinstall::Bcc"])
2626
2627         template = os.path.join(cnf["Dir::Templates"], "process-accepted.unaccept")
2628
2629         reject_mail_message = utils.TemplateSubst(self.Subst, template)
2630
2631         # Write the rejection email out as the <foo>.reason file
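        # ([:-8] strips the trailing ".changes", so e.g.
        #  foo_1.0_i386.changes becomes foo_1.0_i386.reason)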
2632         reason_filename = os.path.basename(self.pkg.changes_file[:-8]) + ".reason"
2633         reject_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2634
2635         # If we fail here someone is probably trying to exploit the race
2636         # so let's just raise an exception ...
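        # (the O_CREAT|O_EXCL open below is what raises: it fails with
        # EEXIST if someone recreates the file between the unlink here
        # and the open)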
2637         if os.path.exists(reject_filename):
2638             os.unlink(reject_filename)
2639
2640         fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2641         os.write(fd, reject_mail_message)
2642         os.close(fd)
2643
2644         utils.send_mail(reject_mail_message)
2645
2646         del self.Subst["__REJECTOR_ADDRESS__"]
2647         del self.Subst["__REJECT_MESSAGE__"]
2648         del self.Subst["__CC__"]
2649
2650     ################################################################################
2651     # If any file of an upload has a recent mtime then chances are good
2652     # the file is still being uploaded.
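    # e.g. with Dinstall::SkipTime set to 300, anything modified within the
    # last five minutes postpones processing of the whole upload (300 is an
    # illustrative value, not a recommendation).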
2653
2654     def upload_too_new(self):
2655         cnf = Config()
2656         too_new = False
        # Move into the upload's directory so the bare filenames below
        # resolve to the right files and we read accurate time stamps
2658         cwd = os.getcwd()
2659         os.chdir(self.pkg.directory)
2660         file_list = self.pkg.files.keys()
2661         file_list.extend(self.pkg.dsc_files.keys())
2662         file_list.append(self.pkg.changes_file)
2663         for f in file_list:
2664             try:
2665                 last_modified = time.time()-os.path.getmtime(f)
2666                 if last_modified < int(cnf["Dinstall::SkipTime"]):
2667                     too_new = True
2668                     break
            except OSError:
                # file vanished or is unreadable; skip it and check the rest
                pass
2671
2672         os.chdir(cwd)
2673         return too_new
2674
2675     def store_changelog(self):
2676
2677         # Skip binary-only upload if it is not a bin-NMU
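        # (a bin-NMU version carries a '+bN' suffix, e.g. 1.2-3+b1, which
        # is what re_bin_only_nmu looks for)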
2678         if not self.pkg.changes['architecture'].has_key('source'):
2679             from daklib.regexes import re_bin_only_nmu
2680             if not re_bin_only_nmu.search(self.pkg.changes['version']):
2681                 return
2682
2683         session = DBConn().session()
2684
2685         # Check if upload already has a changelog entry
2686         query = """SELECT changelog_id FROM changes WHERE source = :source
2687                    AND version = :version AND architecture = :architecture AND changelog_id != 0"""
2688         if session.execute(query, {'source': self.pkg.changes['source'], \
2689                                    'version': self.pkg.changes['version'], \
2690                                    'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
2691             session.commit()
2692             return
2693
2694         # Add current changelog text into changelogs_text table, return created ID
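        # (INSERT ... RETURNING is PostgreSQL-specific syntax; dak runs on
        # postgres, so we can grab the new id without a second query)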
2695         query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
2696         ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]
2697
2698         # Link ID to the upload available in changes table
2699         query = """UPDATE changes SET changelog_id = :id WHERE source = :source
2700                    AND version = :version AND architecture = :architecture"""
2701         session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
2702                                 'version': self.pkg.changes['version'], \
2703                                 'architecture': " ".join(self.pkg.changes['architecture'].keys())})
2704
2705         session.commit()