# Source: daklib/queue.py from dak.git (git.decadent.org.uk),
# blob bb62aaeee273c506502fef112218ac61dfb3321a
1 #!/usr/bin/env python
2 # vim:set et sw=4:
3
4 """
5 Queue utility functions for dak
6
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009  Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
11 """
12
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
17
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
21 # GNU General Public License for more details.
22
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
26
27 ###############################################################################
28
29 import errno
30 import os
31 import stat
32 import sys
33 import time
34 import apt_inst
35 import apt_pkg
36 import utils
37 import commands
38 import shutil
39 import textwrap
40 from types import *
41 from sqlalchemy.sql.expression import desc
42
43 import yaml
44
45 from dak_exceptions import *
46 from changes import *
47 from regexes import *
48 from config import Config
49 from holding import Holding
50 from dbconn import *
51 from summarystats import SummaryStats
52 from utils import parse_changes, check_dsc_files
53 from textutils import fix_maintainer
54 from binary import Binary
55
56 ###############################################################################
57
def get_type(f, session):
    """
    Get the file type of C{f}

    @type f: dict
    @param f: file entry from Changes object

    @type session: SQLA Session
    @param session: SQL Alchemy session object

    @rtype: string
    @return: filetype

    """
    # Determine the type
    if f.has_key("dbtype"):
        file_type = f["dbtype"]
    elif re_source_ext.match(f["type"]):
        file_type = "dsc"
    else:
        # Bug fix: file_type is unassigned on this branch, so formatting
        # the message with it raised NameError instead of reporting the
        # actual offending type.  Use the value from the file entry.
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (f["type"]))

    # Validate the override type
    type_id = get_override_type(file_type, session)
    if type_id is None:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))

    return file_type
86
87 ################################################################################
88
89 # Determine what parts in a .changes are NEW
90
def determine_new(changes, files, warn=1):
    """
    Determine what parts in a C{changes} file are NEW.

    @type changes: Upload.Pkg.changes dict
    @param changes: Changes dictionary

    @type files: Upload.Pkg.files dict
    @param files: Files dictionary

    @type warn: bool
    @param warn: Warn if overrides are added for (old)stable

    @rtype: dict
    @return: dictionary of NEW components.

    """
    new = {}

    session = DBConn().session()

    # Collect everything that might be NEW, keyed by package name.
    for filename, entry in files.items():
        # byhand elements are handled elsewhere and are never NEW
        if entry["type"] == "byhand":
            continue

        pkg = entry["package"]
        file_type = get_type(entry, session)

        # Source uploads always carry the pseudo-priority "source"
        if file_type == "dsc":
            priority = "source"
        else:
            priority = entry["priority"]

        if pkg not in new:
            new[pkg] = {
                "priority":  priority,
                "section":   entry["section"],
                "type":      file_type,
                "component": entry["component"],
                "files":     [],
            }
        elif new[pkg]["type"] != file_type and new[pkg]["type"] == "dsc":
            # source gets trumped by deb or udeb
            new[pkg]["priority"] = priority
            new[pkg]["section"] = entry["section"]
            new[pkg]["type"] = file_type
            new[pkg]["component"] = entry["component"]

        new[pkg]["files"].append(filename)

        if "othercomponents" in entry:
            new[pkg]["othercomponents"] = entry["othercomponents"]

    # Anything already carrying an override in a target suite is not NEW.
    for suite in changes["suite"].keys():
        for pkg in list(new.keys()):
            ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
            if len(ql) > 0:
                for file_entry in new[pkg]["files"]:
                    if "new" in files[file_entry]:
                        del files[file_entry]["new"]
                del new[pkg]

    if warn:
        for s in ['stable', 'oldstable']:
            if s in changes["suite"]:
                print("WARNING: overrides will be added for %s!" % s)
        for pkg in new.keys():
            if "othercomponents" in new[pkg]:
                print("WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"]))

    session.close()

    return new
168
169 ################################################################################
170
def check_valid(new):
    """
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages can not be udeb
      - source priority can only be assigned to dsc file types

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.

    """
    for pkg in new.keys():
        section_name = new[pkg]["section"]
        priority_name = new[pkg]["priority"]
        file_type = new[pkg]["type"]

        # Resolve the section name to its database id; -1 marks it invalid.
        section = get_section(section_name)
        if section is None:
            new[pkg]["section id"] = -1
        else:
            new[pkg]["section id"] = section.section_id

        # Likewise for the priority.
        priority = get_priority(priority_name)
        if priority is None:
            new[pkg]["priority id"] = -1
        else:
            new[pkg]["priority id"] = priority.priority_id

        # Sanity checks
        di = section_name.find("debian-installer") != -1

        # If d-i, we must be udeb and vice-versa
        if     (di and file_type not in ("udeb", "dsc")) or \
           (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1

        # If dsc we need to be source and vice-versa.
        # Bug fix: compare the priority *name*; ``priority`` is a database
        # object (or None), so comparing it to the string "source" was
        # always unequal and wrongly invalidated every dsc entry.
        if (priority_name == "source" and file_type != "dsc") or \
           (priority_name != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1
212
213 ###############################################################################
214
def check_status(files):
    """
    Scan an upload's files for NEW and byhand elements.

    @type files: dict
    @param files: Files dictionary (filename -> file entry dict)

    @rtype: tuple
    @return: (new, byhand) flags; each is 1 if any such file exists, else 0.
    """
    new = byhand = 0
    for f in files.keys():
        entry = files[f]
        if entry["type"] == "byhand":
            byhand = 1
        elif "new" in entry:
            # dict.has_key() is Python-2-only; the "in" operator is
            # equivalent and works on Python 2 and 3 alike.
            new = 1
    return (new, byhand)
223
224 ###############################################################################
225
# Used by Upload.check_timestamps
class TarTime(object):
    """
    Collects tar members whose mtimes fall outside an allowed window.

    Members newer than ``future_cutoff`` are recorded in ``future_files``
    and members older than ``past_cutoff`` in ``ancient_files``, keyed by
    member name with the offending mtime as the value.
    """
    def __init__(self, future_cutoff, past_cutoff):
        self.future_cutoff = future_cutoff
        self.past_cutoff = past_cutoff
        self.reset()

    def reset(self):
        """Forget any previously recorded out-of-range members."""
        self.future_files = {}
        self.ancient_files = {}

    def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
        """Record *Name* when *MTime* lies outside the configured window."""
        if MTime > self.future_cutoff:
            self.future_files[Name] = MTime
        if MTime < self.past_cutoff:
            self.ancient_files[Name] = MTime
242
243 ###############################################################################
244
245 class Upload(object):
246     """
247     Everything that has to do with an upload processed.
248
249     """
    def __init__(self):
        # No logger attached until the caller installs one on self.logger.
        self.logger = None
        # Changes object holding everything parsed from the .changes file.
        self.pkg = Changes()
        # Initialise the substitution map and the reject/warning/note lists.
        self.reset()
254
255     ###########################################################################
256
257     def reset (self):
258         """ Reset a number of internal variables."""
259
260         # Initialize the substitution template map
261         cnf = Config()
262         self.Subst = {}
263         self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
264         self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
265         self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
266         self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
267
268         self.rejects = []
269         self.warnings = []
270         self.notes = []
271
272         self.pkg.reset()
273
274     def package_info(self):
275         """
276         Format various messages from this Upload to send to the maintainer.
277         """
278
279         msgs = (
280             ('Reject Reasons', self.rejects),
281             ('Warnings', self.warnings),
282             ('Notes', self.notes),
283         )
284
285         msg = ''
286         for title, messages in msgs:
287             if messages:
288                 msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
289
290         return msg
291
292     ###########################################################################
    def update_subst(self):
        """
        Set up the per-package template substitution mappings.

        Fills self.Subst from self.pkg.changes and the static
        configuration.  Note the ordering below: the __MAINTAINER_TO__
        entry is first chosen, then possibly appended to (sponsor,
        tracking server Bcc) and finally may be overridden wholesale.
        """

        cnf = Config()

        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not self.pkg.changes.has_key("architecture") or not \
           isinstance(self.pkg.changes["architecture"], dict):
            self.pkg.changes["architecture"] = { "Unknown" : "" }

        # and maintainer2047 may not exist.
        if not self.pkg.changes.has_key("maintainer2047"):
            self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]

        self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
        self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if self.pkg.changes["architecture"].has_key("source") and \
           self.pkg.changes["changedby822"] != "" and \
           (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):

            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
        else:
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")

        # A sponsored upload also notifies the sponsor.
        if "sponsoremail" in self.pkg.changes:
            self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]

        # Bcc the package tracking system, when one is configured.
        if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
            self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if cnf.get("Dinstall::OverrideMaintainer"):
            self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
            self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]

        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
        self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
338
339     ###########################################################################
340     def load_changes(self, filename):
341         """
342         @rtype: boolean
343         @rvalue: whether the changes file was valid or not.  We may want to
344                  reject even if this is True (see what gets put in self.rejects).
345                  This is simply to prevent us even trying things later which will
346                  fail because we couldn't properly parse the file.
347         """
348         Cnf = Config()
349         self.pkg.changes_file = filename
350
351         # Parse the .changes field into a dictionary
352         try:
353             self.pkg.changes.update(parse_changes(filename))
354         except CantOpenError:
355             self.rejects.append("%s: can't read file." % (filename))
356             return False
357         except ParseChangesError, line:
358             self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
359             return False
360         except ChangesUnicodeError:
361             self.rejects.append("%s: changes file not proper utf-8" % (filename))
362             return False
363
364         # Parse the Files field from the .changes into another dictionary
365         try:
366             self.pkg.files.update(utils.build_file_list(self.pkg.changes))
367         except ParseChangesError, line:
368             self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
369             return False
370         except UnknownFormatError, format:
371             self.rejects.append("%s: unknown format '%s'." % (filename, format))
372             return False
373
374         # Check for mandatory fields
375         for i in ("distribution", "source", "binary", "architecture",
376                   "version", "maintainer", "files", "changes", "description"):
377             if not self.pkg.changes.has_key(i):
378                 # Avoid undefined errors later
379                 self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
380                 return False
381
382         # Strip a source version in brackets from the source field
383         if re_strip_srcver.search(self.pkg.changes["source"]):
384             self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])
385
386         # Ensure the source field is a valid package name.
387         if not re_valid_pkg_name.match(self.pkg.changes["source"]):
388             self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))
389
390         # Split multi-value fields into a lower-level dictionary
391         for i in ("architecture", "distribution", "binary", "closes"):
392             o = self.pkg.changes.get(i, "")
393             if o != "":
394                 del self.pkg.changes[i]
395
396             self.pkg.changes[i] = {}
397
398             for j in o.split():
399                 self.pkg.changes[i][j] = 1
400
401         # Fix the Maintainer: field to be RFC822/2047 compatible
402         try:
403             (self.pkg.changes["maintainer822"],
404              self.pkg.changes["maintainer2047"],
405              self.pkg.changes["maintainername"],
406              self.pkg.changes["maintaineremail"]) = \
407                    fix_maintainer (self.pkg.changes["maintainer"])
408         except ParseMaintError, msg:
409             self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
410                    % (filename, self.pkg.changes["maintainer"], msg))
411
412         # ...likewise for the Changed-By: field if it exists.
413         try:
414             (self.pkg.changes["changedby822"],
415              self.pkg.changes["changedby2047"],
416              self.pkg.changes["changedbyname"],
417              self.pkg.changes["changedbyemail"]) = \
418                    fix_maintainer (self.pkg.changes.get("changed-by", ""))
419         except ParseMaintError, msg:
420             self.pkg.changes["changedby822"] = ""
421             self.pkg.changes["changedby2047"] = ""
422             self.pkg.changes["changedbyname"] = ""
423             self.pkg.changes["changedbyemail"] = ""
424
425             self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
426                    % (filename, changes["changed-by"], msg))
427
428         # Ensure all the values in Closes: are numbers
429         if self.pkg.changes.has_key("closes"):
430             for i in self.pkg.changes["closes"].keys():
431                 if re_isanum.match (i) == None:
432                     self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))
433
434         # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
435         self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
436         self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])
437
438         # Check there isn't already a changes file of the same name in one
439         # of the queue directories.
440         base_filename = os.path.basename(filename)
441         if get_knownchange(base_filename):
442             self.rejects.append("%s: a file with this name already exists." % (base_filename))
443
444         # Check the .changes is non-empty
445         if not self.pkg.files:
446             self.rejects.append("%s: nothing to do (Files field is empty)." % (base_filename))
447             return False
448
449         # Changes was syntactically valid even if we'll reject
450         return True
451
452     ###########################################################################
453
    def check_distributions(self):
        """
        Check and map the Distribution field.

        Applies the configured SuiteMappings (map, silent-map,
        map-unreleased, ignore, reject, propup-version) to
        self.pkg.changes["distribution"], then verifies that at least one
        valid, known target suite remains.  Problems are appended to
        self.rejects / self.warnings / self.notes.
        """

        Cnf = Config()

        # Handle suite mappings
        for m in Cnf.ValueList("SuiteMappings"):
            args = m.split()
            mtype = args[0]
            if mtype == "map" or mtype == "silent-map":
                (source, dest) = args[1:3]
                # Replace the source suite with its mapped destination.
                if self.pkg.changes["distribution"].has_key(source):
                    del self.pkg.changes["distribution"][source]
                    self.pkg.changes["distribution"][dest] = 1
                    if mtype != "silent-map":
                        self.notes.append("Mapping %s to %s." % (source, dest))
                # Keep any distribution-version entry in step with the mapping.
                if self.pkg.changes.has_key("distribution-version"):
                    if self.pkg.changes["distribution-version"].has_key(source):
                        self.pkg.changes["distribution-version"][source]=dest
            elif mtype == "map-unreleased":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    for arch in self.pkg.changes["architecture"].keys():
                        # Any architecture not built in the source suite
                        # diverts the whole upload to the destination suite.
                        if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
                            self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
                            del self.pkg.changes["distribution"][source]
                            self.pkg.changes["distribution"][dest] = 1
                            break
            elif mtype == "ignore":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    del self.pkg.changes["distribution"][suite]
                    self.warnings.append("Ignoring %s as a target suite." % (suite))
            elif mtype == "reject":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    self.rejects.append("Uploads to %s are not accepted." % (suite))
            elif mtype == "propup-version":
                # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
                #
                # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
                if self.pkg.changes["distribution"].has_key(args[1]):
                    self.pkg.changes.setdefault("distribution-version", {})
                    for suite in args[2:]:
                        self.pkg.changes["distribution-version"][suite] = suite

        # Ensure there is (still) a target distribution
        if len(self.pkg.changes["distribution"].keys()) < 1:
            self.rejects.append("No valid distribution remaining.")

        # Ensure target distributions exist
        for suite in self.pkg.changes["distribution"].keys():
            if not Cnf.has_key("Suite::%s" % (suite)):
                self.rejects.append("Unknown distribution `%s'." % (suite))
508
509     ###########################################################################
510
511     def binary_file_checks(self, f, session):
512         cnf = Config()
513         entry = self.pkg.files[f]
514
515         # Extract package control information
516         deb_file = utils.open_file(f)
517         try:
518             control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
519         except:
520             self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
521             deb_file.close()
522             # Can't continue, none of the checks on control would work.
523             return
524
525         # Check for mandantory "Description:"
526         deb_file.seek(0)
527         try:
528             apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
529         except:
530             self.rejects.append("%s: Missing Description in binary package" % (f))
531             return
532
533         deb_file.close()
534
535         # Check for mandatory fields
536         for field in [ "Package", "Architecture", "Version" ]:
537             if control.Find(field) == None:
538                 # Can't continue
539                 self.rejects.append("%s: No %s field in control." % (f, field))
540                 return
541
542         # Ensure the package name matches the one give in the .changes
543         if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
544             self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
545
546         # Validate the package field
547         package = control.Find("Package")
548         if not re_valid_pkg_name.match(package):
549             self.rejects.append("%s: invalid package name '%s'." % (f, package))
550
551         # Validate the version field
552         version = control.Find("Version")
553         if not re_valid_version.match(version):
554             self.rejects.append("%s: invalid version number '%s'." % (f, version))
555
556         # Ensure the architecture of the .deb is one we know about.
557         default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
558         architecture = control.Find("Architecture")
559         upload_suite = self.pkg.changes["distribution"].keys()[0]
560
561         if      architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session)] \
562             and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session)]:
563             self.rejects.append("Unknown architecture '%s'." % (architecture))
564
565         # Ensure the architecture of the .deb is one of the ones
566         # listed in the .changes.
567         if not self.pkg.changes["architecture"].has_key(architecture):
568             self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
569
570         # Sanity-check the Depends field
571         depends = control.Find("Depends")
572         if depends == '':
573             self.rejects.append("%s: Depends field is empty." % (f))
574
575         # Sanity-check the Provides field
576         provides = control.Find("Provides")
577         if provides:
578             provide = re_spacestrip.sub('', provides)
579             if provide == '':
580                 self.rejects.append("%s: Provides field is empty." % (f))
581             prov_list = provide.split(",")
582             for prov in prov_list:
583                 if not re_valid_pkg_name.match(prov):
584                     self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
585
586         # Check the section & priority match those given in the .changes (non-fatal)
587         if     control.Find("Section") and entry["section"] != "" \
588            and entry["section"] != control.Find("Section"):
589             self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
590                                 (f, control.Find("Section", ""), entry["section"]))
591         if control.Find("Priority") and entry["priority"] != "" \
592            and entry["priority"] != control.Find("Priority"):
593             self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
594                                 (f, control.Find("Priority", ""), entry["priority"]))
595
596         entry["package"] = package
597         entry["architecture"] = architecture
598         entry["version"] = version
599         entry["maintainer"] = control.Find("Maintainer", "")
600
601         if f.endswith(".udeb"):
602             self.pkg.files[f]["dbtype"] = "udeb"
603         elif f.endswith(".deb"):
604             self.pkg.files[f]["dbtype"] = "deb"
605         else:
606             self.rejects.append("%s is neither a .deb or a .udeb." % (f))
607
608         entry["source"] = control.Find("Source", entry["package"])
609
610         # Get the source version
611         source = entry["source"]
612         source_version = ""
613
614         if source.find("(") != -1:
615             m = re_extract_src_version.match(source)
616             source = m.group(1)
617             source_version = m.group(2)
618
619         if not source_version:
620             source_version = self.pkg.files[f]["version"]
621
622         entry["source package"] = source
623         entry["source version"] = source_version
624
625         # Ensure the filename matches the contents of the .deb
626         m = re_isadeb.match(f)
627
628         #  package name
629         file_package = m.group(1)
630         if entry["package"] != file_package:
631             self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
632                                 (f, file_package, entry["dbtype"], entry["package"]))
633         epochless_version = re_no_epoch.sub('', control.Find("Version"))
634
635         #  version
636         file_version = m.group(2)
637         if epochless_version != file_version:
638             self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
639                                 (f, file_version, entry["dbtype"], epochless_version))
640
641         #  architecture
642         file_architecture = m.group(3)
643         if entry["architecture"] != file_architecture:
644             self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
645                                 (f, file_architecture, entry["dbtype"], entry["architecture"]))
646
647         # Check for existent source
648         source_version = entry["source version"]
649         source_package = entry["source package"]
650         if self.pkg.changes["architecture"].has_key("source"):
651             if source_version != self.pkg.changes["version"]:
652                 self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
653                                     (source_version, f, self.pkg.changes["version"]))
654         else:
655             # Check in the SQL database
656             if not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
657                 # Check in one of the other directories
658                 source_epochless_version = re_no_epoch.sub('', source_version)
659                 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
660                 if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
661                     entry["byhand"] = 1
662                 elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
663                     entry["new"] = 1
664                 else:
665                     dsc_file_exists = False
666                     for myq in ["Accepted", "Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
667                         if cnf.has_key("Dir::Queue::%s" % (myq)):
668                             if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
669                                 dsc_file_exists = True
670                                 break
671
672                     if not dsc_file_exists:
673                         self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
674
675         # Check the version and for file overwrites
676         self.check_binary_against_db(f, session)
677
678         # Temporarily disable contents generation until we change the table storage layout
679         #b = Binary(f)
680         #b.scan_package()
681         #if len(b.rejects) > 0:
682         #    for j in b.rejects:
683         #        self.rejects.append(j)
684
685     def source_file_checks(self, f, session):
686         entry = self.pkg.files[f]
687
688         m = re_issource.match(f)
689         if not m:
690             return
691
692         entry["package"] = m.group(1)
693         entry["version"] = m.group(2)
694         entry["type"] = m.group(3)
695
696         # Ensure the source package name matches the Source filed in the .changes
697         if self.pkg.changes["source"] != entry["package"]:
698             self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
699
700         # Ensure the source version matches the version in the .changes file
701         if re_is_orig_source.match(f):
702             changes_version = self.pkg.changes["chopversion2"]
703         else:
704             changes_version = self.pkg.changes["chopversion"]
705
706         if changes_version != entry["version"]:
707             self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))
708
709         # Ensure the .changes lists source in the Architecture field
710         if not self.pkg.changes["architecture"].has_key("source"):
711             self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))
712
713         # Check the signature of a .dsc file
714         if entry["type"] == "dsc":
715             # check_signature returns either:
716             #  (None, [list, of, rejects]) or (signature, [])
717             (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
718             for j in rejects:
719                 self.rejects.append(j)
720
721         entry["architecture"] = "source"
722
723     def per_suite_file_checks(self, f, suite, session):
724         cnf = Config()
725         entry = self.pkg.files[f]
726         archive = utils.where_am_i()
727
728         # Skip byhand
729         if entry.has_key("byhand"):
730             return
731
732         # Check we have fields we need to do these checks
733         oktogo = True
734         for m in ['component', 'package', 'priority', 'size', 'md5sum']:
735             if not entry.has_key(m):
736                 self.rejects.append("file '%s' does not have field %s set" % (f, m))
737                 oktogo = False
738
739         if not oktogo:
740             return
741
742         # Handle component mappings
743         for m in cnf.ValueList("ComponentMappings"):
744             (source, dest) = m.split()
745             if entry["component"] == source:
746                 entry["original component"] = source
747                 entry["component"] = dest
748
749         # Ensure the component is valid for the target suite
750         if cnf.has_key("Suite:%s::Components" % (suite)) and \
751            entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
752             self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
753             return
754
755         # Validate the component
756         if not get_component(entry["component"], session):
757             self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
758             return
759
760         # See if the package is NEW
761         if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
762             entry["new"] = 1
763
764         # Validate the priority
765         if entry["priority"].find('/') != -1:
766             self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))
767
768         # Determine the location
769         location = cnf["Dir::Pool"]
770         l = get_location(location, entry["component"], archive, session)
771         if l is None:
772             self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s, Archive: %s)" % (entry["component"], archive))
773             entry["location id"] = -1
774         else:
775             entry["location id"] = l.location_id
776
777         # Check the md5sum & size against existing files (if any)
778         entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])
779
780         found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
781                                          entry["size"], entry["md5sum"], entry["location id"])
782
783         if found is None:
784             self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
785         elif found is False and poolfile is not None:
786             self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
787         else:
788             if poolfile is None:
789                 entry["files id"] = None
790             else:
791                 entry["files id"] = poolfile.file_id
792
793         # Check for packages that have moved from one component to another
794         entry['suite'] = suite
795         res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
796         if res.rowcount > 0:
797             entry["othercomponents"] = res.fetchone()[0]
798
    def check_files(self, action=True):
        """Main per-file validation pass over everything in the upload.

        Optionally copies the upload into the holding area, classifies each
        file as deb / source / byhand, delegates to binary_file_checks() /
        source_file_checks(), and runs the per-suite checks for every target
        suite.  All problems found are appended to self.rejects.

        @type action: bool
        @param action: if True, copy files into the holding area first
        """
        file_keys = self.pkg.files.keys()
        holding = Holding()
        cnf = Config()

        # Copy the upload into the holding area; with action=False (-n mode)
        # we merely validate the files in place.
        if action:
            cwd = os.getcwd()
            os.chdir(self.pkg.directory)
            for f in file_keys:
                ret = holding.copy_to_holding(f)
                if ret is not None:
                    # XXX: Should we bail out here or try and continue?
                    self.rejects.append(ret)

            os.chdir(cwd)

        # Check there isn't already a .changes file of the same name in
        # the proposed-updates "CopyChanges" storage directories.
        # [NB: this check must be done post-suite mapping]
        base_filename = os.path.basename(self.pkg.changes_file)

        for suite in self.pkg.changes["distribution"].keys():
            copychanges = "Suite::%s::CopyChanges" % (suite)
            if cnf.has_key(copychanges) and \
                   os.path.exists(os.path.join(cnf[copychanges], base_filename)):
                self.rejects.append("%s: a file with this name already exists in %s" \
                           % (base_filename, cnf[copychanges]))

        has_binaries = False
        has_source = False

        session = DBConn().session()

        for f, entry in self.pkg.files.items():
            # Ensure the file does not already exist in one of the accepted directories
            for d in [ "Accepted", "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                if not cnf.has_key("Dir::Queue::%s" % (d)): continue
                if os.path.exists(cnf["Dir::Queue::%s" % (d) ] + '/' + f):
                    self.rejects.append("%s file already exists in the %s directory." % (f, d))

            # Reject filenames that fail the taint-free pattern
            if not re_taint_free.match(f):
                self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))

            # Check the file is readable
            if os.access(f, os.R_OK) == 0:
                # When running in -n, copy_to_holding() won't have
                # generated the reject_message, so we need to.
                if action:
                    if os.path.exists(f):
                        self.rejects.append("Can't read `%s'. [permission denied]" % (f))
                    else:
                        self.rejects.append("Can't read `%s'. [file not found]" % (f))
                entry["type"] = "unreadable"
                continue

            # If it's byhand skip remaining checks
            if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Checks for a binary package...
            elif re_isadeb.match(f):
                has_binaries = True
                entry["type"] = "deb"

                # This routine appends to self.rejects/warnings as appropriate
                self.binary_file_checks(f, session)

            # Checks for a source package...
            elif re_issource.match(f):
                has_source = True

                # This routine appends to self.rejects/warnings as appropriate
                self.source_file_checks(f, session)

            # Not a binary or source package?  Assume byhand...
            else:
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Per-suite file checks
            entry["oldfiles"] = {}
            for suite in self.pkg.changes["distribution"].keys():
                self.per_suite_file_checks(f, suite, session)

        session.close()

        # If the .changes file says it has source, it must have source.
        if self.pkg.changes["architecture"].has_key("source"):
            if not has_source:
                self.rejects.append("no source found and Architecture line in changes mention source.")

            if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
                self.rejects.append("source only uploads are not supported.")
893
894     ###########################################################################
    def check_dsc(self, action=True, session=None):
        """Validate the .dsc file of a source upload.

        Parses the .dsc, enforces mandatory fields, validates the source
        name, version, format, Maintainer and Build-Depends fields, and
        cross-checks the .dsc against the .changes file and the database.
        Problems are appended to self.rejects.

        @rtype: bool
        @return: whether or not the source changes are valid
        """
        # Ensure there is source to check
        if not self.pkg.changes["architecture"].has_key("source"):
            return True

        # Find the .dsc (rejecting uploads carrying more than one)
        dsc_filename = None
        for f, entry in self.pkg.files.items():
            if entry["type"] == "dsc":
                if dsc_filename:
                    self.rejects.append("can not process a .changes file with multiple .dsc's.")
                    return False
                else:
                    dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            self.rejects.append("source uploads must contain a dsc file")
            return False

        # Parse the .dsc file
        try:
            self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1))
        except CantOpenError:
            # if not -n copy_to_holding() will have done this for us...
            if not action:
                self.rejects.append("%s: can't read file." % (dsc_filename))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
        except InvalidDscError, line:
            self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
        except ChangesUnicodeError:
            self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))

        # Build up the file list of files mentioned by the .dsc
        try:
            self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
        except NoFilesFieldError:
            self.rejects.append("%s: no Files: field." % (dsc_filename))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
            return False

        # Enforce mandatory fields
        for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
            if not self.pkg.dsc.has_key(i):
                self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
                return False

        # Validate the source and version fields
        if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
        if not re_valid_version.match(self.pkg.dsc["version"]):
            self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))

        # Only a limited list of source formats are allowed in each suite
        for dist in self.pkg.changes["distribution"].keys():
            allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
            if self.pkg.dsc["format"] not in allowed:
                self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))

        # Validate the Maintainer field
        try:
            # We ignore the return value
            fix_maintainer(self.pkg.dsc["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                                 % (dsc_filename, self.pkg.dsc["maintainer"], msg))

        # Validate the build-depends field(s)
        for field_name in [ "build-depends", "build-depends-indep" ]:
            field = self.pkg.dsc.get(field_name)
            if field:
                # Have apt try to parse them...
                try:
                    apt_pkg.ParseSrcDepends(field)
                except:
                    self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))

        # Ensure the version number in the .dsc matches the version number in the .changes
        epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        changes_version = self.pkg.files[dsc_filename]["version"]

        if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
            self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))

        # Ensure the Files field contain only what's expected
        self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))

        # Ensure source is newer than existing source in target suites
        session = DBConn().session()
        self.check_source_against_db(dsc_filename, session)
        self.check_dsc_against_db(dsc_filename, session)
        session.close()

        return True
996
997     ###########################################################################
998
    def get_changelog_versions(self, source_dir):
        """Extracts the source package and (optionally) grabs the
        version history out of debian/changelog for the BTS.

        Expects the current working directory to be a scratch directory;
        the source files are symlinked in from source_dir (and from any
        known orig-file locations) before running dpkg-source.  Problems
        are appended to self.rejects.
        """

        cnf = Config()

        # Find the .dsc (again)
        dsc_filename = None
        for f in self.pkg.files.keys():
            if self.pkg.files[f]["type"] == "dsc":
                dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            return

        # Create a symlink mirror of the source files in our temporary directory
        for f in self.pkg.files.keys():
            m = re_issource.match(f)
            if m:
                src = os.path.join(source_dir, f)
                # If a file is missing for whatever reason, give up.
                if not os.path.exists(src):
                    return
                ftype = m.group(3)
                # Orig files whose existing location is already known are
                # linked in the following loop instead.
                if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
                   self.pkg.orig_files[f].has_key("path"):
                    continue
                dest = os.path.join(os.getcwd(), f)
                os.symlink(src, dest)

        # If the orig files are not a part of the upload, create symlinks to the
        # existing copies.
        for orig_file in self.pkg.orig_files.keys():
            if not self.pkg.orig_files[orig_file].has_key("path"):
                continue
            dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
            os.symlink(self.pkg.orig_files[orig_file]["path"], dest)

        # Extract the source
        cmd = "dpkg-source -sn -x %s" % (dsc_filename)
        (result, output) = commands.getstatusoutput(cmd)
        if (result != 0):
            self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
            self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
            return

        # BTS version tracking is optional; stop here if it is not configured
        if not cnf.Find("Dir::Queue::BTSVersionTrack"):
            return

        # Get the upstream version
        upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        if re_strip_revision.search(upstr_version):
            upstr_version = re_strip_revision.sub('', upstr_version)

        # Ensure the changelog file exists
        changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
        if not os.path.exists(changelog_filename):
            self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
            return

        # Parse the changelog; keep only lines that look like version headers
        self.pkg.dsc["bts changelog"] = ""
        changelog_file = utils.open_file(changelog_filename)
        for line in changelog_file.readlines():
            m = re_changelog_versions.match(line)
            if m:
                self.pkg.dsc["bts changelog"] += line
        changelog_file.close()

        # Check we found at least one revision in the changelog
        if not self.pkg.dsc["bts changelog"]:
            self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1072
1073     def check_source(self):
1074         # Bail out if:
1075         #    a) there's no source
1076         # or c) the orig files are MIA
1077         if not self.pkg.changes["architecture"].has_key("source") \
1078            or len(self.pkg.orig_files) == 0:
1079             return
1080
1081         tmpdir = utils.temp_dirname()
1082
1083         # Move into the temporary directory
1084         cwd = os.getcwd()
1085         os.chdir(tmpdir)
1086
1087         # Get the changelog version history
1088         self.get_changelog_versions(cwd)
1089
1090         # Move back and cleanup the temporary tree
1091         os.chdir(cwd)
1092
1093         try:
1094             shutil.rmtree(tmpdir)
1095         except OSError, e:
1096             if e.errno != errno.EACCES:
1097                 print "foobar"
1098                 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1099
1100             self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1101             # We probably have u-r or u-w directories so chmod everything
1102             # and try again.
1103             cmd = "chmod -R u+rwx %s" % (tmpdir)
1104             result = os.system(cmd)
1105             if result != 0:
1106                 utils.fubar("'%s' failed with result %s." % (cmd, result))
1107             shutil.rmtree(tmpdir)
1108         except Exception, e:
1109             print "foobar2 (%s)" % e
1110             utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1111
1112     ###########################################################################
1113     def ensure_hashes(self):
1114         # Make sure we recognise the format of the Files: field in the .changes
1115         format = self.pkg.changes.get("format", "0.0").split(".", 1)
1116         if len(format) == 2:
1117             format = int(format[0]), int(format[1])
1118         else:
1119             format = int(float(format[0])), 0
1120
1121         # We need to deal with the original changes blob, as the fields we need
1122         # might not be in the changes dict serialised into the .dak anymore.
1123         orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1124
1125         # Copy the checksums over to the current changes dict.  This will keep
1126         # the existing modifications to it intact.
1127         for field in orig_changes:
1128             if field.startswith('checksums-'):
1129                 self.pkg.changes[field] = orig_changes[field]
1130
1131         # Check for unsupported hashes
1132         for j in utils.check_hash_fields(".changes", self.pkg.changes):
1133             self.rejects.append(j)
1134
1135         for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1136             self.rejects.append(j)
1137
1138         # We have to calculate the hash if we have an earlier changes version than
1139         # the hash appears in rather than require it exist in the changes file
1140         for hashname, hashfunc, version in utils.known_hashes:
1141             # TODO: Move _ensure_changes_hash into this class
1142             for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1143                 self.rejects.append(j)
1144             if "source" in self.pkg.changes["architecture"]:
1145                 # TODO: Move _ensure_dsc_hash into this class
1146                 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1147                     self.rejects.append(j)
1148
1149     def check_hashes(self):
1150         for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1151             self.rejects.append(m)
1152
1153         for m in utils.check_size(".changes", self.pkg.files):
1154             self.rejects.append(m)
1155
1156         for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1157             self.rejects.append(m)
1158
1159         for m in utils.check_size(".dsc", self.pkg.dsc_files):
1160             self.rejects.append(m)
1161
1162         self.ensure_hashes()
1163
1164     ###########################################################################
1165
    def ensure_orig(self, target_dir='.', session=None):
        """
        Ensures that all orig files mentioned in the changes file are present
        in target_dir. If they do not exist, they are symlinked into place.

        A list containing the symlinks that were created is returned (so they
        can be removed).
        """

        symlinked = []
        cnf = Config()

        for filename, entry in self.pkg.dsc_files.iteritems():
            if not re_is_orig_source.match(filename):
                # File is not an orig; ignore
                continue

            if os.path.exists(filename):
                # File exists, no need to continue
                continue

            # Symlink the candidate into target_dir, but only if its size and
            # md5sum match what the .dsc declares for this orig file.
            def symlink_if_valid(path):
                f = utils.open_file(path)
                md5sum = apt_pkg.md5sum(f)
                f.close()

                fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
                expected = (int(entry['size']), entry['md5sum'])

                if fingerprint != expected:
                    return False

                dest = os.path.join(target_dir, filename)

                os.symlink(path, dest)
                symlinked.append(dest)

                return True

            # Use the caller's session if one was given; otherwise open our
            # own and close it again after the pool lookup.
            session_ = session
            if session is None:
                session_ = DBConn().session()

            found = False

            # Look in the pool
            for poolfile in get_poolfile_like_name('/%s' % filename, session_):
                poolfile_path = os.path.join(
                    poolfile.location.path, poolfile.filename
                )

                if symlink_if_valid(poolfile_path):
                    found = True
                    break

            if session is None:
                session_.close()

            if found:
                continue

            # Look in some other queues for the file
            queues = ('Accepted', 'New', 'Byhand', 'ProposedUpdates',
                'OldProposedUpdates', 'Embargoed', 'Unembargoed')

            for queue in queues:
                if not cnf.get('Dir::Queue::%s' % queue):
                    continue

                queuefile_path = os.path.join(
                    cnf['Dir::Queue::%s' % queue], filename
                )

                if not os.path.exists(queuefile_path):
                    # Does not exist in this queue
                    continue

                if symlink_if_valid(queuefile_path):
                    break

        return symlinked
1247
1248     ###########################################################################
1249
    def check_lintian(self):
        """Run lintian against the .changes file and auto-reject on the
        configured fatal tags.

        Only source uploads to certain distributions are checked; the tag
        lists come from the Dinstall::LintianTags YAML file.  Findings are
        appended to self.rejects.
        """
        cnf = Config()

        # Don't reject binary uploads
        if not self.pkg.changes['architecture'].has_key('source'):
            return

        # Only check some distributions
        valid_dist = False
        for dist in ('unstable', 'experimental'):
            if dist in self.pkg.changes['distribution']:
                valid_dist = True
                break

        if not valid_dist:
            return

        tagfile = cnf.get("Dinstall::LintianTags")
        if tagfile is None:
            # We don't have a tagfile, so just don't do anything.
            return

        # Parse the yaml file
        # NOTE(review): yaml.load is used on an operator-controlled config
        # file; yaml.safe_load would be preferable if that trust ever changes.
        sourcefile = file(tagfile, 'r')
        sourcecontent = sourcefile.read()
        sourcefile.close()
        try:
            lintiantags = yaml.load(sourcecontent)['lintian']
        except yaml.YAMLError, msg:
            utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
            return

        # Try and find all orig mentioned in the .dsc
        symlinked = self.ensure_orig()

        # Now setup the input file for lintian. lintian wants "one tag per line" only,
        # so put it together like it. We put all types of tags in one file and then sort
        # through lintians output later to see if its a fatal tag we detected, or not.
        # So we only run lintian once on all tags, even if we might reject on some, but not
        # reject on others.
        # Additionally build up a set of tags
        tags = set()
        (fd, temp_filename) = utils.temp_filename()
        temptagfile = os.fdopen(fd, 'w')
        for tagtype in lintiantags:
            for tag in lintiantags[tagtype]:
                temptagfile.write("%s\n" % tag)
                tags.add(tag)
        temptagfile.close()

        # So now we should look at running lintian at the .changes file, capturing output
        # to then parse it.
        command = "lintian --show-overrides --tags-from-file %s %s" % (temp_filename, self.pkg.changes_file)
        (result, output) = commands.getstatusoutput(command)

        # We are done with lintian, remove our tempfile and any symlinks we created
        os.unlink(temp_filename)
        for symlink in symlinked:
            os.unlink(symlink)

        # Exit code 2 indicates a lintian run failure; warn but fall through
        # so any partial output is still examined.
        if (result == 2):
            utils.warn("lintian failed for %s [return code: %s]." % (self.pkg.changes_file, result))
            utils.warn(utils.prefix_multi_line_string(output, " [possible output:] "))

        if len(output) == 0:
            return

        def log(*txt):
            if self.logger:
                self.logger.log([self.pkg.changes_file, "check_lintian"] + list(txt))

        # We have output of lintian, this package isn't clean. Lets parse it and see if we
        # are having a victim for a reject.
        # W: tzdata: binary-without-manpage usr/sbin/tzconfig
        for line in output.split('\n'):
            m = re_parse_lintian.match(line)
            if m is None:
                continue

            etype = m.group(1)
            epackage = m.group(2)
            etag = m.group(3)
            etext = m.group(4)

            # So lets check if we know the tag at all.
            if etag not in tags:
                continue

            if etype == 'O':
                # We know it and it is overriden. Check that override is allowed.
                if etag in lintiantags['warning']:
                    # The tag is overriden, and it is allowed to be overriden.
                    # Don't add a reject message.
                    pass
                elif etag in lintiantags['error']:
                    # The tag is overriden - but is not allowed to be
                    self.rejects.append("%s: Overriden tag %s found, but this tag may not be overwritten." % (epackage, etag))
                    log("ftpmaster does not allow tag to be overridable", etag)
            else:
                # Tag is known, it is not overriden, direct reject.
                self.rejects.append("%s: Found lintian output: '%s %s', automatically rejected package." % (epackage, etag, etext))
                # Now tell if they *might* override it.
                if etag in lintiantags['warning']:
                    log("auto rejecting", "overridable", etag)
                    self.rejects.append("%s: If you have a good reason, you may override this lintian tag." % (epackage))
                else:
                    log("auto rejecting", "not overridable", etag)
1357
1358     ###########################################################################
1359     def check_urgency(self):
1360         cnf = Config()
1361         if self.pkg.changes["architecture"].has_key("source"):
1362             if not self.pkg.changes.has_key("urgency"):
1363                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1364             self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1365             if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1366                 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1367                                      (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1368                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1369
1370     ###########################################################################
1371
1372     # Sanity check the time stamps of files inside debs.
1373     # [Files in the near future cause ugly warnings and extreme time
1374     #  travel can cause errors on extraction]
1375
    def check_timestamps(self):
        """Sanity check the time stamps of the files inside each .deb.

        Files dated in the near future cause ugly warnings on extraction,
        and extreme time travel can cause errors, so uploads containing
        such files are rejected (appended to self.rejects).
        """
        Cnf = Config()

        # Anything newer than now plus the configured grace period is
        # "future"; anything before the configured cutoff year is "ancient".
        future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
        past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
        tar = TarTime(future_cutoff, past_cutoff)

        for filename, entry in self.pkg.files.items():
            if entry["type"] == "deb":
                tar.reset()
                try:
                    deb_file = utils.open_file(filename)
                    # Scan both the control and data members of the .deb
                    apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
                    deb_file.seek(0)
                    try:
                        apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
                    except SystemError, e:
                        # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
                        if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
                            raise
                        deb_file.seek(0)
                        apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")

                    deb_file.close()

                    future_files = tar.future_files.keys()
                    if future_files:
                        num_future_files = len(future_files)
                        future_file = future_files[0]
                        future_date = tar.future_files[future_file]
                        self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
                               % (filename, num_future_files, future_file, time.ctime(future_date)))

                    ancient_files = tar.ancient_files.keys()
                    if ancient_files:
                        num_ancient_files = len(ancient_files)
                        ancient_file = ancient_files[0]
                        ancient_date = tar.ancient_files[ancient_file]
                        self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
                               % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
                # NOTE(review): the bare except deliberately turns any
                # extraction failure into a reject, but it also swallows
                # KeyboardInterrupt/SystemExit -- consider narrowing.
                except:
                    self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1418
1419     def check_if_upload_is_sponsored(self, uid_email, uid_name):
1420         if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1421             sponsored = False
1422         elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1423             sponsored = False
1424             if uid_name == "":
1425                 sponsored = True
1426         else:
1427             sponsored = True
1428             if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1429                 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1430                 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1431                     self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1432                         self.pkg.changes["sponsoremail"] = uid_email
1433
1434         return sponsored
1435
1436
1437     ###########################################################################
1438     # check_signed_by_key checks
1439     ###########################################################################
1440
1441     def check_signed_by_key(self):
1442         """Ensure the .changes is signed by an authorized uploader."""
1443         session = DBConn().session()
1444
1445         # First of all we check that the person has proper upload permissions
1446         # and that this upload isn't blocked
1447         fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1448
1449         if fpr is None:
1450             self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1451             return
1452
1453         # TODO: Check that import-keyring adds UIDs properly
1454         if not fpr.uid:
1455             self.rejects.append("Cannot find uid for fingerprint %s.  Please contact ftpmaster@debian.org" % fpr.fingerprint)
1456             return
1457
1458         # Check that the fingerprint which uploaded has permission to do so
1459         self.check_upload_permissions(fpr, session)
1460
1461         # Check that this package is not in a transition
1462         self.check_transition(session)
1463
1464         session.close()
1465
1466
1467     def check_upload_permissions(self, fpr, session):
1468         # Check any one-off upload blocks
1469         self.check_upload_blocks(fpr, session)
1470
1471         # Start with DM as a special case
1472         # DM is a special case unfortunately, so we check it first
1473         # (keys with no source access get more access than DMs in one
1474         #  way; DMs can only upload for their packages whether source
1475         #  or binary, whereas keys with no access might be able to
1476         #  upload some binaries)
1477         if fpr.source_acl.access_level == 'dm':
1478             self.check_dm_upload(fpr, session)
1479         else:
1480             # Check source-based permissions for other types
1481             if self.pkg.changes["architecture"].has_key("source"):
1482                 if fpr.source_acl.access_level is None:
1483                     rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1484                     rej += '\nPlease contact ftpmaster if you think this is incorrect'
1485                     self.rejects.append(rej)
1486                     return
1487             else:
1488                 # If not a DM, we allow full upload rights
1489                 uid_email = "%s@debian.org" % (fpr.uid.uid)
1490                 self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1491
1492
1493         # Check binary upload permissions
1494         # By this point we know that DMs can't have got here unless they
1495         # are allowed to deal with the package concerned so just apply
1496         # normal checks
1497         if fpr.binary_acl.access_level == 'full':
1498             return
1499
1500         # Otherwise we're in the map case
1501         tmparches = self.pkg.changes["architecture"].copy()
1502         tmparches.pop('source', None)
1503
1504         for bam in fpr.binary_acl_map:
1505             tmparches.pop(bam.architecture.arch_string, None)
1506
1507         if len(tmparches.keys()) > 0:
1508             if fpr.binary_reject:
1509                 rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1510                 rej += "\narchitectures involved are: ", ",".join(tmparches.keys())
1511                 self.rejects.append(rej)
1512             else:
1513                 # TODO: This is where we'll implement reject vs throw away binaries later
1514                 rej = "Uhm.  I'm meant to throw away the binaries now but that's not implemented yet"
1515                 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1516                 rej += "\nFingerprint: %s", (fpr.fingerprint)
1517                 self.rejects.append(rej)
1518
1519
1520     def check_upload_blocks(self, fpr, session):
1521         """Check whether any upload blocks apply to this source, source
1522            version, uid / fpr combination"""
1523
1524         def block_rej_template(fb):
1525             rej = 'Manual upload block in place for package %s' % fb.source
1526             if fb.version is not None:
1527                 rej += ', version %s' % fb.version
1528             return rej
1529
1530         for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1531             # version is None if the block applies to all versions
1532             if fb.version is None or fb.version == self.pkg.changes['version']:
1533                 # Check both fpr and uid - either is enough to cause a reject
1534                 if fb.fpr is not None:
1535                     if fb.fpr.fingerprint == fpr.fingerprint:
1536                         self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1537                 if fb.uid is not None:
1538                     if fb.uid == fpr.uid:
1539                         self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1540
1541
    def check_dm_upload(self, fpr, session):
        """
        Enforce the Debian Maintainer upload conditions from the 2007 GR
        (http://www.debian.org/vote/2007/vote_003) for the key fpr,
        appending to self.rejects on any violation.
        """
        # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
        ## none of the uploaded packages are NEW
        rej = False
        for f in self.pkg.files.keys():
            if self.pkg.files[f].has_key("byhand"):
                self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
                rej = True
            if self.pkg.files[f].has_key("new"):
                self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
                rej = True

        if rej:
            return

        ## the most recent version of the package uploaded to unstable or
        ## experimental includes the field "DM-Upload-Allowed: yes" in the source
        ## section of its control file
        q = session.query(DBSource).filter_by(source=self.pkg.changes["source"])
        q = q.join(SrcAssociation)
        q = q.join(Suite).filter(Suite.suite_name.in_(['unstable', 'experimental']))
        # Only the most recent version matters for the check below.
        q = q.order_by(desc('source.version')).limit(1)

        r = q.all()

        if len(r) != 1:
            rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
            self.rejects.append(rej)
            return

        r = r[0]
        if not r.dm_upload_allowed:
            rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
            self.rejects.append(rej)
            return

        ## the Maintainer: field of the uploaded .changes file corresponds with
        ## the owner of the key used (ie, non-developer maintainers may not sponsor
        ## uploads)
        if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
            self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))

        ## the most recent version of the package uploaded to unstable or
        ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
        ## non-developer maintainers cannot NMU or hijack packages)

        # srcuploaders includes the maintainer
        accept = False
        for sup in r.srcuploaders:
            (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
            # Eww - I hope we never have two people with the same name in Debian
            if email == fpr.uid.uid or name == fpr.uid.name:
                accept = True
                break

        if not accept:
            self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
            return

        ## none of the packages are being taken over from other source packages
        # For every binary in the upload, check that no suite it targets
        # already ships that binary built from a different source package.
        for b in self.pkg.changes["binary"].keys():
            for suite in self.pkg.changes["distribution"].keys():
                q = session.query(DBSource)
                q = q.join(DBBinary).filter_by(package=b)
                q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)

                for s in q.all():
                    if s.source != self.pkg.changes["source"]:
                        self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s, suite))
1611
1612
1613
1614     def check_transition(self, session):
1615         cnf = Config()
1616
1617         sourcepkg = self.pkg.changes["source"]
1618
1619         # No sourceful upload -> no need to do anything else, direct return
1620         # We also work with unstable uploads, not experimental or those going to some
1621         # proposed-updates queue
1622         if "source" not in self.pkg.changes["architecture"] or \
1623            "unstable" not in self.pkg.changes["distribution"]:
1624             return
1625
1626         # Also only check if there is a file defined (and existant) with
1627         # checks.
1628         transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1629         if transpath == "" or not os.path.exists(transpath):
1630             return
1631
1632         # Parse the yaml file
1633         sourcefile = file(transpath, 'r')
1634         sourcecontent = sourcefile.read()
1635         try:
1636             transitions = yaml.load(sourcecontent)
1637         except yaml.YAMLError, msg:
1638             # This shouldn't happen, there is a wrapper to edit the file which
1639             # checks it, but we prefer to be safe than ending up rejecting
1640             # everything.
1641             utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1642             return
1643
1644         # Now look through all defined transitions
1645         for trans in transitions:
1646             t = transitions[trans]
1647             source = t["source"]
1648             expected = t["new"]
1649
1650             # Will be None if nothing is in testing.
1651             current = get_source_in_suite(source, "testing", session)
1652             if current is not None:
1653                 compare = apt_pkg.VersionCompare(current.version, expected)
1654
1655             if current is None or compare < 0:
1656                 # This is still valid, the current version in testing is older than
1657                 # the new version we wait for, or there is none in testing yet
1658
1659                 # Check if the source we look at is affected by this.
1660                 if sourcepkg in t['packages']:
1661                     # The source is affected, lets reject it.
1662
1663                     rejectmsg = "%s: part of the %s transition.\n\n" % (
1664                         sourcepkg, trans)
1665
1666                     if current is not None:
1667                         currentlymsg = "at version %s" % (current.version)
1668                     else:
1669                         currentlymsg = "not present in testing"
1670
1671                     rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1672
1673                     rejectmsg += "\n".join(textwrap.wrap("""Your package
1674 is part of a testing transition designed to get %s migrated (it is
1675 currently %s, we need version %s).  This transition is managed by the
1676 Release Team, and %s is the Release-Team member responsible for it.
1677 Please mail debian-release@lists.debian.org or contact %s directly if you
1678 need further assistance.  You might want to upload to experimental until this
1679 transition is done."""
1680                             % (source, currentlymsg, expected,t["rm"], t["rm"])))
1681
1682                     self.rejects.append(rejectmsg)
1683                     return
1684
1685     ###########################################################################
1686     # End check_signed_by_key checks
1687     ###########################################################################
1688
1689     def build_summaries(self):
1690         """ Build a summary of changes the upload introduces. """
1691
1692         (byhand, new, summary, override_summary) = self.pkg.file_summary()
1693
1694         short_summary = summary
1695
1696         # This is for direport's benefit...
1697         f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1698
1699         if byhand or new:
1700             summary += "Changes: " + f
1701
1702         summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1703
1704         summary += self.announce(short_summary, 0)
1705
1706         return (summary, short_summary)
1707
1708     ###########################################################################
1709
1710     def close_bugs(self, summary, action):
1711         """
1712         Send mail to close bugs as instructed by the closes field in the changes file.
1713         Also add a line to summary if any work was done.
1714
1715         @type summary: string
1716         @param summary: summary text, as given by L{build_summaries}
1717
1718         @type action: bool
1719         @param action: Set to false no real action will be done.
1720
1721         @rtype: string
1722         @return: summary. If action was taken, extended by the list of closed bugs.
1723
1724         """
1725
1726         template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1727
1728         bugs = self.pkg.changes["closes"].keys()
1729
1730         if not bugs:
1731             return summary
1732
1733         bugs.sort()
1734         summary += "Closing bugs: "
1735         for bug in bugs:
1736             summary += "%s " % (bug)
1737             if action:
1738                 self.update_subst()
1739                 self.Subst["__BUG_NUMBER__"] = bug
1740                 if self.pkg.changes["distribution"].has_key("stable"):
1741                     self.Subst["__STABLE_WARNING__"] = """
1742 Note that this package is not part of the released stable Debian
1743 distribution.  It may have dependencies on other unreleased software,
1744 or other instabilities.  Please take care if you wish to install it.
1745 The update will eventually make its way into the next released Debian
1746 distribution."""
1747                 else:
1748                     self.Subst["__STABLE_WARNING__"] = ""
1749                 mail_message = utils.TemplateSubst(self.Subst, template)
1750                 utils.send_mail(mail_message)
1751
1752                 # Clear up after ourselves
1753                 del self.Subst["__BUG_NUMBER__"]
1754                 del self.Subst["__STABLE_WARNING__"]
1755
1756         if action and self.logger:
1757             self.logger.log(["closing bugs"] + bugs)
1758
1759         summary += "\n"
1760
1761         return summary
1762
1763     ###########################################################################
1764
1765     def announce(self, short_summary, action):
1766         """
1767         Send an announce mail about a new upload.
1768
1769         @type short_summary: string
1770         @param short_summary: Short summary text to include in the mail
1771
1772         @type action: bool
1773         @param action: Set to false no real action will be done.
1774
1775         @rtype: string
1776         @return: Textstring about action taken.
1777
1778         """
1779
1780         cnf = Config()
1781         announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
1782
1783         # Only do announcements for source uploads with a recent dpkg-dev installed
1784         if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
1785            self.pkg.changes["architecture"].has_key("source"):
1786             return ""
1787
1788         lists_done = {}
1789         summary = ""
1790
1791         self.Subst["__SHORT_SUMMARY__"] = short_summary
1792
1793         for dist in self.pkg.changes["distribution"].keys():
1794             announce_list = cnf.Find("Suite::%s::Announce" % (dist))
1795             if announce_list == "" or lists_done.has_key(announce_list):
1796                 continue
1797
1798             lists_done[announce_list] = 1
1799             summary += "Announcing to %s\n" % (announce_list)
1800
1801             if action:
1802                 self.update_subst()
1803                 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
1804                 if cnf.get("Dinstall::TrackingServer") and \
1805                    self.pkg.changes["architecture"].has_key("source"):
1806                     trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
1807                     self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
1808
1809                 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
1810                 utils.send_mail(mail_message)
1811
1812                 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
1813
1814         if cnf.FindB("Dinstall::CloseBugs"):
1815             summary = self.close_bugs(summary, action)
1816
1817         del self.Subst["__SHORT_SUMMARY__"]
1818
1819         return summary
1820
1821     ###########################################################################
1822
    def accept (self, summary, short_summary, session):
        """
        Accept an upload.

        This moves all files referenced from the .changes into the pool,
        sends the accepted mail, announces to lists, closes bugs and
        also checks for override disparities. If enabled it will write out
        the version history for the BTS Version Tracking and will finally call
        L{queue_build}.

        @type summary: string
        @param summary: Summary text

        @type short_summary: string
        @param short_summary: Short summary
        """

        # NOTE(review): this body looks mid-refactor (apparently merged
        # from process-accepted's install()).  It references names that
        # are not defined in this scope: 'u' (presumably 'self'), 'Logger'
        # (presumably 'self.logger'), 'summarystats' (presumably the
        # 'stats' local below), plus 'accepttemplate', 'log_urgency',
        # 'file_size' and 'file_md5sum'.  Verify before relying on it.
        cnf = Config()
        stats = SummaryStats()

        print "Installing."
        Logger.log(["installing changes", u.pkg.changes_file])

        # Add the .dsc file to the DB first
        for newfile, entry in u.pkg.files.items():
            if entry["type"] == "dsc":
                dsc_component, dsc_location_id = add_dsc_to_db(u, newfile, session)

        # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
        for newfile, entry in u.pkg.files.items():
            if entry["type"] == "deb":
                add_deb_to_db(u, newfile, session)

        # If this is a sourceful diff only upload that is moving
        # cross-component we need to copy the .orig files into the new
        # component too for the same reasons as above.
        if u.pkg.changes["architecture"].has_key("source"):
            for orig_file in u.pkg.orig_files.keys():
                if not u.pkg.orig_files[orig_file].has_key("id"):
                    continue # Skip if it's not in the pool
                orig_file_id = u.pkg.orig_files[orig_file]["id"]
                if u.pkg.orig_files[orig_file]["location"] == dsc_location_id:
                    continue # Skip if the location didn't change

                # Do the move
                oldf = get_poolfile_by_id(orig_file_id, session)
                old_filename = os.path.join(oldf.location.path, oldf.filename)
                old_dat = {'size': oldf.filesize,   'md5sum': oldf.md5sum,
                           'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}

                new_filename = os.path.join(utils.poolify(u.pkg.changes["source"], dsc_component), os.path.basename(old_filename))

                # TODO: Care about size/md5sum collisions etc
                # NOTE(review): 'file_size' and 'file_md5sum' are undefined
                # here -- presumably oldf.filesize / oldf.md5sum were meant.
                (found, newf) = check_poolfile(new_filename, file_size, file_md5sum, dsc_location_id, session)

                if newf is None:
                    utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
                    newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)

                    # TODO: Check that there's only 1 here
                    source = get_sources_from_name(u.pkg.changes["source"], u.pkg.changes["version"])[0]
                    dscf = get_dscfiles(source_id=source.source_id, poolfile_id=orig_file_id, session=session)[0]
                    dscf.poolfile_id = newf.file_id
                    session.add(dscf)
                    session.flush()

        # Install the files into the pool
        for newfile, entry in u.pkg.files.items():
            destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
            utils.move(newfile, destination)
            Logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
            # NOTE(review): 'summarystats' is undefined -- presumably the
            # 'stats' local defined at the top of this method.
            summarystats.accept_bytes += float(entry["size"])

        # Copy the .changes file across for suite which need it.
        copy_changes = {}
        for suite_name in u.pkg.changes["distribution"].keys():
            if cnf.has_key("Suite::%s::CopyChanges" % (suite_name)):
                copy_changes[cnf["Suite::%s::CopyChanges" % (suite_name)]] = ""

        for dest in copy_changes.keys():
            utils.copy(u.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))

        # We're done - commit the database changes
        session.commit()
        # Our SQL session will automatically start a new transaction after
        # the last commit

        # Move the .changes into the 'done' directory
        utils.move(u.pkg.changes_file,
                   os.path.join(cnf["Dir::Queue::Done"], os.path.basename(u.pkg.changes_file)))

        # NOTE(review): 'log_urgency' is undefined in this scope.
        if u.pkg.changes["architecture"].has_key("source") and log_urgency:
            UrgencyLog().log(u.pkg.dsc["source"], u.pkg.dsc["version"], u.pkg.changes["urgency"])

        # Send accept mail, announce to lists, close bugs and check for
        # override disparities
        if not cnf["Dinstall::Options::No-Mail"]:
            self.update_subst()
            self.Subst["__SUITE__"] = ""
            self.Subst["__SUMMARY__"] = summary
            # NOTE(review): 'accepttemplate' is undefined in this scope.
            mail_message = utils.TemplateSubst(self.Subst, accepttemplate)
            utils.send_mail(mail_message)
            self.announce(short_summary, 1)

        ## Helper stuff for DebBugs Version Tracking
        if cnf.Find("Dir::Queue::BTSVersionTrack"):
            # ??? once queue/* is cleared on *.d.o and/or reprocessed
            # the conditionalization on dsc["bts changelog"] should be
            # dropped.

            # Write out the version history from the changelog
            if self.pkg.changes["architecture"].has_key("source") and \
               self.pkg.dsc.has_key("bts changelog"):

                (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
                version_history = os.fdopen(fd, 'w')
                version_history.write(self.pkg.dsc["bts changelog"])
                version_history.close()
                # .changes_file ends in ".changes" (8 chars), swap the suffix.
                filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
                                      self.pkg.changes_file[:-8]+".versions")
                os.rename(temp_filename, filename)
                os.chmod(filename, 0644)

            # Write out the binary -> source mapping.
            (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
            debinfo = os.fdopen(fd, 'w')
            for name, entry in sorted(self.pkg.files.items()):
                if entry["type"] == "deb":
                    line = " ".join([entry["package"], entry["version"],
                                     entry["architecture"], entry["source package"],
                                     entry["source version"]])
                    debinfo.write(line+"\n")
            debinfo.close()
            filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
                                  self.pkg.changes_file[:-8]+".debinfo")
            os.rename(temp_filename, filename)
            os.chmod(filename, 0644)

        # auto-build queue
#        res = get_or_set_queue('buildd', session).autobuild_upload(self.pkg, session)
#        if res:
#            utils.fubar(res)
#            now_date = datetime.now()

        session.commit()

        # Finally...
        summarystats.accept_count += 1
1971
1972     def check_override(self):
1973         """
1974         Checks override entries for validity. Mails "Override disparity" warnings,
1975         if that feature is enabled.
1976
1977         Abandons the check if
1978           - override disparity checks are disabled
1979           - mail sending is disabled
1980         """
1981
1982         cnf = Config()
1983
1984         # Abandon the check if:
1985         #  a) override disparity checks have been disabled
1986         #  b) we're not sending mail
1987         if not cnf.FindB("Dinstall::OverrideDisparityCheck") or \
1988            cnf["Dinstall::Options::No-Mail"]:
1989             return
1990
1991         summary = self.pkg.check_override()
1992
1993         if summary == "":
1994             return
1995
1996         overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
1997
1998         self.update_subst()
1999         self.Subst["__SUMMARY__"] = summary
2000         mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2001         utils.send_mail(mail_message)
2002         del self.Subst["__SUMMARY__"]
2003
2004     ###########################################################################
2005
2006     def remove(self, from_dir=None):
2007         """
2008         Used (for instance) in p-u to remove the package from unchecked
2009
2010         Also removes the package from holding area.
2011         """
2012         if from_dir is None:
2013             from_dir = self.pkg.directory
2014         h = Holding()
2015
2016         for f in self.pkg.files.keys():
2017             os.unlink(os.path.join(from_dir, f))
2018             if os.path.exists(os.path.join(h.holding_dir, f)):
2019                 os.unlink(os.path.join(h.holding_dir, f))
2020                           
2021         os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2022         if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2023             os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2024
2025     ###########################################################################
2026
2027     def move_to_dir (self, dest, perms=0660, changesperms=0664):
2028         """
2029         Move files to dest with certain perms/changesperms
2030         """
2031         h = Holding()
2032         utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2033                    dest, perms=changesperms)
2034         for f in self.pkg.files.keys():
2035             utils.move(os.path.join(h.holding_dir, f), dest, perms=perms)
2036
2037     ###########################################################################
2038
    def force_reject(self, reject_files):
        """
        Forcefully move files from the current directory to the
        reject directory.  If any file already exists in the reject
        directory it will be moved to the morgue to make way for
        the new file.

        @type reject_files: list
        @param reject_files: names of the files (relative to the current
            directory) to move into the reject directory

        """

        cnf = Config()

        for file_entry in reject_files:
            # Skip any files which don't exist or which we don't have permission to copy.
            if os.access(file_entry, os.R_OK) == 0:
                continue

            dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)

            try:
                # O_EXCL makes this fail with EEXIST if a previous reject of
                # the same file is already there; we then evict the old copy
                # to the morgue instead of silently overwriting it.
                dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
            except OSError, e:
                # File exists?  Let's try and move it to the morgue
                if e.errno == errno.EEXIST:
                    morgue_file = os.path.join(cnf["Dir::Morgue"], cnf["Dir::MorgueReject"], file_entry)
                    try:
                        # Find an unused name in the morgue for the old copy.
                        morgue_file = utils.find_next_free(morgue_file)
                    except NoFreeFilenameError:
                        # Something's either gone badly Pete Tong, or
                        # someone is trying to exploit us.
                        utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file_entry))
                        return
                    utils.move(dest_file, morgue_file, perms=0660)
                    try:
                        # Retry claiming the destination now that it is free.
                        dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
                    except OSError, e:
                        # Likewise
                        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
                        return
                else:
                    raise
            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file_entry, dest_file, 1, perms=0660)
            os.close(dest_fd)
2086
2087     ###########################################################################
    def do_reject (self, manual=0, reject_message="", note=""):
        """
        Reject an upload. If called without a reject message or C{manual} is
        true, spawn an editor so the user can write one.

        @type manual: bool
        @param manual: manual or automated rejection

        @type reject_message: string
        @param reject_message: A reject message

        @type note: string
        @param note: text used to pre-fill the editor buffer when
            prompting for a manual reject message

        @return: 0 on success; 1 if a manual rejection was abandoned
            (exits the process entirely if the user chooses Quit)

        """
        # If we weren't given a manual rejection message, spawn an
        # editor so the user can add one in...
        if manual and not reject_message:
            (fd, temp_filename) = utils.temp_filename()
            temp_file = os.fdopen(fd, 'w')
            if len(note) > 0:
                for line in note:
                    temp_file.write(line)
            temp_file.close()
            editor = os.environ.get("EDITOR","vi")
            answer = 'E'
            # Re-open the editor for as long as the user keeps choosing Edit.
            while answer == 'E':
                os.system("%s %s" % (editor, temp_filename))
                temp_fh = utils.open_file(temp_filename)
                reject_message = "".join(temp_fh.readlines())
                temp_fh.close()
                print "Reject message:"
                print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
                prompt = "[R]eject, Edit, Abandon, Quit ?"
                answer = "XXX"
                # An empty answer selects the default option marked in the prompt.
                while prompt.find(answer) == -1:
                    answer = utils.our_raw_input(prompt)
                    m = re_default_answer.search(prompt)
                    if answer == "":
                        answer = m.group(1)
                    answer = answer[:1].upper()
            os.unlink(temp_filename)
            if answer == 'A':
                return 1
            elif answer == 'Q':
                sys.exit(0)

        print "Rejecting.\n"

        cnf = Config()

        # <foo>.changes -> <foo>.reason in the reject directory
        reason_filename = self.pkg.changes_file[:-8] + ".reason"
        reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)

        # Move all the files into the reject directory
        reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
        self.force_reject(reject_files)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reason_filename):
            os.unlink(reason_filename)
        reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)

        rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")

        self.update_subst()
        if not manual:
            self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
            self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
            self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
            os.write(reason_fd, reject_message)
            reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
        else:
            # Build up the rejection email
            user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
            self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
            self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
            self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
            reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
            # Write the rejection email out as the <foo>.reason file
            os.write(reason_fd, reject_mail_message)

        del self.Subst["__REJECTOR_ADDRESS__"]
        del self.Subst["__MANUAL_REJECT_MESSAGE__"]
        del self.Subst["__CC__"]

        os.close(reason_fd)

        # Send the rejection mail if appropriate
        if not cnf["Dinstall::Options::No-Mail"]:
            utils.send_mail(reject_mail_message)

        if self.logger:
            self.logger.log(["rejected", self.pkg.changes_file])

        return 0
2184
2185     ################################################################################
2186     def in_override_p(self, package, component, suite, binary_type, filename, session):
2187         """
2188         Check if a package already has override entries in the DB
2189
2190         @type package: string
2191         @param package: package name
2192
2193         @type component: string
2194         @param component: database id of the component
2195
2196         @type suite: int
2197         @param suite: database id of the suite
2198
2199         @type binary_type: string
2200         @param binary_type: type of the package
2201
2202         @type filename: string
2203         @param filename: filename we check
2204
2205         @return: the database result. But noone cares anyway.
2206
2207         """
2208
2209         cnf = Config()
2210
2211         if binary_type == "": # must be source
2212             file_type = "dsc"
2213         else:
2214             file_type = binary_type
2215
2216         # Override suite name; used for example with proposed-updates
2217         if cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
2218             suite = cnf["Suite::%s::OverrideSuite" % (suite)]
2219
2220         result = get_override(package, suite, component, file_type, session)
2221
2222         # If checking for a source package fall back on the binary override type
2223         if file_type == "dsc" and len(result) < 1:
2224             result = get_override(package, suite, component, ['deb', 'udeb'], session)
2225
2226         # Remember the section and priority so we can check them later if appropriate
2227         if len(result) > 0:
2228             result = result[0]
2229             self.pkg.files[filename]["override section"] = result.section.section
2230             self.pkg.files[filename]["override priority"] = result.priority.priority
2231             return result
2232
2233         return None
2234
2235     ################################################################################
2236     def get_anyversion(self, sv_list, suite):
2237         """
2238         @type sv_list: list
2239         @param sv_list: list of (suite, version) tuples to check
2240
2241         @type suite: string
2242         @param suite: suite name
2243
2244         Description: TODO
2245         """
2246         Cnf = Config()
2247         anyversion = None
2248         anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
2249         for (s, v) in sv_list:
2250             if s in [ x.lower() for x in anysuite ]:
2251                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2252                     anyversion = v
2253
2254         return anyversion
2255
2256     ################################################################################
2257
2258     def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2259         """
2260         @type sv_list: list
2261         @param sv_list: list of (suite, version) tuples to check
2262
2263         @type filename: string
2264         @param filename: XXX
2265
2266         @type new_version: string
2267         @param new_version: XXX
2268
2269         Ensure versions are newer than existing packages in target
2270         suites and that cross-suite version checking rules as
2271         set out in the conf file are satisfied.
2272         """
2273
2274         cnf = Config()
2275
2276         # Check versions for each target suite
2277         for target_suite in self.pkg.changes["distribution"].keys():
2278             must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
2279             must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
2280
2281             # Enforce "must be newer than target suite" even if conffile omits it
2282             if target_suite not in must_be_newer_than:
2283                 must_be_newer_than.append(target_suite)
2284
2285             for (suite, existent_version) in sv_list:
2286                 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
2287
2288                 if suite in must_be_newer_than and sourceful and vercmp < 1:
2289                     self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2290
2291                 if suite in must_be_older_than and vercmp > -1:
2292                     cansave = 0
2293
2294                     if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2295                         # we really use the other suite, ignoring the conflicting one ...
2296                         addsuite = self.pkg.changes["distribution-version"][suite]
2297
2298                         add_version = self.get_anyversion(sv_list, addsuite)
2299                         target_version = self.get_anyversion(sv_list, target_suite)
2300
2301                         if not add_version:
2302                             # not add_version can only happen if we map to a suite
2303                             # that doesn't enhance the suite we're propup'ing from.
2304                             # so "propup-ver x a b c; map a d" is a problem only if
2305                             # d doesn't enhance a.
2306                             #
2307                             # i think we could always propagate in this case, rather
2308                             # than complaining. either way, this isn't a REJECT issue
2309                             #
2310                             # And - we really should complain to the dorks who configured dak
2311                             self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite))
2312                             self.pkg.changes.setdefault("propdistribution", {})
2313                             self.pkg.changes["propdistribution"][addsuite] = 1
2314                             cansave = 1
2315                         elif not target_version:
2316                             # not targets_version is true when the package is NEW
2317                             # we could just stick with the "...old version..." REJECT
2318                             # for this, I think.
2319                             self.rejects.append("Won't propogate NEW packages.")
2320                         elif apt_pkg.VersionCompare(new_version, add_version) < 0:
2321                             # propogation would be redundant. no need to reject though.
2322                             self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2323                             cansave = 1
2324                         elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2325                              apt_pkg.VersionCompare(add_version, target_version) >= 0:
2326                             # propogate!!
2327                             self.warnings.append("Propogating upload to %s" % (addsuite))
2328                             self.pkg.changes.setdefault("propdistribution", {})
2329                             self.pkg.changes["propdistribution"][addsuite] = 1
2330                             cansave = 1
2331
2332                     if not cansave:
2333                         self.reject.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2334
2335     ################################################################################
    def check_binary_against_db(self, filename, session):
        """
        Run cross-suite version checks for the binary at C{filename} and
        reject it if an identical package/version/architecture already
        exists in the archive.  Appends to self.rejects on failure.

        @type filename: string
        @param filename: key into self.pkg.files for the binary to check

        @type session: SQLAlchemy session
        @param session: database session to use
        """
        # Ensure version is sane
        q = session.query(BinAssociation)
        q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[filename]["package"])
        # Include 'all' since arch:all packages clash with any architecture.
        q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[filename]["architecture"], 'all']))

        self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
                                       filename, self.pkg.files[filename]["version"], sourceful=False)

        # Check for any existing copies of the file
        q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
        q = q.filter_by(version=self.pkg.files[filename]["version"])
        q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])

        if q.count() > 0:
            self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
2352
2353     ################################################################################
2354
2355     def check_source_against_db(self, filename, session):
2356         """
2357         """
2358         source = self.pkg.dsc.get("source")
2359         version = self.pkg.dsc.get("version")
2360
2361         # Ensure version is sane
2362         q = session.query(SrcAssociation)
2363         q = q.join(DBSource).filter(DBSource.source==source)
2364
2365         self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
2366                                        filename, version, sourceful=True)
2367
2368     ################################################################################
2369     def check_dsc_against_db(self, filename, session):
2370         """
2371
2372         @warning: NB: this function can remove entries from the 'files' index [if
2373          the orig tarball is a duplicate of the one in the archive]; if
2374          you're iterating over 'files' and call this function as part of
2375          the loop, be sure to add a check to the top of the loop to
2376          ensure you haven't just tried to dereference the deleted entry.
2377
2378         """
2379
2380         Cnf = Config()
2381         self.pkg.orig_files = {} # XXX: do we need to clear it?
2382         orig_files = self.pkg.orig_files
2383
2384         # Try and find all files mentioned in the .dsc.  This has
2385         # to work harder to cope with the multiple possible
2386         # locations of an .orig.tar.gz.
2387         # The ordering on the select is needed to pick the newest orig
2388         # when it exists in multiple places.
2389         for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2390             found = None
2391             if self.pkg.files.has_key(dsc_name):
2392                 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2393                 actual_size = int(self.pkg.files[dsc_name]["size"])
2394                 found = "%s in incoming" % (dsc_name)
2395
2396                 # Check the file does not already exist in the archive
2397                 ql = get_poolfile_like_name(dsc_name, session)
2398
2399                 # Strip out anything that isn't '%s' or '/%s$'
2400                 for i in ql:
2401                     if not i.filename.endswith(dsc_name):
2402                         ql.remove(i)
2403
2404                 # "[dak] has not broken them.  [dak] has fixed a
2405                 # brokenness.  Your crappy hack exploited a bug in
2406                 # the old dinstall.
2407                 #
2408                 # "(Come on!  I thought it was always obvious that
2409                 # one just doesn't release different files with
2410                 # the same name and version.)"
2411                 #                        -- ajk@ on d-devel@l.d.o
2412
2413                 if len(ql) > 0:
2414                     # Ignore exact matches for .orig.tar.gz
2415                     match = 0
2416                     if re_is_orig_source.match(dsc_name):
2417                         for i in ql:
2418                             if self.pkg.files.has_key(dsc_name) and \
2419                                int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2420                                self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2421                                 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2422                                 # TODO: Don't delete the entry, just mark it as not needed
2423                                 # This would fix the stupidity of changing something we often iterate over
2424                                 # whilst we're doing it
2425                                 del self.pkg.files[dsc_name]
2426                                 if not orig_files.has_key(dsc_name):
2427                                     orig_files[dsc_name] = {}
2428                                 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2429                                 match = 1
2430
2431                     if not match:
2432                         self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2433
2434             elif re_is_orig_source.match(dsc_name):
2435                 # Check in the pool
2436                 ql = get_poolfile_like_name(dsc_name, session)
2437
2438                 # Strip out anything that isn't '%s' or '/%s$'
2439                 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2440                 for i in ql:
2441                     if not i.filename.endswith(dsc_name):
2442                         ql.remove(i)
2443
2444                 if len(ql) > 0:
2445                     # Unfortunately, we may get more than one match here if,
2446                     # for example, the package was in potato but had an -sa
2447                     # upload in woody.  So we need to choose the right one.
2448
2449                     # default to something sane in case we don't match any or have only one
2450                     x = ql[0]
2451
2452                     if len(ql) > 1:
2453                         for i in ql:
2454                             old_file = os.path.join(i.location.path, i.filename)
2455                             old_file_fh = utils.open_file(old_file)
2456                             actual_md5 = apt_pkg.md5sum(old_file_fh)
2457                             old_file_fh.close()
2458                             actual_size = os.stat(old_file)[stat.ST_SIZE]
2459                             if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2460                                 x = i
2461
2462                     old_file = os.path.join(i.location.path, i.filename)
2463                     old_file_fh = utils.open_file(old_file)
2464                     actual_md5 = apt_pkg.md5sum(old_file_fh)
2465                     old_file_fh.close()
2466                     actual_size = os.stat(old_file)[stat.ST_SIZE]
2467                     found = old_file
2468                     suite_type = x.location.archive_type
2469                     # need this for updating dsc_files in install()
2470                     dsc_entry["files id"] = x.file_id
2471                     # See install() in process-accepted...
2472                     if not orig_files.has_key(dsc_name):
2473                         orig_files[dsc_name] = {}
2474                     orig_files[dsc_name]["id"] = x.file_id
2475                     orig_files[dsc_name]["path"] = old_file
2476                     orig_files[dsc_name]["location"] = x.location.location_id
2477                 else:
2478                     # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2479                     # Not there? Check the queue directories...
2480                     for directory in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2481                         if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2482                             continue
2483                         in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2484                         if os.path.exists(in_otherdir):
2485                             in_otherdir_fh = utils.open_file(in_otherdir)
2486                             actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2487                             in_otherdir_fh.close()
2488                             actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2489                             found = in_otherdir
2490                             if not orig_files.has_key(dsc_name):
2491                                 orig_files[dsc_name] = {}
2492                             orig_files[dsc_name]["path"] = in_otherdir
2493
2494                     if not found:
2495                         self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2496                         continue
2497             else:
2498                 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2499                 continue
2500             if actual_md5 != dsc_entry["md5sum"]:
2501                 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2502             if actual_size != int(dsc_entry["size"]):
2503                 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2504
2505     ################################################################################
2506     # This is used by process-new and process-holding to recheck a changes file
2507     # at the time we're running.  It mainly wraps various other internal functions
2508     # and is similar to accepted_checks - these should probably be tidied up
2509     # and combined
    def recheck(self, session):
        """
        Re-run the database-dependent checks for every file of the upload:
        that a matching source package still exists for each binary, and
        the version / file-overwrite checks.  Appends to self.rejects.

        @type session: SQLAlchemy session
        @param session: database session to use
        """
        cnf = Config()
        for f in self.pkg.files.keys():
            # The .orig.tar.gz can disappear out from under us if it's a
            # duplicate of one in the archive (check_dsc_against_db may
            # delete the entry while we iterate).
            if not self.pkg.files.has_key(f):
                continue

            entry = self.pkg.files[f]

            # Check that the source still exists
            if entry["type"] == "deb":
                source_version = entry["source version"]
                source_package = entry["source package"]
                if not self.pkg.changes["architecture"].has_key("source") \
                   and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
                    source_epochless_version = re_no_epoch.sub('', source_version)
                    dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
                    found = False
                    # The source may still be waiting in one of the queue directories.
                    for q in ["Accepted", "Embargoed", "Unembargoed", "Newstage"]:
                        if cnf.has_key("Dir::Queue::%s" % (q)):
                            if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
                                found = True
                    if not found:
                        self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))

            # Version and file overwrite checks
            if entry["type"] == "deb":
                self.check_binary_against_db(f, session)
            elif entry["type"] == "dsc":
                self.check_source_against_db(f, session)
                self.check_dsc_against_db(f, session)
2542
2543     ################################################################################
2544     def accepted_checks(self, overwrite_checks, session):
2545         # Recheck anything that relies on the database; since that's not
2546         # frozen between accept and our run time when called from p-a.
2547
2548         # overwrite_checks is set to False when installing to stable/oldstable
2549
2550         propogate={}
2551         nopropogate={}
2552
2553         # Find the .dsc (again)
2554         dsc_filename = None
2555         for f in self.pkg.files.keys():
2556             if self.pkg.files[f]["type"] == "dsc":
2557                 dsc_filename = f
2558
2559         for checkfile in self.pkg.files.keys():
2560             # The .orig.tar.gz can disappear out from under us is it's a
2561             # duplicate of one in the archive.
2562             if not self.pkg.files.has_key(checkfile):
2563                 continue
2564
2565             entry = self.pkg.files[checkfile]
2566
2567             # Check that the source still exists
2568             if entry["type"] == "deb":
2569                 source_version = entry["source version"]
2570                 source_package = entry["source package"]
2571                 if not self.pkg.changes["architecture"].has_key("source") \
2572                    and not source_exists(source_package, source_version,  self.pkg.changes["distribution"].keys()):
2573                     self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2574
2575             # Version and file overwrite checks
2576             if overwrite_checks:
2577                 if entry["type"] == "deb":
2578                     self.check_binary_against_db(checkfile, session)
2579                 elif entry["type"] == "dsc":
2580                     self.check_source_against_db(checkfile, session)
2581                     self.check_dsc_against_db(dsc_filename, session)
2582
2583             # propogate in the case it is in the override tables:
2584             for suite in self.pkg.changes.get("propdistribution", {}).keys():
2585                 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2586                     propogate[suite] = 1
2587                 else:
2588                     nopropogate[suite] = 1
2589
2590         for suite in propogate.keys():
2591             if suite in nopropogate:
2592                 continue
2593             self.pkg.changes["distribution"][suite] = 1
2594
2595         for checkfile in self.pkg.files.keys():
2596             # Check the package is still in the override tables
2597             for suite in self.pkg.changes["distribution"].keys():
2598                 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2599                     self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2600
2601     ################################################################################
2602     # This is not really a reject, but an unaccept, but since a) the code for
2603     # that is non-trivial (reopen bugs, unannounce etc.), b) this should be
2604     # extremely rare, for now we'll go with whining at our admin folks...
2605
2606     def do_unaccept(self):
2607         cnf = Config()
2608
2609         self.update_subst()
2610         self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2611         self.Subst["__REJECT_MESSAGE__"] = self.package_info()
2612         self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2613         self.Subst["__BCC__"] = "X-DAK: dak process-accepted"
2614         if cnf.has_key("Dinstall::Bcc"):
2615             self.Subst["__BCC__"] += "\nBcc: %s" % (cnf["Dinstall::Bcc"])
2616
2617         template = os.path.join(cnf["Dir::Templates"], "process-accepted.unaccept")
2618
2619         reject_mail_message = utils.TemplateSubst(self.Subst, template)
2620
2621         # Write the rejection email out as the <foo>.reason file
2622         reason_filename = os.path.basename(self.pkg.changes_file[:-8]) + ".reason"
2623         reject_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2624
2625         # If we fail here someone is probably trying to exploit the race
2626         # so let's just raise an exception ...
2627         if os.path.exists(reject_filename):
2628             os.unlink(reject_filename)
2629
2630         fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2631         os.write(fd, reject_mail_message)
2632         os.close(fd)
2633
2634         utils.send_mail(reject_mail_message)
2635
2636         del self.Subst["__REJECTOR_ADDRESS__"]
2637         del self.Subst["__REJECT_MESSAGE__"]
2638         del self.Subst["__CC__"]
2639
2640     ################################################################################
2641     # If any file of an upload has a recent mtime then chances are good
2642     # the file is still being uploaded.
2643
2644     def upload_too_new(self):
2645         cnf = Config()
2646         too_new = False
2647         # Move back to the original directory to get accurate time stamps
2648         cwd = os.getcwd()
2649         os.chdir(self.pkg.directory)
2650         file_list = self.pkg.files.keys()
2651         file_list.extend(self.pkg.dsc_files.keys())
2652         file_list.append(self.pkg.changes_file)
2653         for f in file_list:
2654             try:
2655                 last_modified = time.time()-os.path.getmtime(f)
2656                 if last_modified < int(cnf["Dinstall::SkipTime"]):
2657                     too_new = True
2658                     break
2659             except:
2660                 pass
2661
2662         os.chdir(cwd)
2663         return too_new