#!/usr/bin/env python
# vim:set et sw=4:

"""
Queue utility functions for dak

@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2001 - 2006 James Troup <james@nocrew.org>
@copyright: 2009  Joerg Jaspert <joerg@debian.org>
@license: GNU General Public License version 2 or later
"""

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

###############################################################################

import errno
import os
import stat
import sys
import time
import apt_inst
import apt_pkg
import utils
import commands
import shutil
import textwrap
from types import *
from sqlalchemy.sql.expression import desc
from sqlalchemy.orm.exc import NoResultFound

import yaml

from dak_exceptions import *
from changes import *
from regexes import *
from config import Config
from holding import Holding
from urgencylog import UrgencyLog
from dbconn import *
from summarystats import SummaryStats
from utils import parse_changes, check_dsc_files
from textutils import fix_maintainer
from binary import Binary
from lintian import parse_lintian_output, generate_reject_messages

###############################################################################

def get_type(f, session):
    """
    Get the file type of C{f}

    @type f: dict
    @param f: file entry from Changes object

    @type session: SQLA Session
    @param session: SQL Alchemy session object

    @rtype: string
    @return: filetype

    """
    # Determine the type
    if f.has_key("dbtype"):
        file_type = f["dbtype"]
    elif re_source_ext.match(f["type"]):
        file_type = "dsc"
    else:
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (f["type"]))

    # Validate the override type
    type_id = get_override_type(file_type, session)
    if type_id is None:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))

    return file_type

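# Example usage (illustrative sketch; the file entries below are hypothetical
# and a configured dak database connection is assumed):
#
#     session = DBConn().session()
#     get_type({"dbtype": "deb"}, session)   # -> "deb"
#     get_type({"type": "dsc"}, session)     # -> "dsc" (matched via re_source_ext)
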
################################################################################

# Determine what parts in a .changes are NEW

def determine_new(changes, files, warn=1):
    """
    Determine what parts in a C{changes} file are NEW.

    @type changes: Upload.Pkg.changes dict
    @param changes: Changes dictionary

    @type files: Upload.Pkg.files dict
    @param files: Files dictionary

    @type warn: bool
    @param warn: Warn if overrides are added for (old)stable

    @rtype: dict
    @return: dictionary of NEW components.

    """
    new = {}

    session = DBConn().session()

    # Build up a list of potentially new things
    for name, f in files.items():
        # Skip byhand elements
#        if f["type"] == "byhand":
#            continue
        pkg = f["package"]
        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f, session)
        component = f["component"]

        if file_type == "dsc":
            priority = "source"

        if not new.has_key(pkg):
            new[pkg] = {}
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        else:
            old_type = new[pkg]["type"]
            if old_type != file_type:
                # source gets trumped by deb or udeb
                if old_type == "dsc":
                    new[pkg]["priority"] = priority
                    new[pkg]["section"] = section
                    new[pkg]["type"] = file_type
                    new[pkg]["component"] = component

        new[pkg]["files"].append(name)

        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]

    # Fix up the list of target suites
    cnf = Config()
    for suite in changes["suite"].keys():
        override = cnf.Find("Suite::%s::OverrideSuite" % (suite))
        if override:
            (olderr, newerr) = (get_suite(suite, session) == None,
                                get_suite(override, session) == None)
            if olderr or newerr:
                (oinv, ninv) = ("", "")
                if olderr: oinv = "invalid "
                if newerr: ninv = "invalid "
                print "warning: overriding %ssuite %s to %ssuite %s" % (
                        oinv, suite, ninv, override)
            del changes["suite"][suite]
            changes["suite"][override] = 1

    for suite in changes["suite"].keys():
        for pkg in new.keys():
            ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
            if len(ql) > 0:
                for file_entry in new[pkg]["files"]:
                    if files[file_entry].has_key("new"):
                        del files[file_entry]["new"]
                del new[pkg]

    if warn:
        for s in ['stable', 'oldstable']:
            if changes["suite"].has_key(s):
                print "WARNING: overrides will be added for %s!" % s
        for pkg in new.keys():
            if new[pkg].has_key("othercomponents"):
                print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])

    session.close()

    return new

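# Example (illustrative sketch; hypothetical dictionaries -- real callers pass
# Upload.pkg.changes and Upload.pkg.files):
#
#     changes = {"suite": {"unstable": 1}}
#     files = {"hello_1.0-1.dsc": {"package": "hello", "priority": "optional",
#                                  "section": "devel", "type": "dsc",
#                                  "component": "main"}}
#     new = determine_new(changes, files)
#     # new is {} if an override already exists for every package;
#     # otherwise new["hello"] describes the candidate NEW entry.
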
################################################################################

def check_valid(new):
    """
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages can not be udeb
      - source priority can only be assigned to dsc file types

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.

    """
    for pkg in new.keys():
        section_name = new[pkg]["section"]
        priority_name = new[pkg]["priority"]
        file_type = new[pkg]["type"]

        section = get_section(section_name)
        if section is None:
            new[pkg]["section id"] = -1
        else:
            new[pkg]["section id"] = section.section_id

        priority = get_priority(priority_name)
        if priority is None:
            new[pkg]["priority id"] = -1
        else:
            new[pkg]["priority id"] = priority.priority_id

        # Sanity checks
        di = section_name.find("debian-installer") != -1

        # If d-i, we must be udeb and vice-versa
        if     (di and file_type not in ("udeb", "dsc")) or \
           (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1

        # If dsc we need to be source and vice-versa
        if (priority_name == "source" and file_type != "dsc") or \
           (priority_name != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1
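
# Example (illustrative sketch with hypothetical values):
#
#     new = {"foo-udeb": {"section": "debian-installer", "priority": "optional",
#                         "type": "udeb"}}
#     check_valid(new)
#     # new["foo-udeb"]["section id"] and ["priority id"] are now set;
#     # -1 flags an unknown or inconsistent section/priority.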
###############################################################################

def check_status(files):
    new = byhand = 0
    for f in files.keys():
        if files[f].has_key("byhand"):
            byhand = 1
        elif files[f].has_key("new"):
            new = 1
    return (new, byhand)

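# Example (illustrative sketch):
#
#     files = {"a.deb": {"new": 1}, "b.tar.gz": {"byhand": 1}}
#     check_status(files)    # -> (1, 1): the upload has NEW and byhand parts
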
###############################################################################

# Used by Upload.check_timestamps
class TarTime(object):
    def __init__(self, future_cutoff, past_cutoff):
        self.reset()
        self.future_cutoff = future_cutoff
        self.past_cutoff = past_cutoff

    def reset(self):
        self.future_files = {}
        self.ancient_files = {}

    def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
        if MTime > self.future_cutoff:
            self.future_files[Name] = MTime
        if MTime < self.past_cutoff:
            self.ancient_files[Name] = MTime

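# Example usage (illustrative sketch; in practice Upload.check_timestamps
# hands the callback to python-apt's tar extraction, e.g. apt_inst.debExtract,
# which invokes it once per archive member):
#
#     tar = TarTime(future_cutoff=time.time() + 24 * 3600,
#                   past_cutoff=time.mktime((1984, 1, 1, 0, 0, 0, 0, 0, 0)))
#     # ... extract a .deb with tar.callback ...
#     # then inspect tar.future_files / tar.ancient_files for bad timestamps.
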
###############################################################################

class Upload(object):
    """
    Everything that has to do with processing an upload.

    """
    def __init__(self):
        self.logger = None
        self.pkg = Changes()
        self.reset()

    ###########################################################################

    def reset(self):
        """ Reset a number of internal variables."""

        # Initialize the substitution template map
        cnf = Config()
        self.Subst = {}
        self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
        self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
        self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
        self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]

        self.rejects = []
        self.warnings = []
        self.notes = []

        self.later_check_files = []

        self.pkg.reset()

    def package_info(self):
        """
        Format various messages from this Upload to send to the maintainer.
        """

        msgs = (
            ('Reject Reasons', self.rejects),
            ('Warnings', self.warnings),
            ('Notes', self.notes),
        )

        msg = ''
        for title, messages in msgs:
            if messages:
                msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
        msg += '\n\n'

        return msg

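    # Example (illustrative sketch; assumes a loaded dak configuration, since
    # Upload() reads Dinstall::* settings):
    #
    #     u = Upload()
    #     u.rejects.append("hello_1.0-1.dsc: invalid version number '1..0'.")
    #     u.warnings.append("hello_1.0-1_amd64.deb: Depends field is empty.")
    #     print u.package_info()
    #     # -> "\n\nReject Reasons:\n..." then "\n\nWarnings:\n..."
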
    ###########################################################################
    def update_subst(self):
        """ Set up the per-package template substitution mappings """

        cnf = Config()

        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not self.pkg.changes.has_key("architecture") or not \
           isinstance(self.pkg.changes["architecture"], dict):
            self.pkg.changes["architecture"] = { "Unknown" : "" }

        # and maintainer2047 may not exist.
        if not self.pkg.changes.has_key("maintainer2047"):
            self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]

        self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
        self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if self.pkg.changes["architecture"].has_key("source") and \
           self.pkg.changes["changedby822"] != "" and \
           (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):

            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
        else:
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")

        if "sponsoremail" in self.pkg.changes:
            self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]

        if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
            self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if cnf.get("Dinstall::OverrideMaintainer"):
            self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
            self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]

        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
        self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")

    ###########################################################################
    def load_changes(self, filename):
        """
        @rtype: boolean
        @return: whether the changes file was valid or not.  We may want to
                 reject even if this is True (see what gets put in self.rejects).
                 This is simply to prevent us even trying things later which will
                 fail because we couldn't properly parse the file.
        """
        Cnf = Config()
        self.pkg.changes_file = filename

        # Parse the .changes file into a dictionary
        try:
            self.pkg.changes.update(parse_changes(filename))
        except CantOpenError:
            self.rejects.append("%s: can't read file." % (filename))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except ChangesUnicodeError:
            self.rejects.append("%s: changes file not proper utf-8" % (filename))
            return False

        # Parse the Files field from the .changes into another dictionary
        try:
            self.pkg.files.update(utils.build_file_list(self.pkg.changes))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (filename, format))
            return False

        # Check for mandatory fields
        for i in ("distribution", "source", "binary", "architecture",
                  "version", "maintainer", "files", "changes", "description"):
            if not self.pkg.changes.has_key(i):
                # Avoid undefined errors later
                self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
                return False

        # Strip a source version in brackets from the source field
        if re_strip_srcver.search(self.pkg.changes["source"]):
            self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])

        # Ensure the source field is a valid package name.
        if not re_valid_pkg_name.match(self.pkg.changes["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))

        # Split multi-value fields into a lower-level dictionary
        for i in ("architecture", "distribution", "binary", "closes"):
            o = self.pkg.changes.get(i, "")
            if o != "":
                del self.pkg.changes[i]

            self.pkg.changes[i] = {}

            for j in o.split():
                self.pkg.changes[i][j] = 1

        # Fix the Maintainer: field to be RFC822/2047 compatible
        try:
            (self.pkg.changes["maintainer822"],
             self.pkg.changes["maintainer2047"],
             self.pkg.changes["maintainername"],
             self.pkg.changes["maintaineremail"]) = \
                   fix_maintainer(self.pkg.changes["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["maintainer"], msg))

        # ...likewise for the Changed-By: field if it exists.
        try:
            (self.pkg.changes["changedby822"],
             self.pkg.changes["changedby2047"],
             self.pkg.changes["changedbyname"],
             self.pkg.changes["changedbyemail"]) = \
                   fix_maintainer(self.pkg.changes.get("changed-by", ""))
        except ParseMaintError, msg:
            self.pkg.changes["changedby822"] = ""
            self.pkg.changes["changedby2047"] = ""
            self.pkg.changes["changedbyname"] = ""
            self.pkg.changes["changedbyemail"] = ""

            self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["changed-by"], msg))

        # Ensure all the values in Closes: are numbers
        if self.pkg.changes.has_key("closes"):
            for i in self.pkg.changes["closes"].keys():
                if re_isanum.match(i) == None:
                    self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))

        # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
        self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
        self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])

        # Check the .changes is non-empty
        if not self.pkg.files:
            self.rejects.append("%s: nothing to do (Files field is empty)." % (filename))
            return False

        # Changes was syntactically valid even if we'll reject
        return True

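    # Example usage (illustrative sketch; the path is hypothetical):
    #
    #     u = Upload()
    #     if not u.load_changes("/srv/queue/hello_1.0-1_amd64.changes"):
    #         print "unparseable:", u.rejects
    #     elif u.rejects:
    #         print "parsed, but will be rejected:", u.rejects
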
    ###########################################################################

    def check_distributions(self):
        "Check and map the Distribution field"

        Cnf = Config()

        # Handle suite mappings
        for m in Cnf.ValueList("SuiteMappings"):
            args = m.split()
            mtype = args[0]
            if mtype == "map" or mtype == "silent-map":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    del self.pkg.changes["distribution"][source]
                    self.pkg.changes["distribution"][dest] = 1
                    if mtype != "silent-map":
                        self.notes.append("Mapping %s to %s." % (source, dest))
                if self.pkg.changes.has_key("distribution-version"):
                    if self.pkg.changes["distribution-version"].has_key(source):
                        self.pkg.changes["distribution-version"][source] = dest
            elif mtype == "map-unreleased":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    for arch in self.pkg.changes["architecture"].keys():
                        if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
                            self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
                            del self.pkg.changes["distribution"][source]
                            self.pkg.changes["distribution"][dest] = 1
                            break
            elif mtype == "ignore":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    del self.pkg.changes["distribution"][suite]
                    self.warnings.append("Ignoring %s as a target suite." % (suite))
            elif mtype == "reject":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    self.rejects.append("Uploads to %s are not accepted." % (suite))
            elif mtype == "propup-version":
                # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
                #
                # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
                if self.pkg.changes["distribution"].has_key(args[1]):
                    self.pkg.changes.setdefault("distribution-version", {})
                    for suite in args[2:]:
                        self.pkg.changes["distribution-version"][suite] = suite

        # Ensure there is (still) a target distribution
        if len(self.pkg.changes["distribution"].keys()) < 1:
            self.rejects.append("No valid distribution remaining.")

        # Ensure target distributions exist
        for suite in self.pkg.changes["distribution"].keys():
            if not Cnf.has_key("Suite::%s" % (suite)):
                self.rejects.append("Unknown distribution `%s'." % (suite))

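    # The SuiteMappings handled above are plain configuration strings; an
    # illustrative (hypothetical) apt-style configuration block:
    #
    #     SuiteMappings
    #     {
    #       "map stable proposed-updates";
    #       "silent-map stable-security proposed-updates";
    #       "map-unreleased unstable experimental";
    #       "ignore testing";
    #       "reject oldstable";
    #       "propup-version testing-security testing";
    #     };
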
    ###########################################################################

    def binary_file_checks(self, f, session):
        cnf = Config()
        entry = self.pkg.files[f]

        # Extract package control information
        deb_file = utils.open_file(f)
        try:
            control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
        except:
            self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
            deb_file.close()
            # Can't continue, none of the checks on control would work.
            return

        # Check for mandatory "Description:"
        deb_file.seek(0)
        try:
            apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
        except:
            self.rejects.append("%s: Missing Description in binary package" % (f))
            return

        deb_file.close()

        # Check for mandatory fields
        for field in [ "Package", "Architecture", "Version" ]:
            if control.Find(field) == None:
                # Can't continue
                self.rejects.append("%s: No %s field in control." % (f, field))
                return

        # Ensure the package name matches the one given in the .changes
        if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
            self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))

        # Validate the package field
        package = control.Find("Package")
        if not re_valid_pkg_name.match(package):
            self.rejects.append("%s: invalid package name '%s'." % (f, package))

        # Validate the version field
        version = control.Find("Version")
        if not re_valid_version.match(version):
            self.rejects.append("%s: invalid version number '%s'." % (f, version))

        # Ensure the architecture of the .deb is one we know about.
        default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
        architecture = control.Find("Architecture")
        upload_suite = self.pkg.changes["distribution"].keys()[0]

        if      architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session)] \
            and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session)]:
            self.rejects.append("Unknown architecture '%s'." % (architecture))

        # Ensure the architecture of the .deb is one of the ones
        # listed in the .changes.
        if not self.pkg.changes["architecture"].has_key(architecture):
            self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))

        # Sanity-check the Depends field
        depends = control.Find("Depends")
        if depends == '':
            self.rejects.append("%s: Depends field is empty." % (f))

        # Sanity-check the Provides field
        provides = control.Find("Provides")
        if provides:
            provide = re_spacestrip.sub('', provides)
            if provide == '':
                self.rejects.append("%s: Provides field is empty." % (f))
            prov_list = provide.split(",")
            for prov in prov_list:
                if not re_valid_pkg_name.match(prov):
                    self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))

        # Check the section & priority match those given in the .changes (non-fatal)
        if     control.Find("Section") and entry["section"] != "" \
           and entry["section"] != control.Find("Section"):
            self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
                                (f, control.Find("Section", ""), entry["section"]))
        if control.Find("Priority") and entry["priority"] != "" \
           and entry["priority"] != control.Find("Priority"):
            self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
                                (f, control.Find("Priority", ""), entry["priority"]))

        entry["package"] = package
        entry["architecture"] = architecture
        entry["version"] = version
        entry["maintainer"] = control.Find("Maintainer", "")

        if f.endswith(".udeb"):
            self.pkg.files[f]["dbtype"] = "udeb"
        elif f.endswith(".deb"):
            self.pkg.files[f]["dbtype"] = "deb"
        else:
            self.rejects.append("%s is neither a .deb nor a .udeb." % (f))

        entry["source"] = control.Find("Source", entry["package"])

        # Get the source version
        source = entry["source"]
        source_version = ""

        if source.find("(") != -1:
            m = re_extract_src_version.match(source)
            source = m.group(1)
            source_version = m.group(2)

        if not source_version:
            source_version = self.pkg.files[f]["version"]

        entry["source package"] = source
        entry["source version"] = source_version

        # Ensure the filename matches the contents of the .deb
        m = re_isadeb.match(f)

        #  package name
        file_package = m.group(1)
        if entry["package"] != file_package:
            self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
                                (f, file_package, entry["dbtype"], entry["package"]))
        epochless_version = re_no_epoch.sub('', control.Find("Version"))

        #  version
        file_version = m.group(2)
        if epochless_version != file_version:
            self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
                                (f, file_version, entry["dbtype"], epochless_version))

        #  architecture
        file_architecture = m.group(3)
        if entry["architecture"] != file_architecture:
            self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
                                (f, file_architecture, entry["dbtype"], entry["architecture"]))

        # Check for existent source
        source_version = entry["source version"]
        source_package = entry["source package"]
        if self.pkg.changes["architecture"].has_key("source"):
            if source_version != self.pkg.changes["version"]:
                self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
                                    (source_version, f, self.pkg.changes["version"]))
        else:
            # Check in the SQL database
            if not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
                # Check in one of the other directories
                source_epochless_version = re_no_epoch.sub('', source_version)
                dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
                if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
                    entry["byhand"] = 1
                elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
                    entry["new"] = 1
                else:
                    dsc_file_exists = False
                    for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
                        if cnf.has_key("Dir::Queue::%s" % (myq)):
                            if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
                                dsc_file_exists = True
                                break

                    if not dsc_file_exists:
                        self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))

        # Check the version and for file overwrites
        self.check_binary_against_db(f, session)

        # Temporarily disable contents generation until we change the table storage layout
        #b = Binary(f)
        #b.scan_package()
        #if len(b.rejects) > 0:
        #    for j in b.rejects:
        #        self.rejects.append(j)

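    # The filename/control cross-checks above rely on the package_version_arch
    # layout of .deb filenames; an illustrative sketch of the expected split:
    #
    #     m = re_isadeb.match("hello_2.8-1_amd64.deb")
    #     m.group(1), m.group(2), m.group(3)
    #     # -> ("hello", "2.8-1", "amd64"), compared against the control
    #     # file's Package, epochless Version and Architecture fields.
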
    def source_file_checks(self, f, session):
        entry = self.pkg.files[f]

        m = re_issource.match(f)
        if not m:
            return

        entry["package"] = m.group(1)
        entry["version"] = m.group(2)
        entry["type"] = m.group(3)

        # Ensure the source package name matches the Source field in the .changes
        if self.pkg.changes["source"] != entry["package"]:
            self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))

        # Ensure the source version matches the version in the .changes file
        if re_is_orig_source.match(f):
            changes_version = self.pkg.changes["chopversion2"]
        else:
            changes_version = self.pkg.changes["chopversion"]

        if changes_version != entry["version"]:
            self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))

        # Ensure the .changes lists source in the Architecture field
        if not self.pkg.changes["architecture"].has_key("source"):
            self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))

        # Check the signature of a .dsc file
        if entry["type"] == "dsc":
            # check_signature returns either:
            #  (None, [list, of, rejects]) or (signature, [])
            (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
            for j in rejects:
                self.rejects.append(j)

        entry["architecture"] = "source"

    def per_suite_file_checks(self, f, suite, session):
        cnf = Config()
        entry = self.pkg.files[f]

        # Skip byhand
        if entry.has_key("byhand"):
            return

        # Check we have fields we need to do these checks
        oktogo = True
        for m in ['component', 'package', 'priority', 'size', 'md5sum']:
            if not entry.has_key(m):
                self.rejects.append("file '%s' does not have field %s set" % (f, m))
                oktogo = False

        if not oktogo:
            return

        # Handle component mappings
        for m in cnf.ValueList("ComponentMappings"):
            (source, dest) = m.split()
            if entry["component"] == source:
                entry["original component"] = source
                entry["component"] = dest

        # Ensure the component is valid for the target suite
        if cnf.has_key("Suite::%s::Components" % (suite)) and \
           entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
            self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
            return

        # Validate the component
        if not get_component(entry["component"], session):
            self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
            return

        # See if the package is NEW
        if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
            entry["new"] = 1

        # Validate the priority
        if entry["priority"].find('/') != -1:
            self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))

        # Determine the location
        location = cnf["Dir::Pool"]
        l = get_location(location, entry["component"], session=session)
        if l is None:
            self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
            entry["location id"] = -1
        else:
            entry["location id"] = l.location_id

        # Check the md5sum & size against existing files (if any)
        entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])

        found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
                                         entry["size"], entry["md5sum"], entry["location id"])

        if found is None:
            self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
        elif found is False and poolfile is not None:
            self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
        else:
            if poolfile is None:
                entry["files id"] = None
            else:
                entry["files id"] = poolfile.file_id

        # Check for packages that have moved from one component to another
        entry['suite'] = suite
        res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
        if res.rowcount > 0:
            entry["othercomponents"] = res.fetchone()[0]

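    # ComponentMappings entries are "source dest" pairs; an illustrative
    # (hypothetical) configuration:
    #
    #     ComponentMappings
    #     {
    #       "non-US/main main";
    #       "non-US/contrib contrib";
    #     };
    #
    # A file uploaded with component "non-US/main" would then be filed under
    # "main", with the original component remembered in "original component".
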
    def check_files(self, action=True):
        file_keys = self.pkg.files.keys()
        holding = Holding()
        cnf = Config()

        if action:
            cwd = os.getcwd()
            os.chdir(self.pkg.directory)
            for f in file_keys:
                ret = holding.copy_to_holding(f)
                if ret is not None:
                    self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)

            os.chdir(cwd)

        # check we already know the changes file
        # [NB: this check must be done post-suite mapping]
        base_filename = os.path.basename(self.pkg.changes_file)

        session = DBConn().session()

        try:
            dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
            # if in the pool or in a queue other than unchecked, reject
            if (dbc.in_queue is None) \
                   or (dbc.in_queue is not None
                       and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
                self.rejects.append("%s file already known to dak" % base_filename)
        except NoResultFound, e:
            # not known, good
            pass

        has_binaries = False
        has_source = False

        for f, entry in self.pkg.files.items():
            # Ensure the file does not already exist in one of the accepted directories
            for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                if not cnf.has_key("Dir::Queue::%s" % (d)): continue
                if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d)], f)):
                    self.rejects.append("%s file already exists in the %s directory." % (f, d))

            if not re_taint_free.match(f):
                self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))

            # Check the file is readable
            if os.access(f, os.R_OK) == 0:
                # When running in -n, copy_to_holding() won't have
                # generated the reject_message, so we need to.
                if action:
                    if os.path.exists(f):
                        self.rejects.append("Can't read `%s'. [permission denied]" % (f))
                    else:
                        # Don't directly reject, mark to check later to deal with orig's
                        # we can find in the pool
                        self.later_check_files.append(f)
                entry["type"] = "unreadable"
                continue

            # If it's byhand skip remaining checks
            if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Checks for a binary package...
            elif re_isadeb.match(f):
                has_binaries = True
                entry["type"] = "deb"

                # This routine appends to self.rejects/warnings as appropriate
                self.binary_file_checks(f, session)

            # Checks for a source package...
            elif re_issource.match(f):
                has_source = True

                # This routine appends to self.rejects/warnings as appropriate
                self.source_file_checks(f, session)

            # Not a binary or source package?  Assume byhand...
            else:
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Per-suite file checks
            entry["oldfiles"] = {}
            for suite in self.pkg.changes["distribution"].keys():
                self.per_suite_file_checks(f, suite, session)

        session.close()

        # If the .changes file says it has source, it must have source.
        if self.pkg.changes["architecture"].has_key("source"):
            if not has_source:
                self.rejects.append("no source found although the Architecture line in the changes mentions source.")

            if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
                self.rejects.append("source only uploads are not supported.")

    ###########################################################################
    def check_dsc(self, action=True, session=None):
        """Returns bool indicating whether or not the source changes are valid"""
        # Ensure there is source to check
        if not self.pkg.changes["architecture"].has_key("source"):
            return True

        # Find the .dsc
        dsc_filename = None
        for f, entry in self.pkg.files.items():
            if entry["type"] == "dsc":
                if dsc_filename:
                    self.rejects.append("can not process a .changes file with multiple .dsc's.")
                    return False
                else:
                    dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            self.rejects.append("source uploads must contain a dsc file")
            return False

        # Parse the .dsc file
        try:
            self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1))
        except CantOpenError:
            # if not -n copy_to_holding() will have done this for us...
            if not action:
                self.rejects.append("%s: can't read file." % (dsc_filename))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
        except InvalidDscError, line:
            self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
        except ChangesUnicodeError:
            self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))

        # Build up the file list of files mentioned by the .dsc
        try:
            self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
        except NoFilesFieldError:
            self.rejects.append("%s: no Files: field." % (dsc_filename))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
            return False

        # Enforce mandatory fields
        for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
            if not self.pkg.dsc.has_key(i):
                self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
                return False

        # Validate the source and version fields
        if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
        if not re_valid_version.match(self.pkg.dsc["version"]):
            self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))

        # Only a limited list of source formats are allowed in each suite
        for dist in self.pkg.changes["distribution"].keys():
            allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
            if self.pkg.dsc["format"] not in allowed:
                self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))

        # Validate the Maintainer field
        try:
            # We ignore the return value
            fix_maintainer(self.pkg.dsc["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                                 % (dsc_filename, self.pkg.dsc["maintainer"], msg))

        # Validate the build-depends field(s)
        for field_name in [ "build-depends", "build-depends-indep" ]:
            field = self.pkg.dsc.get(field_name)
            if field:
                # Have apt try to parse them...
                try:
                    apt_pkg.ParseSrcDepends(field)
                except:
                    self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))

        # Ensure the version number in the .dsc matches the version number in the .changes
        epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        changes_version = self.pkg.files[dsc_filename]["version"]

        if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
            self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))

        # Ensure the Files field contain only what's expected
        self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))

        # Ensure source is newer than existing source in target suites
        session = DBConn().session()
        self.check_source_against_db(dsc_filename, session)
        self.check_dsc_against_db(dsc_filename, session)
        session.close()

        # Finally, check if we're missing any files
        for f in self.later_check_files:
            self.rejects.append("Could not find file %s referenced in changes" % f)

        return True

    ###########################################################################

    def get_changelog_versions(self, source_dir):
        """Extracts the source package and (optionally) grabs the
        version history out of debian/changelog for the BTS."""

        cnf = Config()

        # Find the .dsc (again)
        dsc_filename = None
        for f in self.pkg.files.keys():
            if self.pkg.files[f]["type"] == "dsc":
                dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            return

        # Create a symlink mirror of the source files in our temporary directory
        for f in self.pkg.files.keys():
            m = re_issource.match(f)
            if m:
                src = os.path.join(source_dir, f)
                # If a file is missing for whatever reason, give up.
                if not os.path.exists(src):
                    return
                ftype = m.group(3)
                if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
                   self.pkg.orig_files[f].has_key("path"):
                    continue
                dest = os.path.join(os.getcwd(), f)
                os.symlink(src, dest)

        # If the orig files are not a part of the upload, create symlinks to the
        # existing copies.
        for orig_file in self.pkg.orig_files.keys():
            if not self.pkg.orig_files[orig_file].has_key("path"):
                continue
            dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
            os.symlink(self.pkg.orig_files[orig_file]["path"], dest)

        # Extract the source
        cmd = "dpkg-source -sn -x %s" % (dsc_filename)
        (result, output) = commands.getstatusoutput(cmd)
        if (result != 0):
            self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
            self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
            return

        if not cnf.Find("Dir::Queue::BTSVersionTrack"):
            return

        # Get the upstream version
        upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        if re_strip_revision.search(upstr_version):
            upstr_version = re_strip_revision.sub('', upstr_version)

        # Ensure the changelog file exists
        changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
        if not os.path.exists(changelog_filename):
            self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
            return

        # Parse the changelog
        self.pkg.dsc["bts changelog"] = ""
        changelog_file = utils.open_file(changelog_filename)
        for line in changelog_file.readlines():
            m = re_changelog_versions.match(line)
            if m:
                self.pkg.dsc["bts changelog"] += line
        changelog_file.close()

        # Check we found at least one revision in the changelog
        if not self.pkg.dsc["bts changelog"]:
            self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))

    def check_source(self):
        # Bail out if:
        #    a) there's no source
        if not self.pkg.changes["architecture"].has_key("source"):
            return

        tmpdir = utils.temp_dirname()

        # Move into the temporary directory
        cwd = os.getcwd()
        os.chdir(tmpdir)

        # Get the changelog version history
        self.get_changelog_versions(cwd)

        # Move back and cleanup the temporary tree
        os.chdir(cwd)

        try:
            shutil.rmtree(tmpdir)
        except OSError, e:
            if e.errno != errno.EACCES:
                utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))

            self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
            # We probably have u-r or u-w directories so chmod everything
            # and try again.
            cmd = "chmod -R u+rwx %s" % (tmpdir)
            result = os.system(cmd)
            if result != 0:
                utils.fubar("'%s' failed with result %s." % (cmd, result))
            shutil.rmtree(tmpdir)
        except Exception, e:
            print "rmtree failed (%s)" % e
            utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))

    ###########################################################################
    def ensure_hashes(self):
        # Make sure we recognise the format of the Files: field in the .changes
        format = self.pkg.changes.get("format", "0.0").split(".", 1)
        if len(format) == 2:
            format = int(format[0]), int(format[1])
        else:
            format = int(float(format[0])), 0

        # We need to deal with the original changes blob, as the fields we need
        # might not be in the changes dict serialised into the .dak anymore.
        orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])

        # Copy the checksums over to the current changes dict.  This will keep
        # the existing modifications to it intact.
        for field in orig_changes:
            if field.startswith('checksums-'):
                self.pkg.changes[field] = orig_changes[field]

        # Check for unsupported hashes
        for j in utils.check_hash_fields(".changes", self.pkg.changes):
            self.rejects.append(j)

        for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
            self.rejects.append(j)

        # We have to calculate the hash if we have an earlier changes version than
        # the hash appears in rather than require it exist in the changes file
        for hashname, hashfunc, version in utils.known_hashes:
            # TODO: Move _ensure_changes_hash into this class
            for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
                self.rejects.append(j)
            if "source" in self.pkg.changes["architecture"]:
                # TODO: Move _ensure_dsc_hash into this class
                for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
                    self.rejects.append(j)

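    # Illustrative sketch of the Format parsing above:
    #
    #     "1.8".split(".", 1)  ->  ["1", "8"]  ->  format == (1, 8)
    #     "1".split(".", 1)    ->  ["1"]       ->  format == (1, 0)
    #
    # utils._ensure_changes_hash can then decide whether a checksum field is
    # expected for that changes-format version or has to be computed locally.
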
    def check_hashes(self):
        for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
            self.rejects.append(m)

        for m in utils.check_size(".changes", self.pkg.files):
            self.rejects.append(m)

        for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
            self.rejects.append(m)

        for m in utils.check_size(".dsc", self.pkg.dsc_files):
            self.rejects.append(m)

        self.ensure_hashes()

    ###########################################################################

    def ensure_orig(self, target_dir='.', session=None):
        """
        Ensures that all orig files mentioned in the changes file are present
        in target_dir. If they do not exist, they are symlinked into place.

        A list containing the symlinks that were created is returned (so they
        can be removed).
        """

        symlinked = []
        cnf = Config()

        for filename, entry in self.pkg.dsc_files.iteritems():
            if not re_is_orig_source.match(filename):
                # File is not an orig; ignore
                continue

            if os.path.exists(filename):
                # File exists, no need to continue
                continue

            def symlink_if_valid(path):
                f = utils.open_file(path)
                md5sum = apt_pkg.md5sum(f)
                f.close()

                fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
                expected = (int(entry['size']), entry['md5sum'])

                if fingerprint != expected:
                    return False

                dest = os.path.join(target_dir, filename)

                os.symlink(path, dest)
                symlinked.append(dest)

                return True

            session_ = session
            if session is None:
                session_ = DBConn().session()

            found = False

            # Look in the pool
            for poolfile in get_poolfile_like_name('/%s' % filename, session_):
                poolfile_path = os.path.join(
                    poolfile.location.path, poolfile.filename
                )

                if symlink_if_valid(poolfile_path):
                    found = True
                    break

            if session is None:
                session_.close()

            if found:
                continue

            # Look in some other queues for the file
            queues = ('New', 'Byhand', 'ProposedUpdates',
                'OldProposedUpdates', 'Embargoed', 'Unembargoed')

            for queue in queues:
                if not cnf.get('Dir::Queue::%s' % queue):
                    continue

                queuefile_path = os.path.join(
                    cnf['Dir::Queue::%s' % queue], filename
                )

                if not os.path.exists(queuefile_path):
                    # Does not exist in this queue
                    continue

                if symlink_if_valid(queuefile_path):
                    break

        return symlinked

1269     ###########################################################################
1270
1271     def check_lintian(self):
1272         """
1273         Extends self.rejects by checking the output of lintian against tags
1274         specified in Dinstall::LintianTags.
1275         """
1276
1277         cnf = Config()
1278
1279         # Don't reject binary uploads
1280         if not self.pkg.changes['architecture'].has_key('source'):
1281             return
1282
1283         # Only check some distributions
1284         for dist in ('unstable', 'experimental'):
1285             if dist in self.pkg.changes['distribution']:
1286                 break
1287         else:
1288             return
1289
1290         # If we do not have a tagfile, don't do anything
1291         tagfile = cnf.get("Dinstall::LintianTags")
1292         if tagfile is None:
1293             return
1294
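             # The tag file is expected to be YAML with the tags to check
             # grouped into categories under a top-level 'lintian' key,
             # along these lines (category names and tags illustrative):
             #
             #   lintian:
             #     warning:
             #       - some-tag
             #     error:
             #       - some-other-tag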
1295         # Parse the yaml file
1296         sourcefile = file(tagfile, 'r')
1297         sourcecontent = sourcefile.read()
1298         sourcefile.close()
1299
1300         try:
1301             lintiantags = yaml.load(sourcecontent)['lintian']
1302         except yaml.YAMLError, msg:
1303             utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
1304             return
1305
1306         # Try and find all orig mentioned in the .dsc
1307         symlinked = self.ensure_orig()
1308
1309         # Setup the input file for lintian
1310         fd, temp_filename = utils.temp_filename()
1311         temptagfile = os.fdopen(fd, 'w')
1312         for tags in lintiantags.values():
1313             temptagfile.writelines(['%s\n' % x for x in tags])
1314         temptagfile.close()
1315
1316         try:
1317             cmd = "lintian --show-overrides --tags-from-file %s %s" % \
1318                 (temp_filename, self.pkg.changes_file)
1319
1320             result, output = commands.getstatusoutput(cmd)
1321         finally:
1322             # Remove our tempfile and any symlinks we created
1323             os.unlink(temp_filename)
1324
1325             for symlink in symlinked:
1326                 os.unlink(symlink)
1327
1328         if result == 2:
1329             utils.warn("lintian failed for %s [return code: %s]." % \
1330                 (self.pkg.changes_file, result))
1331             utils.warn(utils.prefix_multi_line_string(output, \
1332                 " [possible output:] "))
1333
1334         def log(*txt):
1335             if self.logger:
1336                 self.logger.log(
1337                     [self.pkg.changes_file, "check_lintian"] + list(txt)
1338                 )
1339
1340         # Generate messages
1341         parsed_tags = parse_lintian_output(output)
1342         self.rejects.extend(
1343             generate_reject_messages(parsed_tags, lintiantags, log=log)
1344         )
1345
1346     ###########################################################################
1347     def check_urgency(self):
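             """
             Ensure sourceful uploads carry a valid, lower-cased urgency in
             the changes dict, falling back to Urgency::Default (with a
             warning) when the field is missing or not in Urgency::Valid.
             """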
1348         cnf = Config()
1349         if self.pkg.changes["architecture"].has_key("source"):
1350             if not self.pkg.changes.has_key("urgency"):
1351                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1352             self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1353             if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1354                 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1355                                      (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1356                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1357
1358     ###########################################################################
1359
1360     # Sanity check the time stamps of files inside debs.
1361     # [Files in the near future cause ugly warnings and extreme time
1362     #  travel can cause errors on extraction]
1363
1364     def check_timestamps(self):
1365         Cnf = Config()
1366
1367         future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1368         past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1369         tar = TarTime(future_cutoff, past_cutoff)
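             # Anything stamped more than FutureTimeTravelGrace seconds into
             # the future, or before January 1st of PastCutoffYear (e.g. a
             # configured "1975" - the value here is illustrative), gets
             # collected by the TarTime callback below.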
1370
1371         for filename, entry in self.pkg.files.items():
1372             if entry["type"] == "deb":
1373                 tar.reset()
1374                 try:
1375                     deb_file = utils.open_file(filename)
1376                     apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
1377                     deb_file.seek(0)
1378                     try:
1379                         apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
1380                     except SystemError, e:
1381                         # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
1382                         if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
1383                             raise
1384                         deb_file.seek(0)
1385                         apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")
1386
1387                     deb_file.close()
1388
1389                     future_files = tar.future_files.keys()
1390                     if future_files:
1391                         num_future_files = len(future_files)
1392                         future_file = future_files[0]
1393                         future_date = tar.future_files[future_file]
1394                         self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1395                                % (filename, num_future_files, future_file, time.ctime(future_date)))
1396
1397                     ancient_files = tar.ancient_files.keys()
1398                     if ancient_files:
1399                         num_ancient_files = len(ancient_files)
1400                         ancient_file = ancient_files[0]
1401                         ancient_date = tar.ancient_files[ancient_file]
1402                         self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1403                                % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1404                 except:
1405                     self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % ((filename,) + sys.exc_info()[:2]))
1406
1407     def check_if_upload_is_sponsored(self, uid_email, uid_name):
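             """
             Work out whether this upload is sponsored, i.e. whether the uid
             of the signing key matches neither the Maintainer nor the
             Changed-By of the .changes.  For sponsored sourceful uploads
             signed with an e-mail alias, a sponsor address is also recorded
             in the changes dict.  Returns True if the upload is sponsored.
             """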
1408         if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1409             sponsored = False
1410         elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1411             sponsored = False
1412             if uid_name == "":
1413                 sponsored = True
1414         else:
1415             sponsored = True
1416             if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1417                 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1418                 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1419                     self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1420                     self.pkg.changes["sponsoremail"] = uid_email
1421
1422         return sponsored
1423
1424
1425     ###########################################################################
1426     # check_signed_by_key checks
1427     ###########################################################################
1428
1429     def check_signed_by_key(self):
1430         """Ensure the .changes is signed by an authorized uploader."""
1431         session = DBConn().session()
1432
1433         # First of all we check that the person has proper upload permissions
1434         # and that this upload isn't blocked
1435         fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1436
1437         if fpr is None:
1438             self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1439             return
1440
1441         # TODO: Check that import-keyring adds UIDs properly
1442         if not fpr.uid:
1443             self.rejects.append("Cannot find uid for fingerprint %s.  Please contact ftpmaster@debian.org" % fpr.fingerprint)
1444             return
1445
1446         # Check that the fingerprint which uploaded has permission to do so
1447         self.check_upload_permissions(fpr, session)
1448
1449         # Check that this package is not in a transition
1450         self.check_transition(session)
1451
1452         session.close()
1453
1454
1455     def check_upload_permissions(self, fpr, session):
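             """
             Check that the uploading fingerprint may perform this upload:
             one-off upload blocks, the DM special case, source upload
             rights and per-architecture binary rights (via the
             fingerprint's ACLs).
             """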
1456         # Check any one-off upload blocks
1457         self.check_upload_blocks(fpr, session)
1458
1459         # Start with DM as a special case: unfortunately DMs need
1460         # different handling, so we check them first
1461         # (keys with no source access get more access than DMs in one
1462         #  way; DMs can only upload for their packages whether source
1463         #  or binary, whereas keys with no access might be able to
1464         #  upload some binaries)
1465         if fpr.source_acl.access_level == 'dm':
1466             self.check_dm_upload(fpr, session)
1467         else:
1468             # Check source-based permissions for other types
1469             if self.pkg.changes["architecture"].has_key("source") and \
1470                 fpr.source_acl.access_level is None:
1471                 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1472                 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1473                 self.rejects.append(rej)
1474                 return
1475             # If not a DM, we allow full upload rights
1476             uid_email = "%s@debian.org" % (fpr.uid.uid)
1477             self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1478
1479
1480         # Check binary upload permissions
1481         # By this point we know that DMs can't have got here unless they
1482         # are allowed to deal with the package concerned so just apply
1483         # normal checks
1484         if fpr.binary_acl.access_level == 'full':
1485             return
1486
1487         # Otherwise we're in the map case
1488         tmparches = self.pkg.changes["architecture"].copy()
1489         tmparches.pop('source', None)
1490
1491         for bam in fpr.binary_acl_map:
1492             tmparches.pop(bam.architecture.arch_string, None)
1493
1494         if tmparches:
1495             if fpr.binary_reject:
1496                 rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1497                 rej += "\narchitectures involved are: %s" % ",".join(tmparches.keys())
1498                 self.rejects.append(rej)
1499             else:
1500                 # TODO: This is where we'll implement reject vs throw away binaries later
1501                 rej = "Uhm.  I'm meant to throw away the binaries now but that's not implemented yet"
1502                 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1503                 rej += "\nFingerprint: %s" % (fpr.fingerprint)
1504                 self.rejects.append(rej)
1505
1506
1507     def check_upload_blocks(self, fpr, session):
1508         """Check whether any upload blocks apply to this source, source
1509            version, uid / fpr combination"""
1510
1511         def block_rej_template(fb):
1512             rej = 'Manual upload block in place for package %s' % fb.source
1513             if fb.version is not None:
1514                 rej += ', version %s' % fb.version
1515             return rej
1516
1517         for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1518             # version is None if the block applies to all versions
1519             if fb.version is None or fb.version == self.pkg.changes['version']:
1520                 # Check both fpr and uid - either is enough to cause a reject
1521                 if fb.fpr is not None:
1522                     if fb.fpr.fingerprint == fpr.fingerprint:
1523                         self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1524                 if fb.uid is not None:
1525                     if fb.uid == fpr.uid:
1526                         self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1527
1528
1529     def check_dm_upload(self, fpr, session):
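             """
             Enforce the DM upload conditions mandated by the 2007 GR; the
             individual conditions are quoted inline below.
             """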
1530         # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
1531         ## none of the uploaded packages are NEW
1532         rej = False
1533         for f in self.pkg.files.keys():
1534             if self.pkg.files[f].has_key("byhand"):
1535                 self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
1536                 rej = True
1537             if self.pkg.files[f].has_key("new"):
1538                 self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
1539                 rej = True
1540
1541         if rej:
1542             return
1543
1544         ## the most recent version of the package uploaded to unstable or
1545         ## experimental includes the field "DM-Upload-Allowed: yes" in the source
1546         ## section of its control file
1547         q = session.query(DBSource).filter_by(source=self.pkg.changes["source"])
1548         q = q.join(SrcAssociation)
1549         q = q.join(Suite).filter(Suite.suite_name.in_(['unstable', 'experimental']))
1550         q = q.order_by(desc('source.version')).limit(1)
1551
1552         r = q.all()
1553
1554         if len(r) != 1:
1555             rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
1556             self.rejects.append(rej)
1557             return
1558
1559         r = r[0]
1560         if not r.dm_upload_allowed:
1561             rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
1562             self.rejects.append(rej)
1563             return
1564
1565         ## the Maintainer: field of the uploaded .changes file corresponds with
1566         ## the owner of the key used (ie, non-developer maintainers may not sponsor
1567         ## uploads)
1568         if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
1569             self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
1570
1571         ## the most recent version of the package uploaded to unstable or
1572         ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
1573         ## non-developer maintainers cannot NMU or hijack packages)
1574
1575         # srcuploaders includes the maintainer
1576         accept = False
1577         for sup in r.srcuploaders:
1578             (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
1579             # Eww - I hope we never have two people with the same name in Debian
1580             if email == fpr.uid.uid or name == fpr.uid.name:
1581                 accept = True
1582                 break
1583
1584         if not accept:
1585             self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
1586             return
1587
1588         ## none of the packages are being taken over from other source packages
1589         for b in self.pkg.changes["binary"].keys():
1590             for suite in self.pkg.changes["distribution"].keys():
1591                 q = session.query(DBSource)
1592                 q = q.join(DBBinary).filter_by(package=b)
1593                 q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)
1594
1595                 for s in q.all():
1596                     if s.source != self.pkg.changes["source"]:
1597                         self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s.source, suite))
1598
1599
1600
1601     def check_transition(self, session):
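             """
             Reject sourceful uploads to unstable for source packages listed
             in an ongoing testing transition, as configured via
             Dinstall::Reject::ReleaseTransitions.
             """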
1602         cnf = Config()
1603
1604         sourcepkg = self.pkg.changes["source"]
1605
1606         # No sourceful upload -> no need to do anything else, direct return
1607         # We also work with unstable uploads, not experimental or those going to some
1608         # proposed-updates queue
1609         if "source" not in self.pkg.changes["architecture"] or \
1610            "unstable" not in self.pkg.changes["distribution"]:
1611             return
1612
1613         # Also, only check if there is a file defined (and existent) with
1614         # checks.
1615         transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1616         if transpath == "" or not os.path.exists(transpath):
1617             return
1618
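             # The transitions file is a YAML mapping of transition name to
             # a description with the keys used below, along these lines
             # (all values illustrative):
             #
             #   libfoo2:
             #     reason: "libfoo soname bump"
             #     source: libfoo
             #     new: 2.0-1
             #     rm: Some Releaser
             #     packages:
             #       - bar
             #       - baz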
1619         # Parse the yaml file
1620         sourcefile = file(transpath, 'r')
1621         sourcecontent = sourcefile.read()
             sourcefile.close()
1622         try:
1623             transitions = yaml.load(sourcecontent)
1624         except yaml.YAMLError, msg:
1625             # This shouldn't happen, there is a wrapper to edit the file which
1626             # checks it, but we prefer to be safe rather than end up rejecting
1627             # everything.
1628             utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1629             return
1630
1631         # Now look through all defined transitions
1632         for trans in transitions:
1633             t = transitions[trans]
1634             source = t["source"]
1635             expected = t["new"]
1636
1637             # Will be None if nothing is in testing.
1638             current = get_source_in_suite(source, "testing", session)
1639             if current is not None:
1640                 compare = apt_pkg.VersionCompare(current.version, expected)
1641
1642             if current is None or compare < 0:
1643                 # This is still valid, the current version in testing is older than
1644                 # the new version we wait for, or there is none in testing yet
1645
1646                 # Check if the source we look at is affected by this.
1647                 if sourcepkg in t['packages']:
1648                     # The source is affected, lets reject it.
1649
1650                     rejectmsg = "%s: part of the %s transition.\n\n" % (
1651                         sourcepkg, trans)
1652
1653                     if current is not None:
1654                         currentlymsg = "at version %s" % (current.version)
1655                     else:
1656                         currentlymsg = "not present in testing"
1657
1658                     rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1659
1660                     rejectmsg += "\n".join(textwrap.wrap("""Your package
1661 is part of a testing transition designed to get %s migrated (it is
1662 currently %s, we need version %s).  This transition is managed by the
1663 Release Team, and %s is the Release-Team member responsible for it.
1664 Please mail debian-release@lists.debian.org or contact %s directly if you
1665 need further assistance.  You might want to upload to experimental until this
1666 transition is done."""
1667                             % (source, currentlymsg, expected, t["rm"], t["rm"])))
1668
1669                     self.rejects.append(rejectmsg)
1670                     return
1671
1672     ###########################################################################
1673     # End check_signed_by_key checks
1674     ###########################################################################
1675
1676     def build_summaries(self):
1677         """ Build a summary of changes the upload introduces. """
1678
1679         (byhand, new, summary, override_summary) = self.pkg.file_summary()
1680
1681         short_summary = summary
1682
1683         # This is for direport's benefit...
1684         f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1685
1686         if byhand or new:
1687             summary += "Changes: " + f
1688
1689         summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1690
1691         summary += self.announce(short_summary, 0)
1692
1693         return (summary, short_summary)
1694
1695     ###########################################################################
1696
1697     def close_bugs(self, summary, action):
1698         """
1699         Send mail to close bugs as instructed by the closes field in the changes file.
1700         Also add a line to summary if any work was done.
1701
1702         @type summary: string
1703         @param summary: summary text, as given by L{build_summaries}
1704
1705         @type action: bool
1706         @param action: If set to false, no real action will be done.
1707
1708         @rtype: string
1709         @return: summary. If action was taken, extended by the list of closed bugs.
1710
1711         """
1712
1713         template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1714
1715         bugs = self.pkg.changes["closes"].keys()
1716
1717         if not bugs:
1718             return summary
1719
1720         bugs.sort()
1721         summary += "Closing bugs: "
1722         for bug in bugs:
1723             summary += "%s " % (bug)
1724             if action:
1725                 self.update_subst()
1726                 self.Subst["__BUG_NUMBER__"] = bug
1727                 if self.pkg.changes["distribution"].has_key("stable"):
1728                     self.Subst["__STABLE_WARNING__"] = """
1729 Note that this package is not part of the released stable Debian
1730 distribution.  It may have dependencies on other unreleased software,
1731 or other instabilities.  Please take care if you wish to install it.
1732 The update will eventually make its way into the next released Debian
1733 distribution."""
1734                 else:
1735                     self.Subst["__STABLE_WARNING__"] = ""
1736                 mail_message = utils.TemplateSubst(self.Subst, template)
1737                 utils.send_mail(mail_message)
1738
1739                 # Clear up after ourselves
1740                 del self.Subst["__BUG_NUMBER__"]
1741                 del self.Subst["__STABLE_WARNING__"]
1742
1743         if action and self.logger:
1744             self.logger.log(["closing bugs"] + bugs)
1745
1746         summary += "\n"
1747
1748         return summary
1749
1750     ###########################################################################
1751
1752     def announce(self, short_summary, action):
1753         """
1754         Send an announce mail about a new upload.
1755
1756         @type short_summary: string
1757         @param short_summary: Short summary text to include in the mail
1758
1759         @type action: bool
1760         @param action: If set to false, no real action will be done.
1761
1762         @rtype: string
1763         @return: Textstring about action taken.
1764
1765         """
1766
1767         cnf = Config()
1768         announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
1769
1770         # Only do announcements for source uploads with a recent dpkg-dev installed
1771         if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
1772            self.pkg.changes["architecture"].has_key("source"):
1773             return ""
1774
1775         lists_done = {}
1776         summary = ""
1777
1778         self.Subst["__SHORT_SUMMARY__"] = short_summary
1779
1780         for dist in self.pkg.changes["distribution"].keys():
1781             announce_list = cnf.Find("Suite::%s::Announce" % (dist))
1782             if announce_list == "" or lists_done.has_key(announce_list):
1783                 continue
1784
1785             lists_done[announce_list] = 1
1786             summary += "Announcing to %s\n" % (announce_list)
1787
1788             if action:
1789                 self.update_subst()
1790                 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
1791                 if cnf.get("Dinstall::TrackingServer") and \
1792                    self.pkg.changes["architecture"].has_key("source"):
1793                     trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
1794                     self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
1795
1796                 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
1797                 utils.send_mail(mail_message)
1798
1799                 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
1800
1801         if cnf.FindB("Dinstall::CloseBugs"):
1802             summary = self.close_bugs(summary, action)
1803
1804         del self.Subst["__SHORT_SUMMARY__"]
1805
1806         return summary
1807
1808     ###########################################################################
1809     @session_wrapper
1810     def accept (self, summary, short_summary, session=None):
1811         """
1812         Accept an upload.
1813
1814         This moves all files referenced from the .changes into the pool,
1815         sends the accepted mail, announces to lists, closes bugs and
1816         also checks for override disparities. If enabled it will write out
1817         the version history for the BTS Version Tracking and will finally call
1818         L{queue_build}.
1819
1820         @type summary: string
1821         @param summary: Summary text
1822
1823         @type short_summary: string
1824         @param short_summary: Short summary
1825         """
1826
1827         cnf = Config()
1828         stats = SummaryStats()
1829
1830         print "Installing."
1831         self.logger.log(["installing changes", self.pkg.changes_file])
1832
1833         poolfiles = []
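             # PoolFile objects referenced by this upload; used further down
             # to populate the per-suite copy queues (e.g. buildd queues).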
1834
1835         # Add the .dsc file to the DB first
1836         for newfile, entry in self.pkg.files.items():
1837             if entry["type"] == "dsc":
1838                 source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
1839                 for j in pfs:
1840                     poolfiles.append(j)
1841
1842         # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
1843         for newfile, entry in self.pkg.files.items():
1844             if entry["type"] == "deb":
1845                 poolfiles.append(add_deb_to_db(self, newfile, session))
1846
1847         # If this is a sourceful diff only upload that is moving
1848         # cross-component we need to copy the .orig files into the new
1849         # component too for the same reasons as above.
1850         # XXX: mhy: I think this should be in add_dsc_to_db
1851         if self.pkg.changes["architecture"].has_key("source"):
1852             for orig_file in self.pkg.orig_files.keys():
1853                 if not self.pkg.orig_files[orig_file].has_key("id"):
1854                     continue # Skip if it's not in the pool
1855                 orig_file_id = self.pkg.orig_files[orig_file]["id"]
1856                 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
1857                     continue # Skip if the location didn't change
1858
1859                 # Do the move
1860                 oldf = get_poolfile_by_id(orig_file_id, session)
1861                 old_filename = os.path.join(oldf.location.path, oldf.filename)
1862                 old_dat = {'size': oldf.filesize,   'md5sum': oldf.md5sum,
1863                            'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
1864
1865                 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
1866
1867                 # TODO: Care about size/md5sum collisions etc
1868                 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
1869
1870                 # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
1871                 if newf is None:
1872                     utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
1873                     newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
1874
1875                     session.flush()
1876
1877                     # Don't reference the old file from this changes; build a
1878                     # new list rather than removing from the one we are
1879                     # iterating over.
1880                     poolfiles = [p for p in poolfiles if p.file_id != oldf.file_id]
1881
1882                     poolfiles.append(newf)
1883
1884                     # Fix up the DSC references
1885                     toremove = []
1886
1887                     for df in source.srcfiles:
1888                         if df.poolfile.file_id == oldf.file_id:
1889                             # Add a new DSC entry and mark the old one for deletion
1890                             # Don't do it in the loop so we don't change the thing we're iterating over
1891                             newdscf = DSCFile()
1892                             newdscf.source_id = source.source_id
1893                             newdscf.poolfile_id = newf.file_id
1894                             session.add(newdscf)
1895
1896                             toremove.append(df)
1897
1898                     for df in toremove:
1899                         session.delete(df)
1900
1901                     # Flush our changes
1902                     session.flush()
1903
1904                     # Make sure that our source object is up-to-date
1905                     session.expire(source)
1906
1907         # Install the files into the pool
1908         for newfile, entry in self.pkg.files.items():
1909             destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
1910             utils.move(newfile, destination)
1911             self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
1912             stats.accept_bytes += float(entry["size"])
1913
1914         # Copy the .changes file across for suites which need it.
1915         copy_changes = {}
1916         for suite_name in self.pkg.changes["distribution"].keys():
1917             if cnf.has_key("Suite::%s::CopyChanges" % (suite_name)):
1918                 copy_changes[cnf["Suite::%s::CopyChanges" % (suite_name)]] = ""
1919
1920         for dest in copy_changes.keys():
1921             utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
1922
1923         # We're done - commit the database changes
1924         session.commit()
1925         # Our SQL session will automatically start a new transaction after
1926         # the last commit
1927
1928         # Move the .changes into the 'done' directory
1929         utils.move(self.pkg.changes_file,
1930                    os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))
1931
1932         if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
1933             UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
1934
1935         self.update_subst()
1936         self.Subst["__SUITE__"] = ""
1937         self.Subst["__SUMMARY__"] = summary
1938         mail_message = utils.TemplateSubst(self.Subst,
1939                                            os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
1940         utils.send_mail(mail_message)
1941         self.announce(short_summary, 1)
1942
1943         ## Helper stuff for DebBugs Version Tracking
1944         if cnf.Find("Dir::Queue::BTSVersionTrack"):
1945             if self.pkg.changes["architecture"].has_key("source"):
1946                 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
1947                 version_history = os.fdopen(fd, 'w')
1948                 version_history.write(self.pkg.dsc["bts changelog"])
1949                 version_history.close()
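                 # [:-8] strips the trailing ".changes" from the filename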
1950                 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
1951                                       self.pkg.changes_file[:-8]+".versions")
1952                 os.rename(temp_filename, filename)
1953                 os.chmod(filename, 0644)
1954
1955             # Write out the binary -> source mapping.
1956             (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
1957             debinfo = os.fdopen(fd, 'w')
1958             for name, entry in sorted(self.pkg.files.items()):
1959                 if entry["type"] == "deb":
1960                     line = " ".join([entry["package"], entry["version"],
1961                                      entry["architecture"], entry["source package"],
1962                                      entry["source version"]])
1963                     debinfo.write(line+"\n")
1964             debinfo.close()
1965             filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
1966                                   self.pkg.changes_file[:-8]+".debinfo")
1967             os.rename(temp_filename, filename)
1968             os.chmod(filename, 0644)
1969
1970         session.commit()
1971
1972         # Set up our copy queues (e.g. buildd queues)
1973         for suite_name in self.pkg.changes["distribution"].keys():
1974             suite = get_suite(suite_name, session)
1975             for q in suite.copy_queues:
1976                 for f in poolfiles:
1977                     q.add_file_from_pool(f)
1978
1979         session.commit()
1980
1981         # Finally...
1982         stats.accept_count += 1
1983
1984     def check_override(self):
1985         """
1986         Checks override entries for validity. Mails "Override disparity" warnings,
1987         if that feature is enabled.
1988
1989         Abandons the check if
1990           - override disparity checks are disabled
1991           - mail sending is disabled
1992         """
1993
1994         cnf = Config()
1995
1996         # Abandon the check if override disparity checks have been disabled
1997         if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
1998             return
1999
2000         summary = self.pkg.check_override()
2001
2002         if summary == "":
2003             return
2004
2005         overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2006
2007         self.update_subst()
2008         self.Subst["__SUMMARY__"] = summary
2009         mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2010         utils.send_mail(mail_message)
2011         del self.Subst["__SUMMARY__"]
2012
2013     ###########################################################################
2014
2015     def remove(self, from_dir=None):
2016         """
2017         Used (for instance) in p-u to remove the package from unchecked
2018
2019         Also removes the package from holding area.
2020         """
2021         if from_dir is None:
2022             from_dir = self.pkg.directory
2023         h = Holding()
2024
2025         for f in self.pkg.files.keys():
2026             os.unlink(os.path.join(from_dir, f))
2027             if os.path.exists(os.path.join(h.holding_dir, f)):
2028                 os.unlink(os.path.join(h.holding_dir, f))
2029
2030         os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2031         if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2032             os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2033
2034     ###########################################################################
2035
2036     def move_to_queue (self, queue):
2037         """
2038         Move files to a destination queue using the permissions in the table
2039         """
2040         h = Holding()
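             # queue.perms / queue.change_perms come out of the database as
             # octal permission strings (e.g. "0644" - illustrative), hence
             # the int(x, 8) conversions.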
2041         utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2042                    queue.path, perms=int(queue.change_perms, 8))
2043         for f in self.pkg.files.keys():
2044             utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
2045
2046     ###########################################################################
2047
2048     def force_reject(self, reject_files):
2049         """
2050         Forcefully move files from the current directory to the
2051         reject directory.  If any file already exists in the reject
2052         directory it will be moved to the morgue to make way for
2053         the new file.
2054
2055         @type reject_files: list
2056         @param reject_files: file names to move to the reject directory
2057
2058         """
2059
2060         cnf = Config()
2061
2062         for file_entry in reject_files:
2063             # Skip any files which don't exist or which we don't have permission to copy.
2064             if not os.access(file_entry, os.R_OK):
2065                 continue
2066
2067             dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
2068
2069             try:
2070                 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
2071             except OSError, e:
2072                 # File exists?  Let's find a new name by adding a number
2073                 if e.errno == errno.EEXIST:
2074                     try:
2075                         dest_file = utils.find_next_free(dest_file, 255)
2076                     except NoFreeFilenameError:
2077                         # Something's either gone badly Pete Tong, or
2078                         # someone is trying to exploit us.
2079                         utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))
2080                         return
2081
2082                     # Make sure we really got it
2083                     try:
2084                         dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2085                     except OSError, e:
2086                         # Likewise
2087                         utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
2088                         return
2089                 else:
2090                     raise
2091             # If we got here, we own the destination file, so we can
2092             # safely overwrite it.
2093             utils.move(file_entry, dest_file, 1, perms=0660)
2094             os.close(dest_fd)
2095
2096     ###########################################################################
2097     def do_reject (self, manual=0, reject_message="", notes=""):
2098         """
2099         Reject an upload. If C{manual} is true and no reject message was
2100         given, spawn an editor so the user can write one.
2101
2102         @type manual: bool
2103         @param manual: manual or automated rejection
2104
2105         @type reject_message: string
2106         @param reject_message: A reject message
2107
2108         @return: 0 on success, 1 if a manual rejection was abandoned
2109
2110         """
2111         # If we weren't given a manual rejection message, spawn an
2112         # editor so the user can add one in...
2113         if manual and not reject_message:
2114             (fd, temp_filename) = utils.temp_filename()
2115             temp_file = os.fdopen(fd, 'w')
2116             if len(notes) > 0:
2117                 for note in notes:
2118                     temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
2119                                     % (note.author, note.version, note.notedate, note.comment))
2120             temp_file.close()
2121             editor = os.environ.get("EDITOR","vi")
2122             answer = 'E'
2123             while answer == 'E':
2124                 os.system("%s %s" % (editor, temp_filename))
2125                 temp_fh = utils.open_file(temp_filename)
2126                 reject_message = "".join(temp_fh.readlines())
2127                 temp_fh.close()
2128                 print "Reject message:"
2129                 print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
2130                 prompt = "[R]eject, Edit, Abandon, Quit ?"
2131                 answer = "XXX"
2132                 while prompt.find(answer) == -1:
2133                     answer = utils.our_raw_input(prompt)
2134                     m = re_default_answer.search(prompt)
2135                     if answer == "":
2136                         answer = m.group(1)
2137                     answer = answer[:1].upper()
2138             os.unlink(temp_filename)
2139             if answer == 'A':
2140                 return 1
2141             elif answer == 'Q':
2142                 sys.exit(0)
2143
2144         print "Rejecting.\n"
2145
2146         cnf = Config()
2147
2148         reason_filename = self.pkg.changes_file[:-8] + ".reason"
2149         reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2150
2151         # Move all the files into the reject directory
2152         reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2153         self.force_reject(reject_files)
2154
2155         # If we fail here someone is probably trying to exploit the race
2156         # so let's just raise an exception ...
2157         if os.path.exists(reason_filename):
2158             os.unlink(reason_filename)
2159         reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2160
2161         rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
2162
2163         self.update_subst()
2164         if not manual:
2165             self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2166             self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2167             self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2168             os.write(reason_fd, reject_message)
2169             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2170         else:
2171             # Build up the rejection email
2172             user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2173             self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2174             self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2175             self.Subst["__REJECT_MESSAGE__"] = ""
2176             self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2177             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2178             # Write the rejection email out as the <foo>.reason file
2179             os.write(reason_fd, reject_mail_message)
2180
2181         del self.Subst["__REJECTOR_ADDRESS__"]
2182         del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2183         del self.Subst["__CC__"]
2184
2185         os.close(reason_fd)
2186
2187         # Send the rejection mail
2188         utils.send_mail(reject_mail_message)
2189
2190         if self.logger:
2191             self.logger.log(["rejected", self.pkg.changes_file])
2192
2193         return 0
2194
2195     ################################################################################
2196     def in_override_p(self, package, component, suite, binary_type, filename, session):
2197         """
2198         Check if a package already has override entries in the DB
2199
2200         @type package: string
2201         @param package: package name
2202
2203         @type component: string
2204         @param component: component name
2205
2206         @type suite: string
2207         @param suite: suite name
2208
2209         @type binary_type: string
2210         @param binary_type: type of the package
2211
2212         @type filename: string
2213         @param filename: filename we check
2214
2215         @return: the database result. But no one cares anyway.
2216
2217         """
2218
2219         cnf = Config()
2220
2221         if binary_type == "": # must be source
2222             file_type = "dsc"
2223         else:
2224             file_type = binary_type
2225
2226         # Override suite name; used for example with proposed-updates
2227         if cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
2228             suite = cnf["Suite::%s::OverrideSuite" % (suite)]
2229
2230         result = get_override(package, suite, component, file_type, session)
2231
2232         # If checking for a source package fall back on the binary override type
2233         if file_type == "dsc" and len(result) < 1:
2234             result = get_override(package, suite, component, ['deb', 'udeb'], session)
2235
2236         # Remember the section and priority so we can check them later if appropriate
2237         if len(result) > 0:
2238             result = result[0]
2239             self.pkg.files[filename]["override section"] = result.section.section
2240             self.pkg.files[filename]["override priority"] = result.priority.priority
2241             return result
2242
2243         return None
2244
2245     ################################################################################
2246     def get_anyversion(self, sv_list, suite):
2247         """
2248         @type sv_list: list
2249         @param sv_list: list of (suite, version) tuples to check
2250
2251         @type suite: string
2252         @param suite: suite name
2253
2254         Returns the highest version found in C{suite} or any suite it enhances.
2255         """
2256         Cnf = Config()
2257         anyversion = None
2258         anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
2259         for (s, v) in sv_list:
2260             if s in [ x.lower() for x in anysuite ]:
2261                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2262                     anyversion = v
2263
2264         return anyversion
2265
2266     ################################################################################
2267
2268     def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2269         """
2270         @type sv_list: list
2271         @param sv_list: list of (suite, version) tuples to check
2272
2273         @type filename: string
2274         @param filename: name of the file being checked (used in messages)
2275
2276         @type new_version: string
2277         @param new_version: version of the uploaded package
2278
2279         Ensure versions are newer than existing packages in target
2280         suites and that cross-suite version checking rules as
2281         set out in the conf file are satisfied.
2282         """
2283
2284         cnf = Config()
2285
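             # The per-suite rules come out of the dak configuration, along
             # these lines (suite names illustrative):
             #
             #   Suite::unstable::VersionChecks::MustBeNewerThan { "stable"; "testing"; };
             #   Suite::unstable::VersionChecks::MustBeOlderThan { "experimental"; };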
2286         # Check versions for each target suite
2287         for target_suite in self.pkg.changes["distribution"].keys():
2288             must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
2289             must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
2290
2291             # Enforce "must be newer than target suite" even if conffile omits it
2292             if target_suite not in must_be_newer_than:
2293                 must_be_newer_than.append(target_suite)
2294
2295             for (suite, existent_version) in sv_list:
2296                 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
2297
2298                 if suite in must_be_newer_than and sourceful and vercmp < 1:
2299                     self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2300
2301                 if suite in must_be_older_than and vercmp > -1:
2302                     cansave = 0
2303
2304                     if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2305                         # we really use the other suite, ignoring the conflicting one ...
2306                         addsuite = self.pkg.changes["distribution-version"][suite]
2307
2308                         add_version = self.get_anyversion(sv_list, addsuite)
2309                         target_version = self.get_anyversion(sv_list, target_suite)
2310
2311                         if not add_version:
2312                             # not add_version can only happen if we map to a suite
2313                             # that doesn't enhance the suite we're propup'ing from.
2314                             # so "propup-ver x a b c; map a d" is a problem only if
2315                             # d doesn't enhance a.
2316                             #
2317                             # i think we could always propagate in this case, rather
2318                             # than complaining. either way, this isn't a REJECT issue
2319                             #
2320                             # And - we really should complain to the dorks who configured dak
2321                             self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite))
2322                             self.pkg.changes.setdefault("propdistribution", {})
2323                             self.pkg.changes["propdistribution"][addsuite] = 1
2324                             cansave = 1
2325                         elif not target_version:
2326                             # not target_version is true when the package is NEW
2327                             # we could just stick with the "...old version..." REJECT
2328                             # for this, I think.
2329                             self.rejects.append("Won't propagate NEW packages.")
2330                         elif apt_pkg.VersionCompare(new_version, add_version) < 0:
2331                             # propagation would be redundant. no need to reject though.
2332                             self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2333                             cansave = 1
2334                         elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2335                              apt_pkg.VersionCompare(add_version, target_version) >= 0:
2336                             # propagate!!
2337                             self.warnings.append("Propagating upload to %s" % (addsuite))
2338                             self.pkg.changes.setdefault("propdistribution", {})
2339                             self.pkg.changes["propdistribution"][addsuite] = 1
2340                             cansave = 1
2341
2342                     if not cansave:
2343                         self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2344
2345     ################################################################################
2346     def check_binary_against_db(self, filename, session):
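             """Reject the binary if its version violates the cross-suite
             version rules, or if the same package/version/architecture is
             already in the archive."""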
2347         # Ensure version is sane
2348         q = session.query(BinAssociation)
2349         q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[filename]["package"])
2350         q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[filename]["architecture"], 'all']))
2351
2352         self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
2353                                        filename, self.pkg.files[filename]["version"], sourceful=False)
2354
2355         # Check for any existing copies of the file
2356         q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2357         q = q.filter_by(version=self.pkg.files[filename]["version"])
2358         q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2359
2360         if q.count() > 0:
2361             self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
2362
2363     ################################################################################
2364
2365     def check_source_against_db(self, filename, session):
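             """Run the cross-suite version checks for the source package
             named in the .dsc."""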
2366         source = self.pkg.dsc.get("source")
2367         version = self.pkg.dsc.get("version")
2368
2369         # Ensure version is sane
2370         q = session.query(SrcAssociation)
2371         q = q.join(DBSource).filter(DBSource.source==source)
2372
2373         self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
2374                                        filename, version, sourceful=True)
2375
2376     ################################################################################
2377     def check_dsc_against_db(self, filename, session):
2378         """
2379
2380         @warning: NB: this function can remove entries from the 'files' index [if
2381          the orig tarball is a duplicate of the one in the archive]; if
2382          you're iterating over 'files' and call this function as part of
2383          the loop, be sure to add a check to the top of the loop to
2384          ensure you haven't just tried to dereference the deleted entry.
2385
2386         """
2387
2388         Cnf = Config()
2389         self.pkg.orig_files = {} # XXX: do we need to clear it?
2390         orig_files = self.pkg.orig_files
2391
2392         # Try and find all files mentioned in the .dsc.  This has
2393         # to work harder to cope with the multiple possible
2394         # locations of an .orig.tar.gz.
2395         # The ordering on the select is needed to pick the newest orig
2396         # when it exists in multiple places.
2397         for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2398             found = None
2399             if self.pkg.files.has_key(dsc_name):
2400                 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2401                 actual_size = int(self.pkg.files[dsc_name]["size"])
2402                 found = "%s in incoming" % (dsc_name)
2403
2404                 # Check the file does not already exist in the archive
2405                 ql = get_poolfile_like_name(dsc_name, session)
2406
2407                 # Keep only those results whose filename actually ends with
2408                 # dsc_name; build a new list rather than removing entries
2409                 # from ql while iterating over it (which skips elements).
2410                 ql = [i for i in ql if i.filename.endswith(dsc_name)]
2411
2412                 # "[dak] has not broken them.  [dak] has fixed a
2413                 # brokenness.  Your crappy hack exploited a bug in
2414                 # the old dinstall."
2415                 #
2416                 # "(Come on!  I thought it was always obvious that
2417                 # one just doesn't release different files with
2418                 # the same name and version.)"
2419                 #                        -- ajk@ on d-devel@l.d.o
2420
2421                 if len(ql) > 0:
2422                     # Ignore exact matches for .orig.tar.gz
2423                     match = 0
2424                     if re_is_orig_source.match(dsc_name):
2425                         for i in ql:
2426                             if self.pkg.files.has_key(dsc_name) and \
2427                                int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2428                                self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2429                                 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2430                                 # TODO: Don't delete the entry, just mark it as not needed
2431                                 # This would fix the stupidity of changing something we often iterate over
2432                                 # whilst we're doing it
2433                                 del self.pkg.files[dsc_name]
2434                                 dsc_entry["files id"] = i.file_id
2435                                 if not orig_files.has_key(dsc_name):
2436                                     orig_files[dsc_name] = {}
2437                                 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2438                                 match = 1
2439
2440                                 # Don't bitch that we couldn't find this file later
2441                                 try:
2442                                     self.later_check_files.remove(dsc_name)
2443                                 except ValueError:
2444                                     pass
2445
2446
2447                     if not match:
                        self.rejects.append("cannot overwrite existing copy of '%s' already in the archive." % (dsc_name))
2449
2450             elif re_is_orig_source.match(dsc_name):
2451                 # Check in the pool
2452                 ql = get_poolfile_like_name(dsc_name, session)
2453
                # Keep only entries whose filename ends in dsc_name; filtering
                # into a new list avoids mutating ql while iterating over it.
                # TODO: Shouldn't we just search for things which end with our
                # string explicitly in the SQL?  (A sketch follows this method.)
                ql = [ i for i in ql if i.filename.endswith(dsc_name) ]
2459
2460                 if len(ql) > 0:
2461                     # Unfortunately, we may get more than one match here if,
2462                     # for example, the package was in potato but had an -sa
2463                     # upload in woody.  So we need to choose the right one.
2464
                    # Default to the first match in case none of the
                    # candidates below matches the .dsc checksums exactly.
                    x = ql[0]

                    if len(ql) > 1:
                        for i in ql:
                            old_file = os.path.join(i.location.path, i.filename)
                            old_file_fh = utils.open_file(old_file)
                            actual_md5 = apt_pkg.md5sum(old_file_fh)
                            old_file_fh.close()
                            actual_size = os.stat(old_file)[stat.ST_SIZE]
                            if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
                                x = i

                    # Recompute the checksums for the candidate we settled on.
                    # (Note 'x', not the loop variable 'i': 'i' holds whatever
                    # the last iteration left behind, which is wrong whenever
                    # the ql[0] default was kept.)
                    old_file = os.path.join(x.location.path, x.filename)
                    old_file_fh = utils.open_file(old_file)
                    actual_md5 = apt_pkg.md5sum(old_file_fh)
                    old_file_fh.close()
                    actual_size = os.stat(old_file)[stat.ST_SIZE]
2483                     found = old_file
2484                     suite_type = x.location.archive_type
2485                     # need this for updating dsc_files in install()
2486                     dsc_entry["files id"] = x.file_id
2487                     # See install() in process-accepted...
2488                     if not orig_files.has_key(dsc_name):
2489                         orig_files[dsc_name] = {}
2490                     orig_files[dsc_name]["id"] = x.file_id
2491                     orig_files[dsc_name]["path"] = old_file
2492                     orig_files[dsc_name]["location"] = x.location.location_id
2493                 else:
2494                     # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2495                     # Not there? Check the queue directories...
2496                     for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2497                         if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2498                             continue
2499                         in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
                        if os.path.exists(in_otherdir):
                            in_otherdir_fh = utils.open_file(in_otherdir)
                            actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
                            in_otherdir_fh.close()
                            actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
                            found = in_otherdir
                            if not orig_files.has_key(dsc_name):
                                orig_files[dsc_name] = {}
                            orig_files[dsc_name]["path"] = in_otherdir
                            # No point probing the remaining queues once found.
                            break
2509
2510                     if not found:
2511                         self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2512                         continue
2513             else:
2514                 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2515                 continue
2516             if actual_md5 != dsc_entry["md5sum"]:
2517                 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2518             if actual_size != int(dsc_entry["size"]):
2519                 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2520
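    # A minimal sketch (hypothetical helper, not called from anywhere) of the
    # TODO above: pushing the "filename ends with dsc_name" test into the SQL
    # query itself instead of pruning the result list in Python.  It assumes
    # the PoolFile model from daklib.dbconn and that newer pool files get
    # higher file ids.
    def sketch_poolfiles_with_suffix(self, dsc_name, session):
        """Return PoolFile rows whose filename ends in C{dsc_name}, newest first."""
        # A leading wildcard makes LIKE behave like endswith(); ordering by
        # file id descending puts the newest candidate first.
        return session.query(PoolFile).filter(
            PoolFile.filename.like('%%%s' % dsc_name)).order_by(
            desc(PoolFile.file_id)).all()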
2521     ################################################################################
2522     # This is used by process-new and process-holding to recheck a changes file
2523     # at the time we're running.  It mainly wraps various other internal functions
2524     # and is similar to accepted_checks - these should probably be tidied up
2525     # and combined
2526     def recheck(self, session):
2527         cnf = Config()
2528         for f in self.pkg.files.keys():
            # The .orig.tar.gz can disappear out from under us if it's a
            # duplicate of one already in the archive.
2531             if not self.pkg.files.has_key(f):
2532                 continue
2533
2534             entry = self.pkg.files[f]
2535
2536             # Check that the source still exists
2537             if entry["type"] == "deb":
2538                 source_version = entry["source version"]
2539                 source_package = entry["source package"]
2540                 if not self.pkg.changes["architecture"].has_key("source") \
2541                    and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
2542                     source_epochless_version = re_no_epoch.sub('', source_version)
2543                     dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2544                     found = False
2545                     for q in ["Embargoed", "Unembargoed", "Newstage"]:
                        if cnf.has_key("Dir::Queue::%s" % (q)):
                            if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (q)], dsc_filename)):
                                found = True
                                break
2549                     if not found:
2550                         self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2551
2552             # Version and file overwrite checks
2553             if entry["type"] == "deb":
2554                 self.check_binary_against_db(f, session)
2555             elif entry["type"] == "dsc":
2556                 self.check_source_against_db(f, session)
2557                 self.check_dsc_against_db(f, session)
2558
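    # A minimal standalone sketch (hypothetical 'process' callback) of the
    # guarded-iteration pattern used in recheck() above: check_dsc_against_db()
    # may delete entries from the dict while we walk a snapshot of its keys,
    # so each iteration re-checks that its key still exists.
    def sketch_guarded_iteration(self, files, process):
        for name in files.keys():       # .keys() snapshots the key list
            if not files.has_key(name): # removed by an earlier iteration?
                continue
            process(files[name])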
2559     ################################################################################
2560     def accepted_checks(self, overwrite_checks, session):
        # Recheck anything that relies on the database, since the database
        # isn't frozen between accept time and the time we actually run
        # (when called from process-accepted).
2563
2564         # overwrite_checks is set to False when installing to stable/oldstable
2565
        propagate = {}
        nopropagate = {}
2568
2569         # Find the .dsc (again)
2570         dsc_filename = None
2571         for f in self.pkg.files.keys():
2572             if self.pkg.files[f]["type"] == "dsc":
2573                 dsc_filename = f
2574
2575         for checkfile in self.pkg.files.keys():
            # The .orig.tar.gz can disappear out from under us if it's a
            # duplicate of one already in the archive.
2578             if not self.pkg.files.has_key(checkfile):
2579                 continue
2580
2581             entry = self.pkg.files[checkfile]
2582
2583             # Check that the source still exists
2584             if entry["type"] == "deb":
2585                 source_version = entry["source version"]
2586                 source_package = entry["source package"]
                if not self.pkg.changes["architecture"].has_key("source") \
                   and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
2589                     self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2590
2591             # Version and file overwrite checks
2592             if overwrite_checks:
2593                 if entry["type"] == "deb":
2594                     self.check_binary_against_db(checkfile, session)
2595                 elif entry["type"] == "dsc":
2596                     self.check_source_against_db(checkfile, session)
2597                     self.check_dsc_against_db(dsc_filename, session)
2598
            # Propagate in case the package is in the override tables:
            for suite in self.pkg.changes.get("propdistribution", {}).keys():
                if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
                    propagate[suite] = 1
                else:
                    nopropagate[suite] = 1
2605
        for suite in propagate.keys():
            if suite in nopropagate:
2608                 continue
2609             self.pkg.changes["distribution"][suite] = 1
2610
        for checkfile in self.pkg.files.keys():
            # Re-fetch the entry for this file; reusing 'entry' left over
            # from the previous loop would check the wrong package.
            entry = self.pkg.files[checkfile]

            # Check the package is still in the override tables
            for suite in self.pkg.changes["distribution"].keys():
                if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
                    self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2616
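    # A worked sketch (hypothetical inputs) of the propagation rule applied
    # above: a suite from 'propdistribution' is added to the target
    # distributions only if no file voted against it, i.e. a single missing
    # override vetoes the whole suite.
    def sketch_propagation_rule(self, propagate, nopropagate, distribution):
        # e.g. propagate = {'testing': 1, 'unstable': 1} and
        #      nopropagate = {'testing': 1}  =>  only 'unstable' is added.
        for suite in propagate.keys():
            if suite in nopropagate:
                continue    # at least one file lacked an override for it
            distribution[suite] = 1
        return distribution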
2617     ################################################################################
    # This is not really a reject but an unaccept; however, since a) the code
    # for a proper unaccept is non-trivial (reopen bugs, unannounce etc.) and
    # b) this should be extremely rare, for now we'll go with whining at our
    # admin folks...
2621
2622     def do_unaccept(self):
2623         cnf = Config()
2624
2625         self.update_subst()
2626         self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2627         self.Subst["__REJECT_MESSAGE__"] = self.package_info()
2628         self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2629         self.Subst["__BCC__"] = "X-DAK: dak process-accepted"
2630         if cnf.has_key("Dinstall::Bcc"):
2631             self.Subst["__BCC__"] += "\nBcc: %s" % (cnf["Dinstall::Bcc"])
2632
2633         template = os.path.join(cnf["Dir::Templates"], "process-accepted.unaccept")
2634
2635         reject_mail_message = utils.TemplateSubst(self.Subst, template)
2636
2637         # Write the rejection email out as the <foo>.reason file
2638         reason_filename = os.path.basename(self.pkg.changes_file[:-8]) + ".reason"
2639         reject_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2640
2641         # If we fail here someone is probably trying to exploit the race
2642         # so let's just raise an exception ...
2643         if os.path.exists(reject_filename):
2644             os.unlink(reject_filename)
2645
2646         fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2647         os.write(fd, reject_mail_message)
2648         os.close(fd)
2649
2650         utils.send_mail(reject_mail_message)
2651
2652         del self.Subst["__REJECTOR_ADDRESS__"]
2653         del self.Subst["__REJECT_MESSAGE__"]
2654         del self.Subst["__CC__"]
2655
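    # The O_CREAT|O_EXCL pair used above makes the creation atomic: if someone
    # re-creates the .reason file between our unlink() and open(), os.open()
    # raises OSError(EEXIST) rather than silently writing through a planted
    # file.  A minimal standalone sketch of the same idiom (hypothetical
    # helper, not used elsewhere):
    def sketch_exclusive_create(self, path, data):
        fd = os.open(path, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
        try:
            os.write(fd, data)
        finally:
            os.close(fd)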
2656     ################################################################################
2657     # If any file of an upload has a recent mtime then chances are good
2658     # the file is still being uploaded.
2659
2660     def upload_too_new(self):
2661         cnf = Config()
2662         too_new = False
2663         # Move back to the original directory to get accurate time stamps
2664         cwd = os.getcwd()
2665         os.chdir(self.pkg.directory)
2666         file_list = self.pkg.files.keys()
2667         file_list.extend(self.pkg.dsc_files.keys())
2668         file_list.append(self.pkg.changes_file)
2669         for f in file_list:
            try:
                last_modified = time.time() - os.path.getmtime(f)
                if last_modified < int(cnf["Dinstall::SkipTime"]):
                    too_new = True
                    break
            except OSError:
                # The file may have vanished in the meantime; a missing
                # file is certainly not "too new".
                pass
2677
2678         os.chdir(cwd)
2679         return too_new
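    # A minimal sketch (hypothetical 'skip_time' argument) of the mtime test
    # used above: a file modified less than skip_time seconds ago is assumed
    # to still be in transit.
    def sketch_recently_modified(self, path, skip_time):
        try:
            return (time.time() - os.path.getmtime(path)) < skip_time
        except OSError:
            # Missing or unreadable files are not "too new", as above.
            return False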