]> git.decadent.org.uk Git - dak.git/blob - daklib/queue.py
Merge branch 'master' into process-upload
[dak.git] / daklib / queue.py
1 #!/usr/bin/env python
2 # vim:set et sw=4:
3
4 """
5 Queue utility functions for dak
6
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009, 2010  Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
11 """
12
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
17
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
21 # GNU General Public License for more details.
22
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
26
27 ###############################################################################
28
29 import errno
30 import os
31 import stat
32 import sys
33 import time
34 import apt_inst
35 import apt_pkg
36 import utils
37 import commands
38 import shutil
39 import textwrap
40 from types import *
41 from sqlalchemy.sql.expression import desc
42 from sqlalchemy.orm.exc import NoResultFound
43
44 import yaml
45
46 from dak_exceptions import *
47 from changes import *
48 from regexes import *
49 from config import Config
50 from holding import Holding
51 from urgencylog import UrgencyLog
52 from dbconn import *
53 from summarystats import SummaryStats
54 from utils import parse_changes, check_dsc_files
55 from textutils import fix_maintainer
56 from binary import Binary
57 from lintian import parse_lintian_output, generate_reject_messages
58
59 ###############################################################################
60
def get_type(f, session):
    """
    Get the file type of C{f}

    @type f: dict
    @param f: file entry from Changes object

    @type session: SQLA Session
    @param session: SQL Alchemy session object

    @rtype: string
    @return: filetype
    """
    # Work out the type from the file entry itself: an explicit database
    # type wins, then anything matching the source-extension regex is a
    # dsc; everything else is fatal.
    if "dbtype" in f:
        file_type = f["dbtype"]
    elif re_source_ext.match(f["type"]):
        file_type = "dsc"
    else:
        file_type = f["type"]
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (file_type))

    # There must be a matching override type in the database.
    if get_override_type(file_type, session) is None:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))

    return file_type
90
91 ################################################################################
92
93 # Determine what parts in a .changes are NEW
94
95 def determine_new(changes, files, warn=1, session = None):
96     """
97     Determine what parts in a C{changes} file are NEW.
98
99     @type changes: Upload.Pkg.changes dict
100     @param changes: Changes dictionary
101
102     @type files: Upload.Pkg.files dict
103     @param files: Files dictionary
104
105     @type warn: bool
106     @param warn: Warn if overrides are added for (old)stable
107
108     @rtype: dict
109     @return: dictionary of NEW components.
110
111     """
112     new = {}
113
114     # Build up a list of potentially new things
115     for name, f in files.items():
116         # Skip byhand elements
117 #        if f["type"] == "byhand":
118 #            continue
119         pkg = f["package"]
120         priority = f["priority"]
121         section = f["section"]
122         file_type = get_type(f, session)
123         component = f["component"]
124
125         if file_type == "dsc":
126             priority = "source"
127
128         if not new.has_key(pkg):
129             new[pkg] = {}
130             new[pkg]["priority"] = priority
131             new[pkg]["section"] = section
132             new[pkg]["type"] = file_type
133             new[pkg]["component"] = component
134             new[pkg]["files"] = []
135         else:
136             old_type = new[pkg]["type"]
137             if old_type != file_type:
138                 # source gets trumped by deb or udeb
139                 if old_type == "dsc":
140                     new[pkg]["priority"] = priority
141                     new[pkg]["section"] = section
142                     new[pkg]["type"] = file_type
143                     new[pkg]["component"] = component
144
145         new[pkg]["files"].append(name)
146
147         if f.has_key("othercomponents"):
148             new[pkg]["othercomponents"] = f["othercomponents"]
149
150     # Fix up the list of target suites
151     cnf = Config()
152     for suite in changes["suite"].keys():
153         override = cnf.Find("Suite::%s::OverrideSuite" % (suite))
154         if override:
155             (olderr, newerr) = (get_suite(suite, session) == None,
156                                 get_suite(override, session) == None)
157             if olderr or newerr:
158                 (oinv, newinv) = ("", "")
159                 if olderr: oinv = "invalid "
160                 if newerr: ninv = "invalid "
161                 print "warning: overriding %ssuite %s to %ssuite %s" % (
162                         oinv, suite, ninv, override)
163             del changes["suite"][suite]
164             changes["suite"][override] = 1
165
166     for suite in changes["suite"].keys():
167         for pkg in new.keys():
168             ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
169             if len(ql) > 0:
170                 for file_entry in new[pkg]["files"]:
171                     if files[file_entry].has_key("new"):
172                         del files[file_entry]["new"]
173                 del new[pkg]
174
175     if warn:
176         for s in ['stable', 'oldstable']:
177             if changes["suite"].has_key(s):
178                 print "WARNING: overrides will be added for %s!" % s
179         for pkg in new.keys():
180             if new[pkg].has_key("othercomponents"):
181                 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
182
183     return new
184
185 ################################################################################
186
def check_valid(new, session = None):
    """
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages can not be udeb
      - source priority can only be assigned to dsc file types

    Invalid entries are flagged by setting "section id" / "priority id"
    to -1 in C{new}.

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.

    @type session: SQLA Session
    @param session: SQL Alchemy session object
    """
    for pkg in new.keys():
        section_name = new[pkg]["section"]
        priority_name = new[pkg]["priority"]
        file_type = new[pkg]["type"]

        # Look the section up in the database; -1 marks it unknown.
        section = get_section(section_name, session)
        if section is None:
            new[pkg]["section id"] = -1
        else:
            new[pkg]["section id"] = section.section_id

        # Likewise for the priority.
        priority = get_priority(priority_name, session)
        if priority is None:
            new[pkg]["priority id"] = -1
        else:
            new[pkg]["priority id"] = priority.priority_id

        # Sanity checks
        di = section_name.find("debian-installer") != -1

        # If d-i, we must be udeb and vice-versa
        if     (di and file_type not in ("udeb", "dsc")) or \
           (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1

        # If dsc we need to be source and vice-versa.
        # Bug fix: compare the priority *name*, not the database object
        # returned by get_priority() above, which never equals the string
        # "source" and made this check a no-op.
        if (priority_name == "source" and file_type != "dsc") or \
           (priority_name != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1
228
229 ###############################################################################
230
231 # Used by Upload.check_timestamps
class TarTime(object):
    """Tar-walk callback that records members with implausible mtimes."""

    def __init__(self, future_cutoff, past_cutoff):
        # Members newer than future_cutoff or older than past_cutoff are
        # collected into the corresponding dictionaries (name -> mtime).
        self.future_cutoff = future_cutoff
        self.past_cutoff = past_cutoff
        self.reset()

    def reset(self):
        """Forget any members recorded so far."""
        self.future_files = {}
        self.ancient_files = {}

    def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
        """Record Name when MTime falls outside the accepted window."""
        if MTime > self.future_cutoff:
            self.future_files[Name] = MTime
        if MTime < self.past_cutoff:
            self.ancient_files[Name] = MTime
247
248 ###############################################################################
249
250 class Upload(object):
251     """
252     Everything that has to do with an upload processed.
253
254     """
255     def __init__(self):
256         self.logger = None
257         self.pkg = Changes()
258         self.reset()
259
260     ###########################################################################
261
262     def reset (self):
263         """ Reset a number of internal variables."""
264
265         # Initialize the substitution template map
266         cnf = Config()
267         self.Subst = {}
268         self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
269         self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
270         self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
271         self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
272
273         self.rejects = []
274         self.warnings = []
275         self.notes = []
276
277         self.later_check_files = []
278
279         self.pkg.reset()
280
281     def package_info(self):
282         """
283         Format various messages from this Upload to send to the maintainer.
284         """
285
286         msgs = (
287             ('Reject Reasons', self.rejects),
288             ('Warnings', self.warnings),
289             ('Notes', self.notes),
290         )
291
292         msg = ''
293         for title, messages in msgs:
294             if messages:
295                 msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
296         msg += '\n\n'
297
298         return msg
299
300     ###########################################################################
    def update_subst(self):
        """
        Set up the per-package template substitution mappings.

        Populates self.Subst with the __ARCHITECTURE__, __MAINTAINER*__,
        __REJECT_MESSAGE__, __SOURCE__ and __VERSION__ keys used when
        expanding mail templates, based on the current self.pkg.changes.
        """

        cnf = Config()

        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not self.pkg.changes.has_key("architecture") or not \
           isinstance(self.pkg.changes["architecture"], dict):
            self.pkg.changes["architecture"] = { "Unknown" : "" }

        # and maintainer2047 may not exist.
        if not self.pkg.changes.has_key("maintainer2047"):
            self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]

        self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
        self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if self.pkg.changes["architecture"].has_key("source") and \
           self.pkg.changes["changedby822"] != "" and \
           (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):

            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
        else:
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")

        # Sponsored uploads get the sponsor's address added to the recipients.
        if "sponsoremail" in self.pkg.changes:
            self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]

        # Bcc the package tracking server if one is configured.
        if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
            self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field; this replaces
        # everything computed above for the To/From addresses.
        if cnf.get("Dinstall::OverrideMaintainer"):
            self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
            self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]

        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
        self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
346
347     ###########################################################################
    def load_changes(self, filename):
        """
        Load a changes file and setup a dictionary around it. Also checks for
        mandatory fields within.

        @type filename: string
        @param filename: Changes filename, full path.

        @rtype: boolean
        @return: whether the changes file was valid or not.  We may want to
                 reject even if this is True (see what gets put in self.rejects).
                 This is simply to prevent us even trying things later which will
                 fail because we couldn't properly parse the file.
        """
        Cnf = Config()
        self.pkg.changes_file = filename

        # Parse the .changes field into a dictionary
        try:
            self.pkg.changes.update(parse_changes(filename))
        except CantOpenError:
            self.rejects.append("%s: can't read file." % (filename))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except ChangesUnicodeError:
            self.rejects.append("%s: changes file not proper utf-8" % (filename))
            return False

        # Parse the Files field from the .changes into another dictionary
        try:
            self.pkg.files.update(utils.build_file_list(self.pkg.changes))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (filename, format))
            return False

        # Check for mandatory fields
        for i in ("distribution", "source", "binary", "architecture",
                  "version", "maintainer", "files", "changes", "description"):
            if not self.pkg.changes.has_key(i):
                # Avoid undefined errors later
                self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
                return False

        # Strip a source version in brackets from the source field
        if re_strip_srcver.search(self.pkg.changes["source"]):
            self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])

        # Ensure the source field is a valid package name.
        if not re_valid_pkg_name.match(self.pkg.changes["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))

        # Split multi-value fields into a lower-level dictionary, e.g.
        # "Architecture: amd64 source" -> {"amd64": 1, "source": 1}.
        # NOTE: a missing field still ends up as an empty dict.
        for i in ("architecture", "distribution", "binary", "closes"):
            o = self.pkg.changes.get(i, "")
            if o != "":
                del self.pkg.changes[i]

            self.pkg.changes[i] = {}

            for j in o.split():
                self.pkg.changes[i][j] = 1

        # Fix the Maintainer: field to be RFC822/2047 compatible
        try:
            (self.pkg.changes["maintainer822"],
             self.pkg.changes["maintainer2047"],
             self.pkg.changes["maintainername"],
             self.pkg.changes["maintaineremail"]) = \
                   fix_maintainer (self.pkg.changes["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["maintainer"], msg))

        # ...likewise for the Changed-By: field if it exists.
        try:
            (self.pkg.changes["changedby822"],
             self.pkg.changes["changedby2047"],
             self.pkg.changes["changedbyname"],
             self.pkg.changes["changedbyemail"]) = \
                   fix_maintainer (self.pkg.changes.get("changed-by", ""))
        except ParseMaintError, msg:
            # Fall back to empty values so later code can rely on the keys
            # being present.
            self.pkg.changes["changedby822"] = ""
            self.pkg.changes["changedby2047"] = ""
            self.pkg.changes["changedbyname"] = ""
            self.pkg.changes["changedbyemail"] = ""

            self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["changed-by"], msg))

        # Ensure all the values in Closes: are numbers
        if self.pkg.changes.has_key("closes"):
            for i in self.pkg.changes["closes"].keys():
                if re_isanum.match (i) == None:
                    self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))

        # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
        self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
        self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])

        # Check the .changes is non-empty
        if not self.pkg.files:
            self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
            return False

        # Changes was syntactically valid even if we'll reject
        return True
459
460     ###########################################################################
461
    def check_distributions(self):
        """
        Check and map the Distribution field.

        Applies the configured SuiteMappings rules ("map", "silent-map",
        "map-unreleased", "ignore", "reject", "propup-version") to the
        target suites in self.pkg.changes["distribution"], then rejects
        the upload if no valid target distribution remains or a target
        suite is unknown to the configuration.
        """

        Cnf = Config()

        # Handle suite mappings
        for m in Cnf.ValueList("SuiteMappings"):
            args = m.split()
            mtype = args[0]
            if mtype == "map" or mtype == "silent-map":
                # "map <source> <dest>": retarget uploads for <source> at <dest>.
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    del self.pkg.changes["distribution"][source]
                    self.pkg.changes["distribution"][dest] = 1
                    if mtype != "silent-map":
                        self.notes.append("Mapping %s to %s." % (source, dest))
                if self.pkg.changes.has_key("distribution-version"):
                    if self.pkg.changes["distribution-version"].has_key(source):
                        self.pkg.changes["distribution-version"][source]=dest
            elif mtype == "map-unreleased":
                # Retarget only when some upload architecture is not built
                # in the <source> suite.
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    for arch in self.pkg.changes["architecture"].keys():
                        if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
                            self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
                            del self.pkg.changes["distribution"][source]
                            self.pkg.changes["distribution"][dest] = 1
                            break
            elif mtype == "ignore":
                # Drop <suite> from the target list, with a warning.
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    del self.pkg.changes["distribution"][suite]
                    self.warnings.append("Ignoring %s as a target suite." % (suite))
            elif mtype == "reject":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    self.rejects.append("Uploads to %s are not accepted." % (suite))
            elif mtype == "propup-version":
                # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
                #
                # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
                if self.pkg.changes["distribution"].has_key(args[1]):
                    self.pkg.changes.setdefault("distribution-version", {})
                    for suite in args[2:]:
                        self.pkg.changes["distribution-version"][suite] = suite

        # Ensure there is (still) a target distribution
        if len(self.pkg.changes["distribution"].keys()) < 1:
            self.rejects.append("No valid distribution remaining.")

        # Ensure target distributions exist
        for suite in self.pkg.changes["distribution"].keys():
            if not Cnf.has_key("Suite::%s" % (suite)):
                self.rejects.append("Unknown distribution `%s'." % (suite))
516
517     ###########################################################################
518
519     def binary_file_checks(self, f, session):
520         cnf = Config()
521         entry = self.pkg.files[f]
522
523         # Extract package control information
524         deb_file = utils.open_file(f)
525         try:
526             control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
527         except:
528             self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
529             deb_file.close()
530             # Can't continue, none of the checks on control would work.
531             return
532
533         # Check for mandantory "Description:"
534         deb_file.seek(0)
535         try:
536             apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
537         except:
538             self.rejects.append("%s: Missing Description in binary package" % (f))
539             return
540
541         deb_file.close()
542
543         # Check for mandatory fields
544         for field in [ "Package", "Architecture", "Version" ]:
545             if control.Find(field) == None:
546                 # Can't continue
547                 self.rejects.append("%s: No %s field in control." % (f, field))
548                 return
549
550         # Ensure the package name matches the one give in the .changes
551         if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
552             self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
553
554         # Validate the package field
555         package = control.Find("Package")
556         if not re_valid_pkg_name.match(package):
557             self.rejects.append("%s: invalid package name '%s'." % (f, package))
558
559         # Validate the version field
560         version = control.Find("Version")
561         if not re_valid_version.match(version):
562             self.rejects.append("%s: invalid version number '%s'." % (f, version))
563
564         # Ensure the architecture of the .deb is one we know about.
565         default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
566         architecture = control.Find("Architecture")
567         upload_suite = self.pkg.changes["distribution"].keys()[0]
568
569         if      architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session = session)] \
570             and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session = session)]:
571             self.rejects.append("Unknown architecture '%s'." % (architecture))
572
573         # Ensure the architecture of the .deb is one of the ones
574         # listed in the .changes.
575         if not self.pkg.changes["architecture"].has_key(architecture):
576             self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
577
578         # Sanity-check the Depends field
579         depends = control.Find("Depends")
580         if depends == '':
581             self.rejects.append("%s: Depends field is empty." % (f))
582
583         # Sanity-check the Provides field
584         provides = control.Find("Provides")
585         if provides:
586             provide = re_spacestrip.sub('', provides)
587             if provide == '':
588                 self.rejects.append("%s: Provides field is empty." % (f))
589             prov_list = provide.split(",")
590             for prov in prov_list:
591                 if not re_valid_pkg_name.match(prov):
592                     self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
593
594         # Check the section & priority match those given in the .changes (non-fatal)
595         if     control.Find("Section") and entry["section"] != "" \
596            and entry["section"] != control.Find("Section"):
597             self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
598                                 (f, control.Find("Section", ""), entry["section"]))
599         if control.Find("Priority") and entry["priority"] != "" \
600            and entry["priority"] != control.Find("Priority"):
601             self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
602                                 (f, control.Find("Priority", ""), entry["priority"]))
603
604         entry["package"] = package
605         entry["architecture"] = architecture
606         entry["version"] = version
607         entry["maintainer"] = control.Find("Maintainer", "")
608
609         if f.endswith(".udeb"):
610             self.pkg.files[f]["dbtype"] = "udeb"
611         elif f.endswith(".deb"):
612             self.pkg.files[f]["dbtype"] = "deb"
613         else:
614             self.rejects.append("%s is neither a .deb or a .udeb." % (f))
615
616         entry["source"] = control.Find("Source", entry["package"])
617
618         # Get the source version
619         source = entry["source"]
620         source_version = ""
621
622         if source.find("(") != -1:
623             m = re_extract_src_version.match(source)
624             source = m.group(1)
625             source_version = m.group(2)
626
627         if not source_version:
628             source_version = self.pkg.files[f]["version"]
629
630         entry["source package"] = source
631         entry["source version"] = source_version
632
633         # Ensure the filename matches the contents of the .deb
634         m = re_isadeb.match(f)
635
636         #  package name
637         file_package = m.group(1)
638         if entry["package"] != file_package:
639             self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
640                                 (f, file_package, entry["dbtype"], entry["package"]))
641         epochless_version = re_no_epoch.sub('', control.Find("Version"))
642
643         #  version
644         file_version = m.group(2)
645         if epochless_version != file_version:
646             self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
647                                 (f, file_version, entry["dbtype"], epochless_version))
648
649         #  architecture
650         file_architecture = m.group(3)
651         if entry["architecture"] != file_architecture:
652             self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
653                                 (f, file_architecture, entry["dbtype"], entry["architecture"]))
654
655         # Check for existent source
656         source_version = entry["source version"]
657         source_package = entry["source package"]
658         if self.pkg.changes["architecture"].has_key("source"):
659             if source_version != self.pkg.changes["version"]:
660                 self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
661                                     (source_version, f, self.pkg.changes["version"]))
662         else:
663             # Check in the SQL database
664             if not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
665                 # Check in one of the other directories
666                 source_epochless_version = re_no_epoch.sub('', source_version)
667                 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
668                 if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
669                     entry["byhand"] = 1
670                 elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
671                     entry["new"] = 1
672                 else:
673                     dsc_file_exists = False
674                     for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates", "Lenny-Volatile-Proposed-Updates"]:
675                         if cnf.has_key("Dir::Queue::%s" % (myq)):
676                             if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
677                                 dsc_file_exists = True
678                                 break
679
680                     if not dsc_file_exists:
681                         self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
682
683         # Check the version and for file overwrites
684         self.check_binary_against_db(f, session)
685
686         # Temporarily disable contents generation until we change the table storage layout
687         #b = Binary(f)
688         #b.scan_package()
689         #if len(b.rejects) > 0:
690         #    for j in b.rejects:
691         #        self.rejects.append(j)
692
693     def source_file_checks(self, f, session):
694         entry = self.pkg.files[f]
695
696         m = re_issource.match(f)
697         if not m:
698             return
699
700         entry["package"] = m.group(1)
701         entry["version"] = m.group(2)
702         entry["type"] = m.group(3)
703
704         # Ensure the source package name matches the Source filed in the .changes
705         if self.pkg.changes["source"] != entry["package"]:
706             self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
707
708         # Ensure the source version matches the version in the .changes file
709         if re_is_orig_source.match(f):
710             changes_version = self.pkg.changes["chopversion2"]
711         else:
712             changes_version = self.pkg.changes["chopversion"]
713
714         if changes_version != entry["version"]:
715             self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))
716
717         # Ensure the .changes lists source in the Architecture field
718         if not self.pkg.changes["architecture"].has_key("source"):
719             self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))
720
721         # Check the signature of a .dsc file
722         if entry["type"] == "dsc":
723             # check_signature returns either:
724             #  (None, [list, of, rejects]) or (signature, [])
725             (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
726             for j in rejects:
727                 self.rejects.append(j)
728
729         entry["architecture"] = "source"
730
731     def per_suite_file_checks(self, f, suite, session):
732         cnf = Config()
733         entry = self.pkg.files[f]
734
735         # Skip byhand
736         if entry.has_key("byhand"):
737             return
738
739         # Check we have fields we need to do these checks
740         oktogo = True
741         for m in ['component', 'package', 'priority', 'size', 'md5sum']:
742             if not entry.has_key(m):
743                 self.rejects.append("file '%s' does not have field %s set" % (f, m))
744                 oktogo = False
745
746         if not oktogo:
747             return
748
749         # Handle component mappings
750         for m in cnf.ValueList("ComponentMappings"):
751             (source, dest) = m.split()
752             if entry["component"] == source:
753                 entry["original component"] = source
754                 entry["component"] = dest
755
756         # Ensure the component is valid for the target suite
757         if cnf.has_key("Suite:%s::Components" % (suite)) and \
758            entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
759             self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
760             return
761
762         # Validate the component
763         if not get_component(entry["component"], session):
764             self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
765             return
766
767         # See if the package is NEW
768         if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
769             entry["new"] = 1
770
771         # Validate the priority
772         if entry["priority"].find('/') != -1:
773             self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))
774
775         # Determine the location
776         location = cnf["Dir::Pool"]
777         l = get_location(location, entry["component"], session=session)
778         if l is None:
779             self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
780             entry["location id"] = -1
781         else:
782             entry["location id"] = l.location_id
783
784         # Check the md5sum & size against existing files (if any)
785         entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])
786
787         found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
788                                          entry["size"], entry["md5sum"], entry["location id"])
789
790         if found is None:
791             self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
792         elif found is False and poolfile is not None:
793             self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
794         else:
795             if poolfile is None:
796                 entry["files id"] = None
797             else:
798                 entry["files id"] = poolfile.file_id
799
800         # Check for packages that have moved from one component to another
801         entry['suite'] = suite
802         res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
803         if res.rowcount > 0:
804             entry["othercomponents"] = res.fetchone()[0]
805
    def check_files(self, action=True):
        """Run the main per-file checks over everything listed in the .changes.

        When action is True, first copies all files into the holding area.
        Classifies each file as deb / source / byhand / unreadable, runs the
        type-specific checks, then the per-suite checks, and finally verifies
        that uploads claiming source actually contain it.  All problems are
        appended to self.rejects / self.warnings.
        """
        file_keys = self.pkg.files.keys()
        holding = Holding()
        cnf = Config()

        if action:
            # Copy everything into the holding area; failures are only
            # warnings here because the file may still be found in the DB.
            cwd = os.getcwd()
            os.chdir(self.pkg.directory)
            for f in file_keys:
                ret = holding.copy_to_holding(f)
                if ret is not None:
                    self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)

            os.chdir(cwd)

        # check we already know the changes file
        # [NB: this check must be done post-suite mapping]
        base_filename = os.path.basename(self.pkg.changes_file)

        session = DBConn().session()

        try:
            dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
            # if in the pool or in a queue other than unchecked, reject
            if (dbc.in_queue is None) \
                   or (dbc.in_queue is not None
                       and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
                self.rejects.append("%s file already known to dak" % base_filename)
        except NoResultFound, e:
            # not known, good
            pass

        has_binaries = False
        has_source = False

        for f, entry in self.pkg.files.items():
            # Ensure the file does not already exist in one of the accepted directories
            for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                if not cnf.has_key("Dir::Queue::%s" % (d)): continue
                if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
                    self.rejects.append("%s file already exists in the %s directory." % (f, d))

            # Reject filenames with characters that are unsafe to pass around.
            if not re_taint_free.match(f):
                self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))

            # Check the file is readable
            if os.access(f, os.R_OK) == 0:
                # When running in -n, copy_to_holding() won't have
                # generated the reject_message, so we need to.
                if action:
                    if os.path.exists(f):
                        self.rejects.append("Can't read `%s'. [permission denied]" % (f))
                    else:
                        # Don't directly reject, mark to check later to deal with orig's
                        # we can find in the pool
                        self.later_check_files.append(f)
                entry["type"] = "unreadable"
                continue

            # If it's byhand skip remaining checks
            if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Checks for a binary package...
            elif re_isadeb.match(f):
                has_binaries = True
                entry["type"] = "deb"

                # This routine appends to self.rejects/warnings as appropriate
                self.binary_file_checks(f, session)

            # Checks for a source package...
            elif re_issource.match(f):
                has_source = True

                # This routine appends to self.rejects/warnings as appropriate
                self.source_file_checks(f, session)

            # Not a binary or source package?  Assume byhand...
            else:
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Per-suite file checks
            entry["oldfiles"] = {}
            for suite in self.pkg.changes["distribution"].keys():
                self.per_suite_file_checks(f, suite, session)

        session.close()

        # If the .changes file says it has source, it must have source.
        if self.pkg.changes["architecture"].has_key("source"):
            if not has_source:
                self.rejects.append("no source found and Architecture line in changes mention source.")

            if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
                self.rejects.append("source only uploads are not supported.")
904
905     ###########################################################################
    def check_dsc(self, action=True, session=None):
        """Validate the .dsc of a source upload.

        Parses the .dsc, builds the dsc file list, enforces mandatory
        fields, validates name/version/format/maintainer/build-depends,
        cross-checks the version against the .changes and the database.
        Returns a bool indicating whether the source changes are valid;
        problems are appended to self.rejects as a side effect.
        """
        # Ensure there is source to check
        if not self.pkg.changes["architecture"].has_key("source"):
            return True

        # Find the .dsc
        dsc_filename = None
        for f, entry in self.pkg.files.items():
            if entry["type"] == "dsc":
                if dsc_filename:
                    # More than one .dsc in a single upload cannot be processed.
                    self.rejects.append("can not process a .changes file with multiple .dsc's.")
                    return False
                else:
                    dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            self.rejects.append("source uploads must contain a dsc file")
            return False

        # Parse the .dsc file
        try:
            self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1, dsc_file=1))
        except CantOpenError:
            # if not -n copy_to_holding() will have done this for us...
            if not action:
                self.rejects.append("%s: can't read file." % (dsc_filename))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
        except InvalidDscError, line:
            self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
        except ChangesUnicodeError:
            self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))

        # Build up the file list of files mentioned by the .dsc
        try:
            self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
        except NoFilesFieldError:
            self.rejects.append("%s: no Files: field." % (dsc_filename))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
            return False

        # Enforce mandatory fields
        for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
            if not self.pkg.dsc.has_key(i):
                self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
                return False

        # Validate the source and version fields
        if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
        if not re_valid_version.match(self.pkg.dsc["version"]):
            self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))

        # Only a limited list of source formats are allowed in each suite
        for dist in self.pkg.changes["distribution"].keys():
            allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
            if self.pkg.dsc["format"] not in allowed:
                self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))

        # Validate the Maintainer field
        try:
            # We ignore the return value
            fix_maintainer(self.pkg.dsc["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                                 % (dsc_filename, self.pkg.dsc["maintainer"], msg))

        # Validate the build-depends field(s)
        for field_name in [ "build-depends", "build-depends-indep" ]:
            field = self.pkg.dsc.get(field_name)
            if field:
                # Have apt try to parse them...
                try:
                    apt_pkg.ParseSrcDepends(field)
                except:
                    self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))

        # Ensure the version number in the .dsc matches the version number in the .changes
        epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        changes_version = self.pkg.files[dsc_filename]["version"]

        if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
            self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))

        # Ensure the Files field contain only what's expected
        self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))

        # Ensure source is newer than existing source in target suites
        # NOTE(review): this rebinds the 'session' parameter; the
        # caller-supplied session is only used for the source-format
        # lookup above -- confirm this is intentional.
        session = DBConn().session()
        self.check_source_against_db(dsc_filename, session)
        self.check_dsc_against_db(dsc_filename, session)
        session.close()

        # Finally, check if we're missing any files
        for f in self.later_check_files:
            self.rejects.append("Could not find file %s references in changes" % f)

        return True
1011
1012     ###########################################################################
1013
    def get_changelog_versions(self, source_dir):
        """Extracts the source package and (optionally) grabs the
        version history out of debian/changelog for the BTS.

        Expects to be run with the current working directory set to a
        scratch directory; symlinks the source files from source_dir (and
        known orig locations) into it, runs dpkg-source -x, and stores the
        matched changelog version lines in self.pkg.dsc["bts changelog"].
        """

        cnf = Config()

        # Find the .dsc (again)
        dsc_filename = None
        for f in self.pkg.files.keys():
            if self.pkg.files[f]["type"] == "dsc":
                dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            return

        # Create a symlink mirror of the source files in our temporary directory
        for f in self.pkg.files.keys():
            m = re_issource.match(f)
            if m:
                src = os.path.join(source_dir, f)
                # If a file is missing for whatever reason, give up.
                if not os.path.exists(src):
                    return
                ftype = m.group(3)
                # Orig tarballs already known by path are handled by the
                # loop below instead.
                if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
                   self.pkg.orig_files[f].has_key("path"):
                    continue
                dest = os.path.join(os.getcwd(), f)
                os.symlink(src, dest)

        # If the orig files are not a part of the upload, create symlinks to the
        # existing copies.
        for orig_file in self.pkg.orig_files.keys():
            if not self.pkg.orig_files[orig_file].has_key("path"):
                continue
            dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
            os.symlink(self.pkg.orig_files[orig_file]["path"], dest)

        # Extract the source
        cmd = "dpkg-source -sn -x %s" % (dsc_filename)
        (result, output) = commands.getstatusoutput(cmd)
        if (result != 0):
            self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
            self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
            return

        # Changelog scraping is only wanted when BTS version tracking is on.
        if not cnf.Find("Dir::Queue::BTSVersionTrack"):
            return

        # Get the upstream version
        upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        if re_strip_revision.search(upstr_version):
            upstr_version = re_strip_revision.sub('', upstr_version)

        # Ensure the changelog file exists
        changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
        if not os.path.exists(changelog_filename):
            self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
            return

        # Parse the changelog
        self.pkg.dsc["bts changelog"] = ""
        changelog_file = utils.open_file(changelog_filename)
        for line in changelog_file.readlines():
            m = re_changelog_versions.match(line)
            if m:
                self.pkg.dsc["bts changelog"] += line
        changelog_file.close()

        # Check we found at least one revision in the changelog
        if not self.pkg.dsc["bts changelog"]:
            self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1087
1088     def check_source(self):
1089         # Bail out if:
1090         #    a) there's no source
1091         if not self.pkg.changes["architecture"].has_key("source"):
1092             return
1093
1094         tmpdir = utils.temp_dirname()
1095
1096         # Move into the temporary directory
1097         cwd = os.getcwd()
1098         os.chdir(tmpdir)
1099
1100         # Get the changelog version history
1101         self.get_changelog_versions(cwd)
1102
1103         # Move back and cleanup the temporary tree
1104         os.chdir(cwd)
1105
1106         try:
1107             shutil.rmtree(tmpdir)
1108         except OSError, e:
1109             if e.errno != errno.EACCES:
1110                 print "foobar"
1111                 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1112
1113             self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1114             # We probably have u-r or u-w directories so chmod everything
1115             # and try again.
1116             cmd = "chmod -R u+rwx %s" % (tmpdir)
1117             result = os.system(cmd)
1118             if result != 0:
1119                 utils.fubar("'%s' failed with result %s." % (cmd, result))
1120             shutil.rmtree(tmpdir)
1121         except Exception, e:
1122             print "foobar2 (%s)" % e
1123             utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1124
1125     ###########################################################################
1126     def ensure_hashes(self):
1127         # Make sure we recognise the format of the Files: field in the .changes
1128         format = self.pkg.changes.get("format", "0.0").split(".", 1)
1129         if len(format) == 2:
1130             format = int(format[0]), int(format[1])
1131         else:
1132             format = int(float(format[0])), 0
1133
1134         # We need to deal with the original changes blob, as the fields we need
1135         # might not be in the changes dict serialised into the .dak anymore.
1136         orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1137
1138         # Copy the checksums over to the current changes dict.  This will keep
1139         # the existing modifications to it intact.
1140         for field in orig_changes:
1141             if field.startswith('checksums-'):
1142                 self.pkg.changes[field] = orig_changes[field]
1143
1144         # Check for unsupported hashes
1145         for j in utils.check_hash_fields(".changes", self.pkg.changes):
1146             self.rejects.append(j)
1147
1148         for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1149             self.rejects.append(j)
1150
1151         # We have to calculate the hash if we have an earlier changes version than
1152         # the hash appears in rather than require it exist in the changes file
1153         for hashname, hashfunc, version in utils.known_hashes:
1154             # TODO: Move _ensure_changes_hash into this class
1155             for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1156                 self.rejects.append(j)
1157             if "source" in self.pkg.changes["architecture"]:
1158                 # TODO: Move _ensure_dsc_hash into this class
1159                 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1160                     self.rejects.append(j)
1161
1162     def check_hashes(self):
1163         for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1164             self.rejects.append(m)
1165
1166         for m in utils.check_size(".changes", self.pkg.files):
1167             self.rejects.append(m)
1168
1169         for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1170             self.rejects.append(m)
1171
1172         for m in utils.check_size(".dsc", self.pkg.dsc_files):
1173             self.rejects.append(m)
1174
1175         self.ensure_hashes()
1176
1177     ###########################################################################
1178
    def ensure_orig(self, target_dir='.', session=None):
        """
        Ensures that all orig files mentioned in the changes file are present
        in target_dir. If they do not exist, they are symlinked into place.

        Candidates are looked up first in the pool, then in the other queue
        directories; a candidate is only linked when its size and md5sum
        match the .dsc entry.

        A list containing the symlinks that were created is returned (so they
        can be removed).
        """

        symlinked = []
        cnf = Config()

        for filename, entry in self.pkg.dsc_files.iteritems():
            if not re_is_orig_source.match(filename):
                # File is not an orig; ignore
                continue

            if os.path.exists(filename):
                # File exists, no need to continue
                continue

            # Closure over filename/entry/symlinked: links 'path' into
            # target_dir iff its size and md5sum match the .dsc entry.
            def symlink_if_valid(path):
                f = utils.open_file(path)
                md5sum = apt_pkg.md5sum(f)
                f.close()

                fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
                expected = (int(entry['size']), entry['md5sum'])

                if fingerprint != expected:
                    return False

                dest = os.path.join(target_dir, filename)

                os.symlink(path, dest)
                symlinked.append(dest)

                return True

            # Use the caller's session when given, otherwise open (and later
            # close) our own.
            session_ = session
            if session is None:
                session_ = DBConn().session()

            found = False

            # Look in the pool
            for poolfile in get_poolfile_like_name('%s' % filename, session_):
                poolfile_path = os.path.join(
                    poolfile.location.path, poolfile.filename
                )

                if symlink_if_valid(poolfile_path):
                    found = True
                    break

            if session is None:
                session_.close()

            if found:
                continue

            # Look in some other queues for the file
            queues = ('New', 'Byhand', 'ProposedUpdates',
                'OldProposedUpdates', 'Embargoed', 'Unembargoed')

            for queue in queues:
                if not cnf.get('Dir::Queue::%s' % queue):
                    continue

                queuefile_path = os.path.join(
                    cnf['Dir::Queue::%s' % queue], filename
                )

                if not os.path.exists(queuefile_path):
                    # Does not exist in this queue
                    continue

                if symlink_if_valid(queuefile_path):
                    break

        return symlinked
1260
1261     ###########################################################################
1262
    def check_lintian(self):
        """
        Extends self.rejects by checking the output of lintian against tags
        specified in Dinstall::LintianTags.

        Only runs for source uploads targeting unstable/experimental and
        only when a tag file is configured.  Symlinks any missing orig
        tarballs into place first so lintian can see the full source.
        """

        cnf = Config()

        # Don't reject binary uploads
        if not self.pkg.changes['architecture'].has_key('source'):
            return

        # Only check some distributions
        for dist in ('unstable', 'experimental'):
            if dist in self.pkg.changes['distribution']:
                break
        else:
            return

        # If we do not have a tagfile, don't do anything
        tagfile = cnf.get("Dinstall::LintianTags")
        if tagfile is None:
            return

        # Parse the yaml file
        sourcefile = file(tagfile, 'r')
        sourcecontent = sourcefile.read()
        sourcefile.close()

        try:
            # NOTE(review): yaml.load can construct arbitrary Python objects.
            # The tag file is operator-controlled configuration, but
            # yaml.safe_load would be the safer choice.
            lintiantags = yaml.load(sourcecontent)['lintian']
        except yaml.YAMLError, msg:
            utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
            return

        # Try and find all orig mentioned in the .dsc
        symlinked = self.ensure_orig()

        # Setup the input file for lintian: one tag per line.
        fd, temp_filename = utils.temp_filename()
        temptagfile = os.fdopen(fd, 'w')
        for tags in lintiantags.values():
            temptagfile.writelines(['%s\n' % x for x in tags])
        temptagfile.close()

        try:
            cmd = "lintian --show-overrides --tags-from-file %s %s" % \
                (temp_filename, self.pkg.changes_file)

            result, output = commands.getstatusoutput(cmd)
        finally:
            # Remove our tempfile and any symlinks we created
            os.unlink(temp_filename)

            for symlink in symlinked:
                os.unlink(symlink)

        # Exit status 2 is treated as "lintian itself failed" -- warn, but
        # still try to parse whatever output we got (presumably deliberate;
        # confirm against lintian's documented exit codes).
        if result == 2:
            utils.warn("lintian failed for %s [return code: %s]." % \
                (self.pkg.changes_file, result))
            utils.warn(utils.prefix_multi_line_string(output, \
                " [possible output:] "))

        # Forward per-tag details to the upload logger, when one is attached.
        def log(*txt):
            if self.logger:
                self.logger.log(
                    [self.pkg.changes_file, "check_lintian"] + list(txt)
                )

        # Generate messages
        parsed_tags = parse_lintian_output(output)
        self.rejects.extend(
            generate_reject_messages(parsed_tags, lintiantags, log=log)
        )
1337
1338     ###########################################################################
1339     def check_urgency(self):
1340         cnf = Config()
1341         if self.pkg.changes["architecture"].has_key("source"):
1342             if not self.pkg.changes.has_key("urgency"):
1343                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1344             self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1345             if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1346                 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1347                                      (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1348                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1349
1350     ###########################################################################
1351
1352     # Sanity check the time stamps of files inside debs.
1353     # [Files in the near future cause ugly warnings and extreme time
1354     #  travel can cause errors on extraction]
1355
    def check_timestamps(self):
        """Sanity-check the timestamps of files inside each uploaded .deb.

        Rejects packages whose control or data members are dated beyond
        Dinstall::FutureTimeTravelGrace in the future or before
        Dinstall::PastCutoffYear, and rejects any .deb whose contents we
        fail to scan at all.
        """
        Cnf = Config()

        future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
        past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
        tar = TarTime(future_cutoff, past_cutoff)

        for filename, entry in self.pkg.files.items():
            if entry["type"] == "deb":
                tar.reset()
                try:
                    deb_file = utils.open_file(filename)
                    apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
                    deb_file.seek(0)
                    try:
                        apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
                    except SystemError, e:
                        # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
                        if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
                            raise
                        deb_file.seek(0)
                        apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")

                    deb_file.close()

                    future_files = tar.future_files.keys()
                    if future_files:
                        # Report the count and one example file.
                        num_future_files = len(future_files)
                        future_file = future_files[0]
                        future_date = tar.future_files[future_file]
                        self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
                               % (filename, num_future_files, future_file, time.ctime(future_date)))

                    ancient_files = tar.ancient_files.keys()
                    if ancient_files:
                        # Report the count and one example file.
                        num_ancient_files = len(ancient_files)
                        ancient_file = ancient_files[0]
                        ancient_date = tar.ancient_files[ancient_file]
                        self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
                               % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
                except:
                    # NOTE(review): bare except also catches unrelated errors;
                    # sys.exc_type/sys.exc_value are deprecated Python 2
                    # globals -- consider sys.exc_info() here.
                    self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1398
1399     def check_if_upload_is_sponsored(self, uid_email, uid_name):
1400         if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1401             sponsored = False
1402         elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1403             sponsored = False
1404             if uid_name == "":
1405                 sponsored = True
1406         else:
1407             sponsored = True
1408             if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1409                 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1410                 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1411                     self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1412                         self.pkg.changes["sponsoremail"] = uid_email
1413
1414         return sponsored
1415
1416
1417     ###########################################################################
1418     # check_signed_by_key checks
1419     ###########################################################################
1420
1421     def check_signed_by_key(self):
1422         """Ensure the .changes is signed by an authorized uploader."""
1423         session = DBConn().session()
1424
1425         # First of all we check that the person has proper upload permissions
1426         # and that this upload isn't blocked
1427         fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1428
1429         if fpr is None:
1430             self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1431             return
1432
1433         # TODO: Check that import-keyring adds UIDs properly
1434         if not fpr.uid:
1435             self.rejects.append("Cannot find uid for fingerprint %s.  Please contact ftpmaster@debian.org" % fpr.fingerprint)
1436             return
1437
1438         # Check that the fingerprint which uploaded has permission to do so
1439         self.check_upload_permissions(fpr, session)
1440
1441         # Check that this package is not in a transition
1442         self.check_transition(session)
1443
1444         session.close()
1445
1446
1447     def check_upload_permissions(self, fpr, session):
1448         # Check any one-off upload blocks
1449         self.check_upload_blocks(fpr, session)
1450
1451         # Start with DM as a special case
1452         # DM is a special case unfortunately, so we check it first
1453         # (keys with no source access get more access than DMs in one
1454         #  way; DMs can only upload for their packages whether source
1455         #  or binary, whereas keys with no access might be able to
1456         #  upload some binaries)
1457         if fpr.source_acl.access_level == 'dm':
1458             self.check_dm_upload(fpr, session)
1459         else:
1460             # Check source-based permissions for other types
1461             if self.pkg.changes["architecture"].has_key("source") and \
1462                 fpr.source_acl.access_level is None:
1463                 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1464                 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1465                 self.rejects.append(rej)
1466                 return
1467             # If not a DM, we allow full upload rights
1468             uid_email = "%s@debian.org" % (fpr.uid.uid)
1469             self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1470
1471
1472         # Check binary upload permissions
1473         # By this point we know that DMs can't have got here unless they
1474         # are allowed to deal with the package concerned so just apply
1475         # normal checks
1476         if fpr.binary_acl.access_level == 'full':
1477             return
1478
1479         # Otherwise we're in the map case
1480         tmparches = self.pkg.changes["architecture"].copy()
1481         tmparches.pop('source', None)
1482
1483         for bam in fpr.binary_acl_map:
1484             tmparches.pop(bam.architecture.arch_string, None)
1485
1486         if len(tmparches.keys()) > 0:
1487             if fpr.binary_reject:
1488                 rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1489                 rej += "\narchitectures involved are: ", ",".join(tmparches.keys())
1490                 self.rejects.append(rej)
1491             else:
1492                 # TODO: This is where we'll implement reject vs throw away binaries later
1493                 rej = "Uhm.  I'm meant to throw away the binaries now but that's not implemented yet"
1494                 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1495                 rej += "\nFingerprint: %s", (fpr.fingerprint)
1496                 self.rejects.append(rej)
1497
1498
1499     def check_upload_blocks(self, fpr, session):
1500         """Check whether any upload blocks apply to this source, source
1501            version, uid / fpr combination"""
1502
1503         def block_rej_template(fb):
1504             rej = 'Manual upload block in place for package %s' % fb.source
1505             if fb.version is not None:
1506                 rej += ', version %s' % fb.version
1507             return rej
1508
1509         for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1510             # version is None if the block applies to all versions
1511             if fb.version is None or fb.version == self.pkg.changes['version']:
1512                 # Check both fpr and uid - either is enough to cause a reject
1513                 if fb.fpr is not None:
1514                     if fb.fpr.fingerprint == fpr.fingerprint:
1515                         self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1516                 if fb.uid is not None:
1517                     if fb.uid == fpr.uid:
1518                         self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1519
1520
    def check_dm_upload(self, fpr, session):
        """
        Enforce the Debian Maintainer upload restrictions of GR 2007-003
        for the signing fingerprint C{fpr}; any violation is appended to
        C{self.rejects}.
        """
        # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
        ## none of the uploaded packages are NEW
        rej = False
        for f in self.pkg.files.keys():
            if self.pkg.files[f].has_key("byhand"):
                self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
                rej = True
            if self.pkg.files[f].has_key("new"):
                self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
                rej = True

        # No point running the remaining (database) checks if we already
        # rejected above.
        if rej:
            return

        ## the most recent version of the package uploaded to unstable or
        ## experimental includes the field "DM-Upload-Allowed: yes" in the source
        ## section of its control file
        q = session.query(DBSource).filter_by(source=self.pkg.changes["source"])
        q = q.join(SrcAssociation)
        q = q.join(Suite).filter(Suite.suite_name.in_(['unstable', 'experimental']))
        # Only the most recent version in those suites matters.
        q = q.order_by(desc('source.version')).limit(1)

        r = q.all()

        if len(r) != 1:
            rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
            self.rejects.append(rej)
            return

        r = r[0]
        if not r.dm_upload_allowed:
            rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
            self.rejects.append(rej)
            return

        ## the Maintainer: field of the uploaded .changes file corresponds with
        ## the owner of the key used (ie, non-developer maintainers may not sponsor
        ## uploads)
        if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
            self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))

        ## the most recent version of the package uploaded to unstable or
        ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
        ## non-developer maintainers cannot NMU or hijack packages)

        # srcuploaders includes the maintainer
        accept = False
        for sup in r.srcuploaders:
            (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
            # Eww - I hope we never have two people with the same name in Debian
            if email == fpr.uid.uid or name == fpr.uid.name:
                accept = True
                break

        if not accept:
            self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
            return

        ## none of the packages are being taken over from other source packages
        # For every binary in every target suite, check that the package
        # currently comes from the same source package.
        for b in self.pkg.changes["binary"].keys():
            for suite in self.pkg.changes["distribution"].keys():
                q = session.query(DBSource)
                q = q.join(DBBinary).filter_by(package=b)
                q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)

                for s in q.all():
                    if s.source != self.pkg.changes["source"]:
                        self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s, suite))
1590
1591
1592
1593     def check_transition(self, session):
1594         cnf = Config()
1595
1596         sourcepkg = self.pkg.changes["source"]
1597
1598         # No sourceful upload -> no need to do anything else, direct return
1599         # We also work with unstable uploads, not experimental or those going to some
1600         # proposed-updates queue
1601         if "source" not in self.pkg.changes["architecture"] or \
1602            "unstable" not in self.pkg.changes["distribution"]:
1603             return
1604
1605         # Also only check if there is a file defined (and existant) with
1606         # checks.
1607         transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1608         if transpath == "" or not os.path.exists(transpath):
1609             return
1610
1611         # Parse the yaml file
1612         sourcefile = file(transpath, 'r')
1613         sourcecontent = sourcefile.read()
1614         try:
1615             transitions = yaml.load(sourcecontent)
1616         except yaml.YAMLError, msg:
1617             # This shouldn't happen, there is a wrapper to edit the file which
1618             # checks it, but we prefer to be safe than ending up rejecting
1619             # everything.
1620             utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1621             return
1622
1623         # Now look through all defined transitions
1624         for trans in transitions:
1625             t = transitions[trans]
1626             source = t["source"]
1627             expected = t["new"]
1628
1629             # Will be None if nothing is in testing.
1630             current = get_source_in_suite(source, "testing", session)
1631             if current is not None:
1632                 compare = apt_pkg.VersionCompare(current.version, expected)
1633
1634             if current is None or compare < 0:
1635                 # This is still valid, the current version in testing is older than
1636                 # the new version we wait for, or there is none in testing yet
1637
1638                 # Check if the source we look at is affected by this.
1639                 if sourcepkg in t['packages']:
1640                     # The source is affected, lets reject it.
1641
1642                     rejectmsg = "%s: part of the %s transition.\n\n" % (
1643                         sourcepkg, trans)
1644
1645                     if current is not None:
1646                         currentlymsg = "at version %s" % (current.version)
1647                     else:
1648                         currentlymsg = "not present in testing"
1649
1650                     rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1651
1652                     rejectmsg += "\n".join(textwrap.wrap("""Your package
1653 is part of a testing transition designed to get %s migrated (it is
1654 currently %s, we need version %s).  This transition is managed by the
1655 Release Team, and %s is the Release-Team member responsible for it.
1656 Please mail debian-release@lists.debian.org or contact %s directly if you
1657 need further assistance.  You might want to upload to experimental until this
1658 transition is done."""
1659                             % (source, currentlymsg, expected,t["rm"], t["rm"])))
1660
1661                     self.rejects.append(rejectmsg)
1662                     return
1663
1664     ###########################################################################
1665     # End check_signed_by_key checks
1666     ###########################################################################
1667
1668     def build_summaries(self):
1669         """ Build a summary of changes the upload introduces. """
1670
1671         (byhand, new, summary, override_summary) = self.pkg.file_summary()
1672
1673         short_summary = summary
1674
1675         # This is for direport's benefit...
1676         f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1677
1678         if byhand or new:
1679             summary += "Changes: " + f
1680
1681         summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1682
1683         summary += self.announce(short_summary, 0)
1684
1685         return (summary, short_summary)
1686
1687     ###########################################################################
1688
    def close_bugs(self, summary, action):
        """
        Send mail to close bugs as instructed by the closes field in the changes file.
        Also add a line to summary if any work was done.

        @type summary: string
        @param summary: summary text, as given by L{build_summaries}

        @type action: bool
        @param action: Set to false no real action will be done.

        @rtype: string
        @return: summary. If action was taken, extended by the list of closed bugs.

        """

        template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')

        bugs = self.pkg.changes["closes"].keys()

        if not bugs:
            return summary

        bugs.sort()
        summary += "Closing bugs: "
        for bug in bugs:
            summary += "%s " % (bug)
            if action:
                # One mail per bug; the template is re-substituted each time.
                self.update_subst()
                self.Subst["__BUG_NUMBER__"] = bug
                # Uploads targeting stable get an extra warning paragraph in
                # the bug-close mail; everything else gets an empty value.
                if self.pkg.changes["distribution"].has_key("stable"):
                    self.Subst["__STABLE_WARNING__"] = """
Note that this package is not part of the released stable Debian
distribution.  It may have dependencies on other unreleased software,
or other instabilities.  Please take care if you wish to install it.
The update will eventually make its way into the next released Debian
distribution."""
                else:
                    self.Subst["__STABLE_WARNING__"] = ""
                mail_message = utils.TemplateSubst(self.Subst, template)
                utils.send_mail(mail_message)

                # Clear up after ourselves
                del self.Subst["__BUG_NUMBER__"]
                del self.Subst["__STABLE_WARNING__"]

        if action and self.logger:
            self.logger.log(["closing bugs"] + bugs)

        summary += "\n"

        return summary
1741
1742     ###########################################################################
1743
    def announce(self, short_summary, action):
        """
        Send an announce mail about a new upload.

        @type short_summary: string
        @param short_summary: Short summary text to include in the mail

        @type action: bool
        @param action: Set to false no real action will be done.

        @rtype: string
        @return: Textstring about action taken.

        """

        cnf = Config()
        announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')

        # Only do announcements for source uploads with a recent dpkg-dev installed
        if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
           self.pkg.changes["architecture"].has_key("source"):
            return ""

        # Track which lists we have already announced to, so that suites
        # sharing an announce address only get one mail.
        lists_done = {}
        summary = ""

        self.Subst["__SHORT_SUMMARY__"] = short_summary

        for dist in self.pkg.changes["distribution"].keys():
            announce_list = cnf.Find("Suite::%s::Announce" % (dist))
            if announce_list == "" or lists_done.has_key(announce_list):
                continue

            lists_done[announce_list] = 1
            summary += "Announcing to %s\n" % (announce_list)

            if action:
                self.update_subst()
                self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
                # Bcc the package tracking server for sourceful uploads so
                # it can follow the package.
                if cnf.get("Dinstall::TrackingServer") and \
                   self.pkg.changes["architecture"].has_key("source"):
                    trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
                    self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto

                mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
                utils.send_mail(mail_message)

                del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]

        # Optionally close the bugs referenced in the .changes as well.
        if cnf.FindB("Dinstall::CloseBugs"):
            summary = self.close_bugs(summary, action)

        del self.Subst["__SHORT_SUMMARY__"]

        return summary
1799
1800     ###########################################################################
    @session_wrapper
    def accept (self, summary, short_summary, session=None):
        """
        Accept an upload.

        This moves all files referenced from the .changes into the pool,
        sends the accepted mail, announces to lists, closes bugs and
        also checks for override disparities. If enabled it will write out
        the version history for the BTS Version Tracking and will finally call
        L{queue_build}.

        @type summary: string
        @param summary: Summary text

        @type short_summary: string
        @param short_summary: Short summary
        """

        cnf = Config()
        stats = SummaryStats()

        print "Installing."
        self.logger.log(["installing changes", self.pkg.changes_file])

        # Pool files referenced by this upload; used later to populate the
        # copy (buildd) queues.
        poolfiles = []

        # Add the .dsc file to the DB first
        for newfile, entry in self.pkg.files.items():
            if entry["type"] == "dsc":
                source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
                for j in pfs:
                    poolfiles.append(j)

        # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
        for newfile, entry in self.pkg.files.items():
            if entry["type"] == "deb":
                poolfiles.append(add_deb_to_db(self, newfile, session))

        # If this is a sourceful diff only upload that is moving
        # cross-component we need to copy the .orig files into the new
        # component too for the same reasons as above.
        # XXX: mhy: I think this should be in add_dsc_to_db
        if self.pkg.changes["architecture"].has_key("source"):
            for orig_file in self.pkg.orig_files.keys():
                if not self.pkg.orig_files[orig_file].has_key("id"):
                    continue # Skip if it's not in the pool
                orig_file_id = self.pkg.orig_files[orig_file]["id"]
                if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
                    continue # Skip if the location didn't change

                # Do the move
                oldf = get_poolfile_by_id(orig_file_id, session)
                old_filename = os.path.join(oldf.location.path, oldf.filename)
                old_dat = {'size': oldf.filesize,   'md5sum': oldf.md5sum,
                           'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}

                new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))

                # TODO: Care about size/md5sum collisions etc
                (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)

                # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
                if newf is None:
                    utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
                    newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)

                    session.flush()

                    # Don't reference the old file from this changes
                    for p in poolfiles:
                        if p.file_id == oldf.file_id:
                            poolfiles.remove(p)

                    poolfiles.append(newf)

                    # Fix up the DSC references
                    toremove = []

                    for df in source.srcfiles:
                        if df.poolfile.file_id == oldf.file_id:
                            # Add a new DSC entry and mark the old one for deletion
                            # Don't do it in the loop so we don't change the thing we're iterating over
                            newdscf = DSCFile()
                            newdscf.source_id = source.source_id
                            newdscf.poolfile_id = newf.file_id
                            session.add(newdscf)

                            toremove.append(df)

                    for df in toremove:
                        session.delete(df)

                    # Flush our changes
                    session.flush()

                    # Make sure that our source object is up-to-date
                    session.expire(source)

        # Add changelog information to the database
        self.store_changelog()

        # Install the files into the pool
        for newfile, entry in self.pkg.files.items():
            destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
            utils.move(newfile, destination)
            self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
            stats.accept_bytes += float(entry["size"])

        # Copy the .changes file across for suite which need it.
        copy_changes = {}
        for suite_name in self.pkg.changes["distribution"].keys():
            if cnf.has_key("Suite::%s::CopyChanges" % (suite_name)):
                copy_changes[cnf["Suite::%s::CopyChanges" % (suite_name)]] = ""

        for dest in copy_changes.keys():
            utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))

        # We're done - commit the database changes
        session.commit()
        # Our SQL session will automatically start a new transaction after
        # the last commit

        # Move the .changes into the 'done' directory
        utils.move(self.pkg.changes_file,
                   os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))

        if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
            UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])

        # Send the accepted mail and announce to the configured lists.
        self.update_subst()
        self.Subst["__SUITE__"] = ""
        self.Subst["__SUMMARY__"] = summary
        mail_message = utils.TemplateSubst(self.Subst,
                                           os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
        utils.send_mail(mail_message)
        self.announce(short_summary, 1)

        ## Helper stuff for DebBugs Version Tracking
        if cnf.Find("Dir::Queue::BTSVersionTrack"):
            if self.pkg.changes["architecture"].has_key("source"):
                # Write the changelog-derived version history to a temp file
                # and rename it into place atomically.
                (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
                version_history = os.fdopen(fd, 'w')
                version_history.write(self.pkg.dsc["bts changelog"])
                version_history.close()
                # [:-8] strips the trailing ".changes" from the filename.
                filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
                                      self.pkg.changes_file[:-8]+".versions")
                os.rename(temp_filename, filename)
                os.chmod(filename, 0644)

            # Write out the binary -> source mapping.
            (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
            debinfo = os.fdopen(fd, 'w')
            for name, entry in sorted(self.pkg.files.items()):
                if entry["type"] == "deb":
                    line = " ".join([entry["package"], entry["version"],
                                     entry["architecture"], entry["source package"],
                                     entry["source version"]])
                    debinfo.write(line+"\n")
            debinfo.close()
            filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
                                  self.pkg.changes_file[:-8]+".debinfo")
            os.rename(temp_filename, filename)
            os.chmod(filename, 0644)

        session.commit()

        # Set up our copy queues (e.g. buildd queues)
        for suite_name in self.pkg.changes["distribution"].keys():
            suite = get_suite(suite_name, session)
            for q in suite.copy_queues:
                for f in poolfiles:
                    q.add_file_from_pool(f)

        session.commit()

        # Finally...
        stats.accept_count += 1
1978
1979     def check_override(self):
1980         """
1981         Checks override entries for validity. Mails "Override disparity" warnings,
1982         if that feature is enabled.
1983
1984         Abandons the check if
1985           - override disparity checks are disabled
1986           - mail sending is disabled
1987         """
1988
1989         cnf = Config()
1990
1991         # Abandon the check if override disparity checks have been disabled
1992         if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
1993             return
1994
1995         summary = self.pkg.check_override()
1996
1997         if summary == "":
1998             return
1999
2000         overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2001
2002         self.update_subst()
2003         self.Subst["__SUMMARY__"] = summary
2004         mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2005         utils.send_mail(mail_message)
2006         del self.Subst["__SUMMARY__"]
2007
2008     ###########################################################################
2009
2010     def remove(self, from_dir=None):
2011         """
2012         Used (for instance) in p-u to remove the package from unchecked
2013
2014         Also removes the package from holding area.
2015         """
2016         if from_dir is None:
2017             from_dir = self.pkg.directory
2018         h = Holding()
2019
2020         for f in self.pkg.files.keys():
2021             os.unlink(os.path.join(from_dir, f))
2022             if os.path.exists(os.path.join(h.holding_dir, f)):
2023                 os.unlink(os.path.join(h.holding_dir, f))
2024
2025         os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2026         if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2027             os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2028
2029     ###########################################################################
2030
2031     def move_to_queue (self, queue):
2032         """
2033         Move files to a destination queue using the permissions in the table
2034         """
2035         h = Holding()
2036         utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2037                    queue.path, perms=int(queue.change_perms, 8))
2038         for f in self.pkg.files.keys():
2039             utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
2040
2041     ###########################################################################
2042
    def force_reject(self, reject_files):
        """
        Forcefully move files from the current directory to the
        reject directory.  If any file already exists in the reject
        directory it will be moved to the morgue to make way for
        the new file.

        @type reject_files: dict
        @param reject_files: file dictionary

        """

        cnf = Config()

        for file_entry in reject_files:
            # Skip any files which don't exist or which we don't have permission to copy.
            if os.access(file_entry, os.R_OK) == 0:
                continue

            dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)

            try:
                # O_EXCL claims the destination name atomically, so nothing
                # can race us for it once the open succeeds.
                dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
            except OSError, e:
                # File exists?  Let's find a new name by adding a number
                if e.errno == errno.EEXIST:
                    try:
                        dest_file = utils.find_next_free(dest_file, 255)
                    except NoFreeFilenameError:
                        # Something's either gone badly Pete Tong, or
                        # someone is trying to exploit us.
                        utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))
                        return

                    # Make sure we really got it
                    try:
                        dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
                    except OSError, e:
                        # Likewise
                        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
                        return
                else:
                    # Anything other than EEXIST is unexpected; propagate it.
                    raise
            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file_entry, dest_file, 1, perms=0660)
            os.close(dest_fd)
2090
2091     ###########################################################################
    def do_reject (self, manual=0, reject_message="", notes=""):
        """
        Reject an upload. If called without a reject message or C{manual} is
        true, spawn an editor so the user can write one.

        @type manual: bool
        @param manual: manual or automated rejection

        @type reject_message: string
        @param reject_message: A reject message

        @type notes: list
        @param notes: note objects (with author/version/notedate/comment
         attributes) used to pre-fill the editor buffer for a manual reject

        @return: 0 on successful rejection, 1 if the operator abandoned the
         rejection; exits the process entirely if the operator chose Quit
        """
        # If we weren't given a manual rejection message, spawn an
        # editor so the user can add one in...
        if manual and not reject_message:
            (fd, temp_filename) = utils.temp_filename()
            temp_file = os.fdopen(fd, 'w')
            if len(notes) > 0:
                # Seed the editor buffer with any existing notes so the
                # operator has context when writing the reject message.
                for note in notes:
                    temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
                                    % (note.author, note.version, note.notedate, note.comment))
            temp_file.close()
            editor = os.environ.get("EDITOR","vi")
            answer = 'E'
            # 'E' re-opens the editor; loop until the operator picks
            # Reject, Abandon or Quit.
            while answer == 'E':
                os.system("%s %s" % (editor, temp_filename))
                temp_fh = utils.open_file(temp_filename)
                reject_message = "".join(temp_fh.readlines())
                temp_fh.close()
                print "Reject message:"
                print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
                prompt = "[R]eject, Edit, Abandon, Quit ?"
                answer = "XXX"
                # Keep prompting until the answer matches one of the offered
                # letters; an empty answer picks the bracketed default.
                while prompt.find(answer) == -1:
                    answer = utils.our_raw_input(prompt)
                    m = re_default_answer.search(prompt)
                    if answer == "":
                        answer = m.group(1)
                    answer = answer[:1].upper()
            os.unlink(temp_filename)
            if answer == 'A':
                return 1
            elif answer == 'Q':
                sys.exit(0)

        print "Rejecting.\n"

        cnf = Config()

        # <changes basename>.reason, written into the reject directory.
        reason_filename = self.pkg.changes_file[:-8] + ".reason"
        reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)

        # Move all the files into the reject directory
        reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
        self.force_reject(reject_files)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reason_filename):
            os.unlink(reason_filename)
        # O_EXCL guarantees we created the file ourselves (no symlink attack).
        reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)

        rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")

        self.update_subst()
        if not manual:
            self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
            self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
            self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
            os.write(reason_fd, reject_message)
            reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
        else:
            # Build up the rejection email
            user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
            self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
            self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
            self.Subst["__REJECT_MESSAGE__"] = ""
            self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
            reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
            # Write the rejection email out as the <foo>.reason file
            os.write(reason_fd, reject_mail_message)

        del self.Subst["__REJECTOR_ADDRESS__"]
        del self.Subst["__MANUAL_REJECT_MESSAGE__"]
        del self.Subst["__CC__"]

        os.close(reason_fd)

        # Send the rejection mail
        utils.send_mail(reject_mail_message)

        if self.logger:
            self.logger.log(["rejected", self.pkg.changes_file])

        return 0
2189
2190     ################################################################################
2191     def in_override_p(self, package, component, suite, binary_type, filename, session):
2192         """
2193         Check if a package already has override entries in the DB
2194
2195         @type package: string
2196         @param package: package name
2197
2198         @type component: string
2199         @param component: database id of the component
2200
2201         @type suite: int
2202         @param suite: database id of the suite
2203
2204         @type binary_type: string
2205         @param binary_type: type of the package
2206
2207         @type filename: string
2208         @param filename: filename we check
2209
2210         @return: the database result. But noone cares anyway.
2211
2212         """
2213
2214         cnf = Config()
2215
2216         if binary_type == "": # must be source
2217             file_type = "dsc"
2218         else:
2219             file_type = binary_type
2220
2221         # Override suite name; used for example with proposed-updates
2222         if cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
2223             suite = cnf["Suite::%s::OverrideSuite" % (suite)]
2224
2225         result = get_override(package, suite, component, file_type, session)
2226
2227         # If checking for a source package fall back on the binary override type
2228         if file_type == "dsc" and len(result) < 1:
2229             result = get_override(package, suite, component, ['deb', 'udeb'], session)
2230
2231         # Remember the section and priority so we can check them later if appropriate
2232         if len(result) > 0:
2233             result = result[0]
2234             self.pkg.files[filename]["override section"] = result.section.section
2235             self.pkg.files[filename]["override priority"] = result.priority.priority
2236             return result
2237
2238         return None
2239
2240     ################################################################################
2241     def get_anyversion(self, sv_list, suite):
2242         """
2243         @type sv_list: list
2244         @param sv_list: list of (suite, version) tuples to check
2245
2246         @type suite: string
2247         @param suite: suite name
2248
2249         Description: TODO
2250         """
2251         Cnf = Config()
2252         anyversion = None
2253         anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
2254         for (s, v) in sv_list:
2255             if s in [ x.lower() for x in anysuite ]:
2256                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2257                     anyversion = v
2258
2259         return anyversion
2260
2261     ################################################################################
2262
    def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
        """
        Ensure versions are newer than existing packages in target
        suites and that cross-suite version checking rules as
        set out in the conf file are satisfied.

        @type sv_list: list
        @param sv_list: list of (suite, version) tuples to check

        @type filename: string
        @param filename: name of the file being checked; used only in
         reject/warning message texts

        @type new_version: string
        @param new_version: version of the upload being checked

        @type sourceful: bool
        @param sourceful: whether the upload contains source; the
         "must be newer than" rule is only enforced for sourceful uploads

        Problems are appended to self.rejects / self.warnings rather than
        returned; may also record propagation targets in
        self.pkg.changes["propdistribution"].
        """

        cnf = Config()

        # Check versions for each target suite
        for target_suite in self.pkg.changes["distribution"].keys():
            must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
            must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]

            # Enforce "must be newer than target suite" even if conffile omits it
            if target_suite not in must_be_newer_than:
                must_be_newer_than.append(target_suite)

            for (suite, existent_version) in sv_list:
                # vercmp > 0: new is newer; == 0: equal; < 0: new is older.
                vercmp = apt_pkg.VersionCompare(new_version, existent_version)

                if suite in must_be_newer_than and sourceful and vercmp < 1:
                    self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))

                if suite in must_be_older_than and vercmp > -1:
                    # The upload is not older than a suite it must be older
                    # than; see whether a configured propagation target can
                    # save it from rejection.
                    cansave = 0

                    if self.pkg.changes.get('distribution-version', {}).has_key(suite):
                        # we really use the other suite, ignoring the conflicting one ...
                        addsuite = self.pkg.changes["distribution-version"][suite]

                        add_version = self.get_anyversion(sv_list, addsuite)
                        target_version = self.get_anyversion(sv_list, target_suite)

                        if not add_version:
                            # not add_version can only happen if we map to a suite
                            # that doesn't enhance the suite we're propup'ing from.
                            # so "propup-ver x a b c; map a d" is a problem only if
                            # d doesn't enhance a.
                            #
                            # i think we could always propagate in this case, rather
                            # than complaining. either way, this isn't a REJECT issue
                            #
                            # And - we really should complain to the dorks who configured dak
                            self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite))
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            cansave = 1
                        elif not target_version:
                            # not targets_version is true when the package is NEW
                            # we could just stick with the "...old version..." REJECT
                            # for this, I think.
                            self.rejects.append("Won't propogate NEW packages.")
                        elif apt_pkg.VersionCompare(new_version, add_version) < 0:
                            # propogation would be redundant. no need to reject though.
                            self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
                            cansave = 1
                        elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
                             apt_pkg.VersionCompare(add_version, target_version) >= 0:
                            # propogate!!
                            self.warnings.append("Propogating upload to %s" % (addsuite))
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            cansave = 1

                    if not cansave:
                        self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2339
2340     ################################################################################
2341     def check_binary_against_db(self, filename, session):
2342         # Ensure version is sane
2343         q = session.query(BinAssociation)
2344         q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[filename]["package"])
2345         q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[filename]["architecture"], 'all']))
2346
2347         self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
2348                                        filename, self.pkg.files[filename]["version"], sourceful=False)
2349
2350         # Check for any existing copies of the file
2351         q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2352         q = q.filter_by(version=self.pkg.files[filename]["version"])
2353         q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2354
2355         if q.count() > 0:
2356             self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
2357
2358     ################################################################################
2359
2360     def check_source_against_db(self, filename, session):
2361         source = self.pkg.dsc.get("source")
2362         version = self.pkg.dsc.get("version")
2363
2364         # Ensure version is sane
2365         q = session.query(SrcAssociation)
2366         q = q.join(DBSource).filter(DBSource.source==source)
2367
2368         self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
2369                                        filename, version, sourceful=True)
2370
2371     ################################################################################
2372     def check_dsc_against_db(self, filename, session):
2373         """
2374
2375         @warning: NB: this function can remove entries from the 'files' index [if
2376          the orig tarball is a duplicate of the one in the archive]; if
2377          you're iterating over 'files' and call this function as part of
2378          the loop, be sure to add a check to the top of the loop to
2379          ensure you haven't just tried to dereference the deleted entry.
2380
2381         """
2382
2383         Cnf = Config()
2384         self.pkg.orig_files = {} # XXX: do we need to clear it?
2385         orig_files = self.pkg.orig_files
2386
2387         # Try and find all files mentioned in the .dsc.  This has
2388         # to work harder to cope with the multiple possible
2389         # locations of an .orig.tar.gz.
2390         # The ordering on the select is needed to pick the newest orig
2391         # when it exists in multiple places.
2392         for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2393             found = None
2394             if self.pkg.files.has_key(dsc_name):
2395                 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2396                 actual_size = int(self.pkg.files[dsc_name]["size"])
2397                 found = "%s in incoming" % (dsc_name)
2398
2399                 # Check the file does not already exist in the archive
2400                 ql = get_poolfile_like_name(dsc_name, session)
2401
2402                 # Strip out anything that isn't '%s' or '/%s$'
2403                 for i in ql:
2404                     if not i.filename.endswith(dsc_name):
2405                         ql.remove(i)
2406
2407                 # "[dak] has not broken them.  [dak] has fixed a
2408                 # brokenness.  Your crappy hack exploited a bug in
2409                 # the old dinstall.
2410                 #
2411                 # "(Come on!  I thought it was always obvious that
2412                 # one just doesn't release different files with
2413                 # the same name and version.)"
2414                 #                        -- ajk@ on d-devel@l.d.o
2415
2416                 if len(ql) > 0:
2417                     # Ignore exact matches for .orig.tar.gz
2418                     match = 0
2419                     if re_is_orig_source.match(dsc_name):
2420                         for i in ql:
2421                             if self.pkg.files.has_key(dsc_name) and \
2422                                int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2423                                self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2424                                 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2425                                 # TODO: Don't delete the entry, just mark it as not needed
2426                                 # This would fix the stupidity of changing something we often iterate over
2427                                 # whilst we're doing it
2428                                 del self.pkg.files[dsc_name]
2429                                 dsc_entry["files id"] = i.file_id
2430                                 if not orig_files.has_key(dsc_name):
2431                                     orig_files[dsc_name] = {}
2432                                 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2433                                 match = 1
2434
2435                                 # Don't bitch that we couldn't find this file later
2436                                 try:
2437                                     self.later_check_files.remove(dsc_name)
2438                                 except ValueError:
2439                                     pass
2440
2441
2442                     if not match:
2443                         self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2444
2445             elif re_is_orig_source.match(dsc_name):
2446                 # Check in the pool
2447                 ql = get_poolfile_like_name(dsc_name, session)
2448
2449                 # Strip out anything that isn't '%s' or '/%s$'
2450                 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2451                 for i in ql:
2452                     if not i.filename.endswith(dsc_name):
2453                         ql.remove(i)
2454
2455                 if len(ql) > 0:
2456                     # Unfortunately, we may get more than one match here if,
2457                     # for example, the package was in potato but had an -sa
2458                     # upload in woody.  So we need to choose the right one.
2459
2460                     # default to something sane in case we don't match any or have only one
2461                     x = ql[0]
2462
2463                     if len(ql) > 1:
2464                         for i in ql:
2465                             old_file = os.path.join(i.location.path, i.filename)
2466                             old_file_fh = utils.open_file(old_file)
2467                             actual_md5 = apt_pkg.md5sum(old_file_fh)
2468                             old_file_fh.close()
2469                             actual_size = os.stat(old_file)[stat.ST_SIZE]
2470                             if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2471                                 x = i
2472
2473                     old_file = os.path.join(i.location.path, i.filename)
2474                     old_file_fh = utils.open_file(old_file)
2475                     actual_md5 = apt_pkg.md5sum(old_file_fh)
2476                     old_file_fh.close()
2477                     actual_size = os.stat(old_file)[stat.ST_SIZE]
2478                     found = old_file
2479                     suite_type = x.location.archive_type
2480                     # need this for updating dsc_files in install()
2481                     dsc_entry["files id"] = x.file_id
2482                     # See install() in process-accepted...
2483                     if not orig_files.has_key(dsc_name):
2484                         orig_files[dsc_name] = {}
2485                     orig_files[dsc_name]["id"] = x.file_id
2486                     orig_files[dsc_name]["path"] = old_file
2487                     orig_files[dsc_name]["location"] = x.location.location_id
2488                 else:
2489                     # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2490                     # Not there? Check the queue directories...
2491                     for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2492                         if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2493                             continue
2494                         in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2495                         if os.path.exists(in_otherdir):
2496                             in_otherdir_fh = utils.open_file(in_otherdir)
2497                             actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2498                             in_otherdir_fh.close()
2499                             actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2500                             found = in_otherdir
2501                             if not orig_files.has_key(dsc_name):
2502                                 orig_files[dsc_name] = {}
2503                             orig_files[dsc_name]["path"] = in_otherdir
2504
2505                     if not found:
2506                         self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2507                         continue
2508             else:
2509                 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2510                 continue
2511             if actual_md5 != dsc_entry["md5sum"]:
2512                 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2513             if actual_size != int(dsc_entry["size"]):
2514                 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2515
2516     ################################################################################
2517     # This is used by process-new and process-holding to recheck a changes file
2518     # at the time we're running.  It mainly wraps various other internal functions
2519     # and is similar to accepted_checks - these should probably be tidied up
2520     # and combined
2521     def recheck(self, session):
2522         cnf = Config()
2523         for f in self.pkg.files.keys():
2524             # The .orig.tar.gz can disappear out from under us is it's a
2525             # duplicate of one in the archive.
2526             if not self.pkg.files.has_key(f):
2527                 continue
2528
2529             entry = self.pkg.files[f]
2530
2531             # Check that the source still exists
2532             if entry["type"] == "deb":
2533                 source_version = entry["source version"]
2534                 source_package = entry["source package"]
2535                 if not self.pkg.changes["architecture"].has_key("source") \
2536                    and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
2537                     source_epochless_version = re_no_epoch.sub('', source_version)
2538                     dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2539                     found = False
2540                     for q in ["Embargoed", "Unembargoed", "Newstage"]:
2541                         if cnf.has_key("Dir::Queue::%s" % (q)):
2542                             if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
2543                                 found = True
2544                     if not found:
2545                         self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2546
2547             # Version and file overwrite checks
2548             if entry["type"] == "deb":
2549                 self.check_binary_against_db(f, session)
2550             elif entry["type"] == "dsc":
2551                 self.check_source_against_db(f, session)
2552                 self.check_dsc_against_db(f, session)
2553
2554     ################################################################################
2555     def accepted_checks(self, overwrite_checks, session):
2556         # Recheck anything that relies on the database; since that's not
2557         # frozen between accept and our run time when called from p-a.
2558
2559         # overwrite_checks is set to False when installing to stable/oldstable
2560
2561         propogate={}
2562         nopropogate={}
2563
2564         # Find the .dsc (again)
2565         dsc_filename = None
2566         for f in self.pkg.files.keys():
2567             if self.pkg.files[f]["type"] == "dsc":
2568                 dsc_filename = f
2569
2570         for checkfile in self.pkg.files.keys():
2571             # The .orig.tar.gz can disappear out from under us is it's a
2572             # duplicate of one in the archive.
2573             if not self.pkg.files.has_key(checkfile):
2574                 continue
2575
2576             entry = self.pkg.files[checkfile]
2577
2578             # Check that the source still exists
2579             if entry["type"] == "deb":
2580                 source_version = entry["source version"]
2581                 source_package = entry["source package"]
2582                 if not self.pkg.changes["architecture"].has_key("source") \
2583                    and not source_exists(source_package, source_version,  self.pkg.changes["distribution"].keys()):
2584                     self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2585
2586             # Version and file overwrite checks
2587             if overwrite_checks:
2588                 if entry["type"] == "deb":
2589                     self.check_binary_against_db(checkfile, session)
2590                 elif entry["type"] == "dsc":
2591                     self.check_source_against_db(checkfile, session)
2592                     self.check_dsc_against_db(dsc_filename, session)
2593
2594             # propogate in the case it is in the override tables:
2595             for suite in self.pkg.changes.get("propdistribution", {}).keys():
2596                 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2597                     propogate[suite] = 1
2598                 else:
2599                     nopropogate[suite] = 1
2600
2601         for suite in propogate.keys():
2602             if suite in nopropogate:
2603                 continue
2604             self.pkg.changes["distribution"][suite] = 1
2605
2606         for checkfile in self.pkg.files.keys():
2607             # Check the package is still in the override tables
2608             for suite in self.pkg.changes["distribution"].keys():
2609                 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2610                     self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2611
2612     ################################################################################
2613     # This is not really a reject, but an unaccept, but since a) the code for
2614     # that is non-trivial (reopen bugs, unannounce etc.), b) this should be
2615     # extremely rare, for now we'll go with whining at our admin folks...
2616
2617     def do_unaccept(self):
2618         cnf = Config()
2619
2620         self.update_subst()
2621         self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2622         self.Subst["__REJECT_MESSAGE__"] = self.package_info()
2623         self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2624         self.Subst["__BCC__"] = "X-DAK: dak process-accepted"
2625         if cnf.has_key("Dinstall::Bcc"):
2626             self.Subst["__BCC__"] += "\nBcc: %s" % (cnf["Dinstall::Bcc"])
2627
2628         template = os.path.join(cnf["Dir::Templates"], "process-accepted.unaccept")
2629
2630         reject_mail_message = utils.TemplateSubst(self.Subst, template)
2631
2632         # Write the rejection email out as the <foo>.reason file
2633         reason_filename = os.path.basename(self.pkg.changes_file[:-8]) + ".reason"
2634         reject_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2635
2636         # If we fail here someone is probably trying to exploit the race
2637         # so let's just raise an exception ...
2638         if os.path.exists(reject_filename):
2639             os.unlink(reject_filename)
2640
2641         fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2642         os.write(fd, reject_mail_message)
2643         os.close(fd)
2644
2645         utils.send_mail(reject_mail_message)
2646
2647         del self.Subst["__REJECTOR_ADDRESS__"]
2648         del self.Subst["__REJECT_MESSAGE__"]
2649         del self.Subst["__CC__"]
2650
2651     ################################################################################
2652     # If any file of an upload has a recent mtime then chances are good
2653     # the file is still being uploaded.
2654
2655     def upload_too_new(self):
2656         cnf = Config()
2657         too_new = False
2658         # Move back to the original directory to get accurate time stamps
2659         cwd = os.getcwd()
2660         os.chdir(self.pkg.directory)
2661         file_list = self.pkg.files.keys()
2662         file_list.extend(self.pkg.dsc_files.keys())
2663         file_list.append(self.pkg.changes_file)
2664         for f in file_list:
2665             try:
2666                 last_modified = time.time()-os.path.getmtime(f)
2667                 if last_modified < int(cnf["Dinstall::SkipTime"]):
2668                     too_new = True
2669                     break
2670             except:
2671                 pass
2672
2673         os.chdir(cwd)
2674         return too_new
2675
2676     def store_changelog(self):
2677
2678         # Skip binary-only upload if it is not a bin-NMU
2679         if not self.pkg.changes['architecture'].has_key('source'):
2680             from daklib.regexes import re_bin_only_nmu
2681             if not re_bin_only_nmu.search(self.pkg.changes['version']):
2682                 return
2683
2684         session = DBConn().session()
2685
2686         # Check if upload already has a changelog entry
2687         query = """SELECT changelog_id FROM changes WHERE source = :source
2688                    AND version = :version AND architecture = :architecture AND changelog_id != 0"""
2689         if session.execute(query, {'source': self.pkg.changes['source'], \
2690                                    'version': self.pkg.changes['version'], \
2691                                    'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
2692             session.commit()
2693             return
2694
2695         # Add current changelog text into changelogs_text table, return created ID
2696         query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
2697         ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]
2698
2699         # Link ID to the upload available in changes table
2700         query = """UPDATE changes SET changelog_id = :id WHERE source = :source
2701                    AND version = :version AND architecture = :architecture"""
2702         session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
2703                                 'version': self.pkg.changes['version'], \
2704                                 'architecture': " ".join(self.pkg.changes['architecture'].keys())})
2705
2706         session.commit()