#!/usr/bin/env python
# vim:set et sw=4:

"""
Queue utility functions for dak

@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2001 - 2006 James Troup <james@nocrew.org>
@copyright: 2009, 2010  Joerg Jaspert <joerg@debian.org>
@license: GNU General Public License version 2 or later
"""

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

###############################################################################

import errno
import os
import stat
import sys
import time
import apt_inst
import apt_pkg
import utils
import commands
import shutil
import textwrap
from types import *
from sqlalchemy.sql.expression import desc
from sqlalchemy.orm.exc import NoResultFound

import yaml

from dak_exceptions import *
from changes import *
from regexes import *
from config import Config
from holding import Holding
from urgencylog import UrgencyLog
from dbconn import *
from summarystats import SummaryStats
from utils import parse_changes, check_dsc_files
from textutils import fix_maintainer
from binary import Binary
from lintian import parse_lintian_output, generate_reject_messages

###############################################################################
def get_type(f, session):
    """
    Get the file type of C{f}

    @type f: dict
    @param f: file entry from Changes object

    @type session: SQLA Session
    @param session: SQL Alchemy session object

    @rtype: string
    @return: filetype

    """
    # Determine the type
    if f.has_key("dbtype"):
        file_type = f["dbtype"]
    elif re_source_ext.match(f["type"]):
        file_type = "dsc"
    else:
        file_type = f["type"]
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (file_type))

    # Validate the override type
    type_id = get_override_type(file_type, session)
    if type_id is None:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))

    return file_type
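# For example (hypothetical file entries; a "dbtype" key, set for binary
# packages, takes precedence over the "type" key, and "type" values matched
# by re_source_ext are mapped to "dsc"):
#
#   get_type({"dbtype": "udeb", "type": "deb"}, session)  # -> "udeb"
#   get_type({"type": "dsc"}, session)                    # -> "dsc"
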
91 ################################################################################
92
93 # Determine what parts in a .changes are NEW
94
95 def determine_new(changes, files, warn=1, session = None):
96     """
97     Determine what parts in a C{changes} file are NEW.
98
99     @type changes: Upload.Pkg.changes dict
100     @param changes: Changes dictionary
101
102     @type files: Upload.Pkg.files dict
103     @param files: Files dictionary
104
105     @type warn: bool
106     @param warn: Warn if overrides are added for (old)stable
107
108     @rtype: dict
109     @return: dictionary of NEW components.
110
111     """
112     new = {}
113
114     # Build up a list of potentially new things
115     for name, f in files.items():
116         # Skip byhand elements
117 #        if f["type"] == "byhand":
118 #            continue
119         pkg = f["package"]
120         priority = f["priority"]
121         section = f["section"]
122         file_type = get_type(f, session)
123         component = f["component"]
124
125         if file_type == "dsc":
126             priority = "source"
127
128         if not new.has_key(pkg):
129             new[pkg] = {}
130             new[pkg]["priority"] = priority
131             new[pkg]["section"] = section
132             new[pkg]["type"] = file_type
133             new[pkg]["component"] = component
134             new[pkg]["files"] = []
135         else:
136             old_type = new[pkg]["type"]
137             if old_type != file_type:
138                 # source gets trumped by deb or udeb
139                 if old_type == "dsc":
140                     new[pkg]["priority"] = priority
141                     new[pkg]["section"] = section
142                     new[pkg]["type"] = file_type
143                     new[pkg]["component"] = component
144
145         new[pkg]["files"].append(name)
146
147         if f.has_key("othercomponents"):
148             new[pkg]["othercomponents"] = f["othercomponents"]
149
150     # Fix up the list of target suites
151     cnf = Config()
152     for suite in changes["suite"].keys():
153         override = cnf.Find("Suite::%s::OverrideSuite" % (suite))
154         if override:
155             (olderr, newerr) = (get_suite(suite, session) == None,
156                                 get_suite(override, session) == None)
157             if olderr or newerr:
158                 (oinv, newinv) = ("", "")
159                 if olderr: oinv = "invalid "
160                 if newerr: ninv = "invalid "
161                 print "warning: overriding %ssuite %s to %ssuite %s" % (
162                         oinv, suite, ninv, override)
163             del changes["suite"][suite]
164             changes["suite"][override] = 1
165
166     for suite in changes["suite"].keys():
167         for pkg in new.keys():
168             ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
169             if len(ql) > 0:
170                 for file_entry in new[pkg]["files"]:
171                     if files[file_entry].has_key("new"):
172                         del files[file_entry]["new"]
173                 del new[pkg]
174
175     if warn:
176         for s in ['stable', 'oldstable']:
177             if changes["suite"].has_key(s):
178                 print "WARNING: overrides will be added for %s!" % s
179         for pkg in new.keys():
180             if new[pkg].has_key("othercomponents"):
181                 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
182
183     return new
184
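# A sketch of the returned structure for a hypothetical upload whose binary
# "foo" has no override entry in any target suite yet:
#
#   {"foo": {"priority": "optional", "section": "utils", "type": "deb",
#            "component": "main", "files": ["foo_1.0-1_amd64.deb"]}}
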
################################################################################

def check_valid(new, session=None):
    """
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages can not be udeb
      - source priority can only be assigned to dsc file types

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.

    """
    for pkg in new.keys():
        section_name = new[pkg]["section"]
        priority_name = new[pkg]["priority"]
        file_type = new[pkg]["type"]

        section = get_section(section_name, session)
        if section is None:
            new[pkg]["section id"] = -1
        else:
            new[pkg]["section id"] = section.section_id

        priority = get_priority(priority_name, session)
        if priority is None:
            new[pkg]["priority id"] = -1
        else:
            new[pkg]["priority id"] = priority.priority_id

        # Sanity checks
        di = section_name.find("debian-installer") != -1

        # If d-i, we must be udeb and vice-versa
        if     (di and file_type not in ("udeb", "dsc")) or \
           (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1

        # If dsc we need to be source and vice-versa
        # (compare the priority *name*; `priority` itself is a database object)
        if (priority_name == "source" and file_type != "dsc") or \
           (priority_name != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1

###############################################################################

# Used by Upload.check_timestamps
class TarTime(object):
    def __init__(self, future_cutoff, past_cutoff):
        self.reset()
        self.future_cutoff = future_cutoff
        self.past_cutoff = past_cutoff

    def reset(self):
        self.future_files = {}
        self.ancient_files = {}

    def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
        if MTime > self.future_cutoff:
            self.future_files[Name] = MTime
        if MTime < self.past_cutoff:
            self.ancient_files[Name] = MTime
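# A sketch of how check_timestamps can drive this callback, assuming the old
# python-apt apt_inst.debExtract(file, callback, member) API this class was
# written against:
#
#   tar = TarTime(future_cutoff, past_cutoff)
#   deb_file = utils.open_file(filename)
#   apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
#   deb_file.seek(0)
#   apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
#   if tar.future_files or tar.ancient_files:
#       # reject, naming the offending members and their mtimes
#       pass
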
###############################################################################

class Upload(object):
    """
    Everything that has to do with processing an upload.

    """
    def __init__(self):
        self.logger = None
        self.pkg = Changes()
        self.reset()

    ###########################################################################

    def reset(self):
        """ Reset a number of internal variables."""

        # Initialize the substitution template map
        cnf = Config()
        self.Subst = {}
        self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
        self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
        self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
        self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]

        self.rejects = []
        self.warnings = []
        self.notes = []

        self.later_check_files = []

        self.pkg.reset()

    def package_info(self):
        """
        Format various messages from this Upload to send to the maintainer.
        """

        msgs = (
            ('Reject Reasons', self.rejects),
            ('Warnings', self.warnings),
            ('Notes', self.notes),
        )

        msg = ''
        for title, messages in msgs:
            if messages:
                msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
        msg += '\n\n'

        return msg
    ###########################################################################
    def update_subst(self):
        """ Set up the per-package template substitution mappings """

        cnf = Config()

        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not self.pkg.changes.has_key("architecture") or not \
           isinstance(self.pkg.changes["architecture"], dict):
            self.pkg.changes["architecture"] = { "Unknown" : "" }

        # and maintainer2047 may not exist.
        if not self.pkg.changes.has_key("maintainer2047"):
            self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]

        self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
        self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if self.pkg.changes["architecture"].has_key("source") and \
           self.pkg.changes["changedby822"] != "" and \
           (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):

            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
        else:
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")

        # Process policy doesn't set the fingerprint field and I don't want to make it
        # do it for now as I don't want to have to deal with the case where we accepted
        # the package into PU-NEW, but the fingerprint has gone away from the keyring in
        # the meantime so the package will be remarked as rejectable.  Urgh.
        # TODO: Fix this properly
        if self.pkg.changes.has_key('fingerprint'):
            session = DBConn().session()
            fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
            if self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
                self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
            session.close()

        if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
            self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if cnf.get("Dinstall::OverrideMaintainer"):
            self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
            self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]

        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
        self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
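    # A sketch of how the substitution map is typically consumed: dak's mail
    # templates contain the literal __KEY__ tokens, which utils.TemplateSubst
    # replaces (the template filename here is illustrative):
    #
    #   upload.update_subst()
    #   mail_message = utils.TemplateSubst(
    #       upload.Subst,
    #       os.path.join(cnf["Dir::Templates"], "process-unchecked.new"))
    #   utils.send_mail(mail_message)
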
    ###########################################################################
    def load_changes(self, filename):
        """
        Load a changes file and set up a dictionary around it. Also checks for
        mandatory fields within.

        @type filename: string
        @param filename: Changes filename, full path.

        @rtype: boolean
        @return: whether the changes file was valid or not.  We may want to
                 reject even if this is True (see what gets put in self.rejects).
                 This is simply to prevent us even trying things later which will
                 fail because we couldn't properly parse the file.
        """
        Cnf = Config()
        self.pkg.changes_file = filename

        # Parse the .changes file into a dictionary
        try:
            self.pkg.changes.update(parse_changes(filename))
        except CantOpenError:
            self.rejects.append("%s: can't read file." % (filename))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except ChangesUnicodeError:
            self.rejects.append("%s: changes file not proper utf-8" % (filename))
            return False

        # Parse the Files field from the .changes into another dictionary
        try:
            self.pkg.files.update(utils.build_file_list(self.pkg.changes))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (filename, format))
            return False

        # Check for mandatory fields
        for i in ("distribution", "source", "binary", "architecture",
                  "version", "maintainer", "files", "changes", "description"):
            if not self.pkg.changes.has_key(i):
                # Avoid undefined errors later
                self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
                return False

        # Strip a source version in brackets from the source field
        if re_strip_srcver.search(self.pkg.changes["source"]):
            self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])

        # Ensure the source field is a valid package name.
        if not re_valid_pkg_name.match(self.pkg.changes["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))

        # Split multi-value fields into a lower-level dictionary
        for i in ("architecture", "distribution", "binary", "closes"):
            o = self.pkg.changes.get(i, "")
            if o != "":
                del self.pkg.changes[i]

            self.pkg.changes[i] = {}

            for j in o.split():
                self.pkg.changes[i][j] = 1
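        # For example, "Architecture: source amd64" in the .changes becomes
        # self.pkg.changes["architecture"] == {"source": 1, "amd64": 1},
        # which is why the rest of the code tests membership with has_key().
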
        # Fix the Maintainer: field to be RFC822/2047 compatible
        try:
            (self.pkg.changes["maintainer822"],
             self.pkg.changes["maintainer2047"],
             self.pkg.changes["maintainername"],
             self.pkg.changes["maintaineremail"]) = \
                   fix_maintainer(self.pkg.changes["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["maintainer"], msg))

        # ...likewise for the Changed-By: field if it exists.
        try:
            (self.pkg.changes["changedby822"],
             self.pkg.changes["changedby2047"],
             self.pkg.changes["changedbyname"],
             self.pkg.changes["changedbyemail"]) = \
                   fix_maintainer(self.pkg.changes.get("changed-by", ""))
        except ParseMaintError, msg:
            self.pkg.changes["changedby822"] = ""
            self.pkg.changes["changedby2047"] = ""
            self.pkg.changes["changedbyname"] = ""
            self.pkg.changes["changedbyemail"] = ""

            self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["changed-by"], msg))

        # Ensure all the values in Closes: are numbers
        if self.pkg.changes.has_key("closes"):
            for i in self.pkg.changes["closes"].keys():
                if re_isanum.match(i) is None:
                    self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))

        # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
        self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
        self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])

        # Check the .changes is non-empty
        if not self.pkg.files:
            self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
            return False

        # Changes was syntactically valid even if we'll reject
        return True
    ###########################################################################

    def check_distributions(self):
        "Check and map the Distribution field"

        Cnf = Config()

        # Handle suite mappings
        for m in Cnf.ValueList("SuiteMappings"):
            args = m.split()
            mtype = args[0]
            if mtype == "map" or mtype == "silent-map":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    del self.pkg.changes["distribution"][source]
                    self.pkg.changes["distribution"][dest] = 1
                    if mtype != "silent-map":
                        self.notes.append("Mapping %s to %s." % (source, dest))
                if self.pkg.changes.has_key("distribution-version"):
                    if self.pkg.changes["distribution-version"].has_key(source):
                        self.pkg.changes["distribution-version"][source] = dest
            elif mtype == "map-unreleased":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    for arch in self.pkg.changes["architecture"].keys():
                        if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
                            self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
                            del self.pkg.changes["distribution"][source]
                            self.pkg.changes["distribution"][dest] = 1
                            break
            elif mtype == "ignore":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    del self.pkg.changes["distribution"][suite]
                    self.warnings.append("Ignoring %s as a target suite." % (suite))
            elif mtype == "reject":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    self.rejects.append("Uploads to %s are not accepted." % (suite))
            elif mtype == "propup-version":
                # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
                #
                # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
                if self.pkg.changes["distribution"].has_key(args[1]):
                    self.pkg.changes.setdefault("distribution-version", {})
                    for suite in args[2:]:
                        self.pkg.changes["distribution-version"][suite] = suite
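        # Hypothetical SuiteMappings entries illustrating the formats parsed
        # above (the real values live in dak.conf):
        #
        #   "map stable proposed-updates"
        #   "map-unreleased unstable experimental"
        #   "ignore testing"
        #   "reject stable-security"
        #   "propup-version testing-security testing"
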
        # Ensure there is (still) a target distribution
        if len(self.pkg.changes["distribution"].keys()) < 1:
            self.rejects.append("No valid distribution remaining.")

        # Ensure target distributions exist
        for suite in self.pkg.changes["distribution"].keys():
            if not Cnf.has_key("Suite::%s" % (suite)):
                self.rejects.append("Unknown distribution `%s'." % (suite))

    ###########################################################################

    def binary_file_checks(self, f, session):
        cnf = Config()
        entry = self.pkg.files[f]

        # Extract package control information
        deb_file = utils.open_file(f)
        try:
            control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
        except:
            self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
            deb_file.close()
            # Can't continue, none of the checks on control would work.
            return

        # Check for mandatory "Description:" (indexing raises KeyError if absent)
        deb_file.seek(0)
        try:
            apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
        except:
            self.rejects.append("%s: Missing Description in binary package" % (f))
            return

        deb_file.close()

        # Check for mandatory fields
        for field in [ "Package", "Architecture", "Version" ]:
            if control.Find(field) is None:
                # Can't continue
                self.rejects.append("%s: No %s field in control." % (f, field))
                return

        # Ensure the package name matches the one given in the .changes
        if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
            self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))

        # Validate the package field
        package = control.Find("Package")
        if not re_valid_pkg_name.match(package):
            self.rejects.append("%s: invalid package name '%s'." % (f, package))

        # Validate the version field
        version = control.Find("Version")
        if not re_valid_version.match(version):
            self.rejects.append("%s: invalid version number '%s'." % (f, version))

        # Ensure the architecture of the .deb is one we know about.
        default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
        architecture = control.Find("Architecture")
        upload_suite = self.pkg.changes["distribution"].keys()[0]

        if      architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session=session)] \
            and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session=session)]:
            self.rejects.append("Unknown architecture '%s'." % (architecture))

        # Ensure the architecture of the .deb is one of the ones
        # listed in the .changes.
        if not self.pkg.changes["architecture"].has_key(architecture):
            self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))

        # Sanity-check the Depends field
        depends = control.Find("Depends")
        if depends == '':
            self.rejects.append("%s: Depends field is empty." % (f))

        # Sanity-check the Provides field
        provides = control.Find("Provides")
        if provides:
            provide = re_spacestrip.sub('', provides)
            if provide == '':
                self.rejects.append("%s: Provides field is empty." % (f))
            prov_list = provide.split(",")
            for prov in prov_list:
                if not re_valid_pkg_name.match(prov):
                    self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))

        # Check the section & priority match those given in the .changes (non-fatal)
        if     control.Find("Section") and entry["section"] != "" \
           and entry["section"] != control.Find("Section"):
            self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
                                (f, control.Find("Section", ""), entry["section"]))
        if control.Find("Priority") and entry["priority"] != "" \
           and entry["priority"] != control.Find("Priority"):
            self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
                                (f, control.Find("Priority", ""), entry["priority"]))

        entry["package"] = package
        entry["architecture"] = architecture
        entry["version"] = version
        entry["maintainer"] = control.Find("Maintainer", "")

        if f.endswith(".udeb"):
            self.pkg.files[f]["dbtype"] = "udeb"
        elif f.endswith(".deb"):
            self.pkg.files[f]["dbtype"] = "deb"
        else:
            self.rejects.append("%s is neither a .deb nor a .udeb." % (f))

        entry["source"] = control.Find("Source", entry["package"])

        # Get the source version
        source = entry["source"]
        source_version = ""

        if source.find("(") != -1:
            m = re_extract_src_version.match(source)
            source = m.group(1)
            source_version = m.group(2)

        if not source_version:
            source_version = self.pkg.files[f]["version"]

        entry["source package"] = source
        entry["source version"] = source_version

        # Ensure the filename matches the contents of the .deb
        m = re_isadeb.match(f)

        #  package name
        file_package = m.group(1)
        if entry["package"] != file_package:
            self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
                                (f, file_package, entry["dbtype"], entry["package"]))
        epochless_version = re_no_epoch.sub('', control.Find("Version"))

        #  version
        file_version = m.group(2)
        if epochless_version != file_version:
            self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
                                (f, file_version, entry["dbtype"], epochless_version))

        #  architecture
        file_architecture = m.group(3)
        if entry["architecture"] != file_architecture:
            self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
                                (f, file_architecture, entry["dbtype"], entry["architecture"]))

        # Check for existing source
        source_version = entry["source version"]
        source_package = entry["source package"]
        if self.pkg.changes["architecture"].has_key("source"):
            if source_version != self.pkg.changes["version"]:
                self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
                                    (source_version, f, self.pkg.changes["version"]))
        else:
            # Check in the SQL database
            if not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
                # Check in one of the other directories
                source_epochless_version = re_no_epoch.sub('', source_version)
                dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
                if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
                    entry["byhand"] = 1
                elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
                    entry["new"] = 1
                else:
                    dsc_file_exists = False
                    for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates", "Lenny-Volatile-Proposed-Updates"]:
                        if cnf.has_key("Dir::Queue::%s" % (myq)):
                            if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
                                dsc_file_exists = True
                                break

                    if not dsc_file_exists:
                        self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))

        # Check the version and for file overwrites
        self.check_binary_against_db(f, session)

        # Temporarily disable contents generation until we change the table storage layout
        #b = Binary(f)
        #b.scan_package()
        #if len(b.rejects) > 0:
        #    for j in b.rejects:
        #        self.rejects.append(j)

    def source_file_checks(self, f, session):
        entry = self.pkg.files[f]

        m = re_issource.match(f)
        if not m:
            return

        entry["package"] = m.group(1)
        entry["version"] = m.group(2)
        entry["type"] = m.group(3)

        # Ensure the source package name matches the Source field in the .changes
        if self.pkg.changes["source"] != entry["package"]:
            self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))

        # Ensure the source version matches the version in the .changes file
        if re_is_orig_source.match(f):
            changes_version = self.pkg.changes["chopversion2"]
        else:
            changes_version = self.pkg.changes["chopversion"]

        if changes_version != entry["version"]:
            self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))

        # Ensure the .changes lists source in the Architecture field
        if not self.pkg.changes["architecture"].has_key("source"):
            self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))

        # Check the signature of a .dsc file
        if entry["type"] == "dsc":
            # check_signature returns either:
            #  (None, [list, of, rejects]) or (signature, [])
            (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
            for j in rejects:
                self.rejects.append(j)

        entry["architecture"] = "source"

    def per_suite_file_checks(self, f, suite, session):
        cnf = Config()
        entry = self.pkg.files[f]

        # Skip byhand
        if entry.has_key("byhand"):
            return

        # Check we have fields we need to do these checks
        oktogo = True
        for m in ['component', 'package', 'priority', 'size', 'md5sum']:
            if not entry.has_key(m):
                self.rejects.append("file '%s' does not have field %s set" % (f, m))
                oktogo = False

        if not oktogo:
            return

        # Handle component mappings
        for m in cnf.ValueList("ComponentMappings"):
            (source, dest) = m.split()
            if entry["component"] == source:
                entry["original component"] = source
                entry["component"] = dest

        # Ensure the component is valid for the target suite
        if cnf.has_key("Suite::%s::Components" % (suite)) and \
           entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
            self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
            return

        # Validate the component
        if not get_component(entry["component"], session):
            self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
            return

        # See if the package is NEW
        if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
            entry["new"] = 1

        # Validate the priority
        if entry["priority"].find('/') != -1:
            self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))

        # Determine the location
        location = cnf["Dir::Pool"]
        l = get_location(location, entry["component"], session=session)
        if l is None:
            self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
            entry["location id"] = -1
        else:
            entry["location id"] = l.location_id

        # Check the md5sum & size against existing files (if any)
        entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])

        found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
                                         entry["size"], entry["md5sum"], entry["location id"])

        if found is None:
            self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
        elif found is False and poolfile is not None:
            self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
        else:
            if poolfile is None:
                entry["files id"] = None
            else:
                entry["files id"] = poolfile.file_id

        # Check for packages that have moved from one component to another
        entry['suite'] = suite
        res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
        if res.rowcount > 0:
            entry["othercomponents"] = res.fetchone()[0]

    def check_files(self, action=True):
        file_keys = self.pkg.files.keys()
        holding = Holding()
        cnf = Config()

        if action:
            cwd = os.getcwd()
            os.chdir(self.pkg.directory)
            for f in file_keys:
                ret = holding.copy_to_holding(f)
                if ret is not None:
                    self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)

            os.chdir(cwd)

        # Check if we already know the changes file
        # [NB: this check must be done post-suite mapping]
        base_filename = os.path.basename(self.pkg.changes_file)

        session = DBConn().session()

        try:
            dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
            # if in the pool or in a queue other than unchecked, reject
            if (dbc.in_queue is None) \
                   or (dbc.in_queue is not None
                       and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
                self.rejects.append("%s file already known to dak" % base_filename)
        except NoResultFound, e:
            # not known, good
            pass

        has_binaries = False
        has_source = False

        for f, entry in self.pkg.files.items():
            # Ensure the file does not already exist in one of the accepted directories
            for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                if not cnf.has_key("Dir::Queue::%s" % (d)): continue
                if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
                    self.rejects.append("%s file already exists in the %s directory." % (f, d))

            if not re_taint_free.match(f):
                self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))

            # Check the file is readable
            if os.access(f, os.R_OK) == 0:
                # When running in -n, copy_to_holding() won't have
                # generated the reject_message, so we need to.
                if action:
                    if os.path.exists(f):
                        self.rejects.append("Can't read `%s'. [permission denied]" % (f))
                    else:
                        # Don't directly reject, mark to check later to deal with orig's
                        # we can find in the pool
                        self.later_check_files.append(f)
                entry["type"] = "unreadable"
                continue

            # If it's byhand skip remaining checks
            if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Checks for a binary package...
            elif re_isadeb.match(f):
                has_binaries = True
                entry["type"] = "deb"

                # This routine appends to self.rejects/warnings as appropriate
                self.binary_file_checks(f, session)

            # Checks for a source package...
            elif re_issource.match(f):
                has_source = True

                # This routine appends to self.rejects/warnings as appropriate
                self.source_file_checks(f, session)

            # Not a binary or source package?  Assume byhand...
            else:
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Per-suite file checks
            entry["oldfiles"] = {}
            for suite in self.pkg.changes["distribution"].keys():
                self.per_suite_file_checks(f, suite, session)

        session.close()

        # If the .changes file says it has source, it must have source.
        if self.pkg.changes["architecture"].has_key("source"):
            if not has_source:
                self.rejects.append("no source found although the Architecture line in the changes mentions source.")

            if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
                self.rejects.append("source only uploads are not supported.")

    ###########################################################################
    def check_dsc(self, action=True, session=None):
        """Returns bool indicating whether or not the source changes are valid"""
        # Ensure there is source to check
        if not self.pkg.changes["architecture"].has_key("source"):
            return True

        # Find the .dsc
        dsc_filename = None
        for f, entry in self.pkg.files.items():
            if entry["type"] == "dsc":
                if dsc_filename:
                    self.rejects.append("cannot process a .changes file with multiple .dsc's.")
                    return False
                else:
                    dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            self.rejects.append("source uploads must contain a dsc file")
            return False

        # Parse the .dsc file
        try:
            self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1, dsc_file=1))
        except CantOpenError:
            # if not -n copy_to_holding() will have done this for us...
            if not action:
                self.rejects.append("%s: can't read file." % (dsc_filename))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
        except InvalidDscError, line:
            self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
        except ChangesUnicodeError:
            self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))

        # Build up the file list of files mentioned by the .dsc
        try:
            self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
        except NoFilesFieldError:
            self.rejects.append("%s: no Files: field." % (dsc_filename))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
            return False

        # Enforce mandatory fields
        for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
            if not self.pkg.dsc.has_key(i):
                self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
                return False

        # Validate the source and version fields
        if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
        if not re_valid_version.match(self.pkg.dsc["version"]):
            self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))

        # Only a limited list of source formats are allowed in each suite
        for dist in self.pkg.changes["distribution"].keys():
            allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
            if self.pkg.dsc["format"] not in allowed:
                self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))

        # Validate the Maintainer field
        try:
            # We ignore the return value
            fix_maintainer(self.pkg.dsc["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                                 % (dsc_filename, self.pkg.dsc["maintainer"], msg))

        # Validate the build-depends field(s)
        for field_name in [ "build-depends", "build-depends-indep" ]:
            field = self.pkg.dsc.get(field_name)
            if field:
                # Have apt try to parse them...
                try:
                    apt_pkg.ParseSrcDepends(field)
                except:
                    self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))

        # Ensure the version number in the .dsc matches the version number in the .changes
        epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        changes_version = self.pkg.files[dsc_filename]["version"]

        if epochless_dsc_version != changes_version:
            self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))

        # Ensure the Files field contains only what's expected
        self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))

        # Ensure source is newer than existing source in target suites
        session = DBConn().session()
        self.check_source_against_db(dsc_filename, session)
        self.check_dsc_against_db(dsc_filename, session)
        session.close()

        # Finally, check if we're missing any files
        for f in self.later_check_files:
            self.rejects.append("Could not find file %s referenced in changes" % f)

        return True

    ###########################################################################

    def get_changelog_versions(self, source_dir):
        """Extracts the source package and (optionally) grabs the
        version history out of debian/changelog for the BTS."""

        cnf = Config()

        # Find the .dsc (again)
        dsc_filename = None
        for f in self.pkg.files.keys():
            if self.pkg.files[f]["type"] == "dsc":
                dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            return

        # Create a symlink mirror of the source files in our temporary directory
        for f in self.pkg.files.keys():
            m = re_issource.match(f)
            if m:
                src = os.path.join(source_dir, f)
                # If a file is missing for whatever reason, give up.
                if not os.path.exists(src):
                    return
                ftype = m.group(3)
                if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
                   self.pkg.orig_files[f].has_key("path"):
                    continue
                dest = os.path.join(os.getcwd(), f)
                os.symlink(src, dest)

        # If the orig files are not a part of the upload, create symlinks to the
        # existing copies.
        for orig_file in self.pkg.orig_files.keys():
            if not self.pkg.orig_files[orig_file].has_key("path"):
                continue
            dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
            os.symlink(self.pkg.orig_files[orig_file]["path"], dest)

        # Extract the source
        cmd = "dpkg-source -sn -x %s" % (dsc_filename)
        (result, output) = commands.getstatusoutput(cmd)
        if (result != 0):
            self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
            self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
            return

        if not cnf.Find("Dir::Queue::BTSVersionTrack"):
            return

        # Get the upstream version
        upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        if re_strip_revision.search(upstr_version):
            upstr_version = re_strip_revision.sub('', upstr_version)

        # Ensure the changelog file exists
        changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
        if not os.path.exists(changelog_filename):
            self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
            return

        # Parse the changelog
        self.pkg.dsc["bts changelog"] = ""
        changelog_file = utils.open_file(changelog_filename)
        for line in changelog_file.readlines():
            m = re_changelog_versions.match(line)
            if m:
                self.pkg.dsc["bts changelog"] += line
        changelog_file.close()

        # Check we found at least one revision in the changelog
        if not self.pkg.dsc["bts changelog"]:
            self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))

    def check_source(self):
        # Bail out if there's no source to check
        if not self.pkg.changes["architecture"].has_key("source"):
            return

        tmpdir = utils.temp_dirname()

        # Move into the temporary directory
        cwd = os.getcwd()
        os.chdir(tmpdir)

        # Get the changelog version history
        self.get_changelog_versions(cwd)

        # Move back and cleanup the temporary tree
        os.chdir(cwd)

        try:
            shutil.rmtree(tmpdir)
        except OSError, e:
            if e.errno != errno.EACCES:
                print "Unexpected error removing %s" % tmpdir
                utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))

            self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
            # We probably have u-r or u-w directories so chmod everything
            # and try again.
            cmd = "chmod -R u+rwx %s" % (tmpdir)
            result = os.system(cmd)
            if result != 0:
                utils.fubar("'%s' failed with result %s." % (cmd, result))
            shutil.rmtree(tmpdir)
        except Exception, e:
            print "Unexpected exception removing %s (%s)" % (tmpdir, e)
            utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))

    ###########################################################################
    def ensure_hashes(self):
        # Make sure we recognise the format of the Files: field in the .changes
        format = self.pkg.changes.get("format", "0.0").split(".", 1)
        if len(format) == 2:
            format = int(format[0]), int(format[1])
        else:
            format = int(float(format[0])), 0

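        # e.g. a "Format: 1.8" field parses to the tuple (1, 8), while a bare
        # "Format: 1" would become (1, 0).
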
        # We need to deal with the original changes blob, as the fields we need
        # might not be in the changes dict serialised into the .dak anymore.
        orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])

        # Copy the checksums over to the current changes dict.  This will keep
        # the existing modifications to it intact.
        for field in orig_changes:
            if field.startswith('checksums-'):
                self.pkg.changes[field] = orig_changes[field]

        # Check for unsupported hashes
        for j in utils.check_hash_fields(".changes", self.pkg.changes):
            self.rejects.append(j)

        for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
            self.rejects.append(j)

        # We have to calculate the hash if we have an earlier changes version than
        # the hash appears in rather than require it exist in the changes file
        for hashname, hashfunc, version in utils.known_hashes:
            # TODO: Move _ensure_changes_hash into this class
            for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
                self.rejects.append(j)
            if "source" in self.pkg.changes["architecture"]:
                # TODO: Move _ensure_dsc_hash into this class
                for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
                    self.rejects.append(j)

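        # Sketch of the assumed shape of utils.known_hashes: a list of
        # (hashname, hashfunc, introduced-in-format) tuples, e.g.
        #
        #   [("sha1", apt_pkg.sha1sum, (1, 8)),
        #    ("sha256", apt_pkg.sha256sum, (1, 8))]
        #
        # so hashes newer than the upload's Format version are computed
        # locally instead of being required in the file.
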
    def check_hashes(self):
        for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
            self.rejects.append(m)

        for m in utils.check_size(".changes", self.pkg.files):
            self.rejects.append(m)

        for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
            self.rejects.append(m)

        for m in utils.check_size(".dsc", self.pkg.dsc_files):
            self.rejects.append(m)

        self.ensure_hashes()

    ###########################################################################

    def ensure_orig(self, target_dir='.', session=None):
        """
        Ensures that all orig files mentioned in the changes file are present
        in target_dir. If they do not exist, they are symlinked into place.

        A list of the symlinks that were created is returned (so they can be
        removed).
        """

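        # A typical call pattern, mirroring what check_lintian does below:
        #
        #   symlinked = self.ensure_orig(target_dir='.')
        #   try:
        #       pass  # run tools that need the orig tarballs on disk
        #   finally:
        #       for lnk in symlinked:
        #           os.unlink(lnk)
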
1197         symlinked = []
1198         cnf = Config()
1199
1200         for filename, entry in self.pkg.dsc_files.iteritems():
1201             if not re_is_orig_source.match(filename):
1202                 # File is not an orig; ignore
1203                 continue
1204
1205             if os.path.exists(filename):
1206                 # File exists, no need to continue
1207                 continue
1208
1209             def symlink_if_valid(path):
1210                 f = utils.open_file(path)
1211                 md5sum = apt_pkg.md5sum(f)
1212                 f.close()
1213
1214                 fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
1215                 expected = (int(entry['size']), entry['md5sum'])
1216
1217                 if fingerprint != expected:
1218                     return False
1219
1220                 dest = os.path.join(target_dir, filename)
1221
1222                 os.symlink(path, dest)
1223                 symlinked.append(dest)
1224
1225                 return True
1226
1227             session_ = session
1228             if session is None:
1229                 session_ = DBConn().session()
1230
1231             found = False
1232
1233             # Look in the pool
1234             for poolfile in get_poolfile_like_name('%s' % filename, session_):
1235                 poolfile_path = os.path.join(
1236                     poolfile.location.path, poolfile.filename
1237                 )
1238
1239                 if symlink_if_valid(poolfile_path):
1240                     found = True
1241                     break
1242
1243             if session is None:
1244                 session_.close()
1245
1246             if found:
1247                 continue
1248
1249             # Look in some other queues for the file
1250             queues = ('New', 'Byhand', 'ProposedUpdates',
1251                 'OldProposedUpdates', 'Embargoed', 'Unembargoed')
1252
1253             for queue in queues:
1254                 if not cnf.get('Dir::Queue::%s' % queue):
1255                     continue
1256
1257                 queuefile_path = os.path.join(
1258                     cnf['Dir::Queue::%s' % queue], filename
1259                 )
1260
1261                 if not os.path.exists(queuefile_path):
1262                     # Does not exist in this queue
1263                     continue
1264
1265                 if symlink_if_valid(queuefile_path):
1266                     break
1267
1268         return symlinked
1269
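         # Typical usage of ensure_orig() (sketch): fetch any missing orig
         # tarballs before an operation that needs them and clean the symlinks
         # up afterwards, as check_lintian() below does:
         #
         #   symlinked = self.ensure_orig()
         #   try:
         #       pass    # ... work that needs the orig files ...
         #   finally:
         #       for link in symlinked:
         #           os.unlink(link)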
1270     ###########################################################################
1271
1272     def check_lintian(self):
1273         """
1274         Extends self.rejects by checking the output of lintian against tags
1275         specified in Dinstall::LintianTags.
1276         """
1277
1278         cnf = Config()
1279
1280         # Don't reject binary uploads
1281         if not self.pkg.changes['architecture'].has_key('source'):
1282             return
1283
1284         # Only check some distributions
1285         for dist in ('unstable', 'experimental'):
1286             if dist in self.pkg.changes['distribution']:
1287                 break
1288         else:
1289             return
1290
1291         # If we do not have a tagfile, don't do anything
1292         tagfile = cnf.get("Dinstall::LintianTags")
1293         if tagfile is None:
1294             return
1295
1296         # Parse the yaml file
1297         sourcefile = file(tagfile, 'r')
1298         sourcecontent = sourcefile.read()
1299         sourcefile.close()
1300
1301         try:
1302             lintiantags = yaml.load(sourcecontent)['lintian']
1303         except yaml.YAMLError, msg:
1304             utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
1305             return
1306
1307         # Try and find all orig mentioned in the .dsc
1308         symlinked = self.ensure_orig()
1309
1310         # Setup the input file for lintian
1311         fd, temp_filename = utils.temp_filename()
1312         temptagfile = os.fdopen(fd, 'w')
1313         for tags in lintiantags.values():
1314             temptagfile.writelines(['%s\n' % x for x in tags])
1315         temptagfile.close()
1316
1317         try:
1318             cmd = "lintian --show-overrides --tags-from-file %s %s" % \
1319                 (temp_filename, self.pkg.changes_file)
1320
1321             result, output = commands.getstatusoutput(cmd)
1322         finally:
1323             # Remove our tempfile and any symlinks we created
1324             os.unlink(temp_filename)
1325
1326             for symlink in symlinked:
1327                 os.unlink(symlink)
1328
1329         if result == 2:
1330             utils.warn("lintian failed for %s [return code: %s]." % \
1331                 (self.pkg.changes_file, result))
1332             utils.warn(utils.prefix_multi_line_string(output, \
1333                 " [possible output:] "))
1334
1335         def log(*txt):
1336             if self.logger:
1337                 self.logger.log(
1338                     [self.pkg.changes_file, "check_lintian"] + list(txt)
1339                 )
1340
1341         # Generate messages
1342         parsed_tags = parse_lintian_output(output)
1343         self.rejects.extend(
1344             generate_reject_messages(parsed_tags, lintiantags, log=log)
1345         )
1346
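         # The file named by Dinstall::LintianTags is YAML whose top-level
         # 'lintian' key maps tag categories to lists of tag names; the code
         # above relies only on that shape (yaml.load(...)['lintian'] and
         # .values()).  A minimal illustrative example (category and tag
         # names hypothetical):
         #
         #   lintian:
         #     fatal:
         #       - some-fatal-tag
         #     warning:
         #       - some-other-tag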
1347     ###########################################################################
1348     def check_urgency(self):
1349         cnf = Config()
1350         if self.pkg.changes["architecture"].has_key("source"):
1351             if not self.pkg.changes.has_key("urgency"):
1352                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1353             self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1354             if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1355                 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1356                                      (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1357                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1358
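         # Configuration consulted by check_urgency() above, as an
         # illustrative apt.conf-style fragment (values hypothetical):
         #
         #   Urgency
         #   {
         #     Default "low";
         #     Valid "low medium high emergency critical";
         #   };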
1359     ###########################################################################
1360
1361     # Sanity check the time stamps of files inside debs.
1362     # [Files in the near future cause ugly warnings and extreme time
1363     #  travel can cause errors on extraction]
1364
1365     def check_timestamps(self):
1366         Cnf = Config()
1367
1368         future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1369         past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1370         tar = TarTime(future_cutoff, past_cutoff)
1371
1372         for filename, entry in self.pkg.files.items():
1373             if entry["type"] == "deb":
1374                 tar.reset()
1375                 try:
1376                     deb_file = utils.open_file(filename)
1377                     apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
1378                     deb_file.seek(0)
1379                     try:
1380                         apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
1381                     except SystemError, e:
1382                         # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
1383                         if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
1384                             raise
1385                         deb_file.seek(0)
1386                         apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")
1387
1388                     deb_file.close()
1389
1390                     future_files = tar.future_files.keys()
1391                     if future_files:
1392                         num_future_files = len(future_files)
1393                         future_file = future_files[0]
1394                         future_date = tar.future_files[future_file]
1395                         self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1396                                % (filename, num_future_files, future_file, time.ctime(future_date)))
1397
1398                     ancient_files = tar.ancient_files.keys()
1399                     if ancient_files:
1400                         num_ancient_files = len(ancient_files)
1401                         ancient_file = ancient_files[0]
1402                         ancient_date = tar.ancient_files[ancient_file]
1403                         self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1404                                % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1405                 except:
1406                     self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1407
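         # TarTime (defined elsewhere in daklib) is used above as an
         # extraction callback that merely records member timestamps.  A
         # rough sketch of the idea, assuming the old apt_inst callback
         # signature which passes each member's mtime:
         #
         #   class TarTimeSketch:
         #       def __init__(self, future_cutoff, past_cutoff):
         #           self.future_cutoff = future_cutoff
         #           self.past_cutoff = past_cutoff
         #           self.reset()
         #
         #       def reset(self):
         #           self.future_files = {}
         #           self.ancient_files = {}
         #
         #       def callback(self, kind, name, link, mode, uid, gid, size,
         #                    mtime, major, minor):
         #           if mtime > self.future_cutoff:
         #               self.future_files[name] = mtime
         #           if mtime < self.past_cutoff:
         #               self.ancient_files[name] = mtime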
1408     def check_if_upload_is_sponsored(self, uid_email, uid_name):
1409         if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1410             sponsored = False
1411         elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1412             sponsored = False
1413             if uid_name == "":
1414                 sponsored = True
1415         else:
1416             sponsored = True
1417             if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1418                 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1419                 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1420                     self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1421                         self.pkg.changes["sponsoremail"] = uid_email
1422
1423         return sponsored
1424
1425
1426     ###########################################################################
1427     # check_signed_by_key checks
1428     ###########################################################################
1429
1430     def check_signed_by_key(self):
1431         """Ensure the .changes is signed by an authorized uploader."""
1432         session = DBConn().session()
1433
1434         # First of all we check that the person has proper upload permissions
1435         # and that this upload isn't blocked
1436         fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1437
1438         if fpr is None:
1439             self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1440             return
1441
1442         # TODO: Check that import-keyring adds UIDs properly
1443         if not fpr.uid:
1444             self.rejects.append("Cannot find uid for fingerprint %s.  Please contact ftpmaster@debian.org" % fpr.fingerprint)
1445             return
1446
1447         # Check that the fingerprint which uploaded has permission to do so
1448         self.check_upload_permissions(fpr, session)
1449
1450         # Check that this package is not in a transition
1451         self.check_transition(session)
1452
1453         session.close()
1454
1455
1456     def check_upload_permissions(self, fpr, session):
1457         # Check any one-off upload blocks
1458         self.check_upload_blocks(fpr, session)
1459
1460         # Start with DM as a special case; unfortunately it has to be
1461         # checked first
1462         # (keys with no source access get more access than DMs in one
1463         #  way; DMs can only upload for their packages whether source
1464         #  or binary, whereas keys with no access might be able to
1465         #  upload some binaries)
1466         if fpr.source_acl.access_level == 'dm':
1467             self.check_dm_upload(fpr, session)
1468         else:
1469             # Check source-based permissions for other types
1470             if self.pkg.changes["architecture"].has_key("source") and \
1471                 fpr.source_acl.access_level is None:
1472                 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1473                 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1474                 self.rejects.append(rej)
1475                 return
1476             # If not a DM, we allow full upload rights
1477             uid_email = "%s@debian.org" % (fpr.uid.uid)
1478             self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1479
1480
1481         # Check binary upload permissions
1482         # By this point we know that DMs can't have got here unless they
1483         # are allowed to deal with the package concerned so just apply
1484         # normal checks
1485         if fpr.binary_acl.access_level == 'full':
1486             return
1487
1488         # Otherwise we're in the map case
1489         tmparches = self.pkg.changes["architecture"].copy()
1490         tmparches.pop('source', None)
1491
1492         for bam in fpr.binary_acl_map:
1493             tmparches.pop(bam.architecture.arch_string, None)
1494
1495         if len(tmparches.keys()) > 0:
1496             if fpr.binary_reject:
1497                 rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1498                 rej += "\narchitectures involved are: ", ",".join(tmparches.keys())
1499                 self.rejects.append(rej)
1500             else:
1501                 # TODO: This is where we'll implement reject vs throw away binaries later
1502                 rej = "Uhm.  I'm meant to throw away the binaries now but that's not implemented yet"
1503                 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1504                 rej += "\nFingerprint: %s", (fpr.fingerprint)
1505                 self.rejects.append(rej)
1506
1507
1508     def check_upload_blocks(self, fpr, session):
1509         """Check whether any upload blocks apply to this source, source
1510            version, uid / fpr combination"""
1511
1512         def block_rej_template(fb):
1513             rej = 'Manual upload block in place for package %s' % fb.source
1514             if fb.version is not None:
1515                 rej += ', version %s' % fb.version
1516             return rej
1517
1518         for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1519             # version is None if the block applies to all versions
1520             if fb.version is None or fb.version == self.pkg.changes['version']:
1521                 # Check both fpr and uid - either is enough to cause a reject
1522                 if fb.fpr is not None:
1523                     if fb.fpr.fingerprint == fpr.fingerprint:
1524                         self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1525                 if fb.uid is not None:
1526                     if fb.uid == fpr.uid:
1527                         self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1528
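         # Sketch of the UploadBlock fields consulted above: a block names a
         # source package, optionally a single version (None matches all
         # versions), and either a fingerprint or a uid, one of which is
         # enough to reject (names below are hypothetical):
         #
         #   block = UploadBlock()
         #   block.source = "somepackage"
         #   block.version = None              # None -> all versions
         #   block.fpr = some_fingerprint      # match on signing key ...
         #   block.uid = some_uid              # ... or on uploader uid
         #   block.reason = "blocked pending discussion with the maintainer"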
1529
1530     def check_dm_upload(self, fpr, session):
1531         # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
1532         ## none of the uploaded packages are NEW
1533         rej = False
1534         for f in self.pkg.files.keys():
1535             if self.pkg.files[f].has_key("byhand"):
1536                 self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
1537                 rej = True
1538             if self.pkg.files[f].has_key("new"):
1539                 self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
1540                 rej = True
1541
1542         if rej:
1543             return
1544
1545         ## the most recent version of the package uploaded to unstable or
1546         ## experimental includes the field "DM-Upload-Allowed: yes" in the source
1547         ## section of its control file
1548         q = session.query(DBSource).filter_by(source=self.pkg.changes["source"])
1549         q = q.join(SrcAssociation)
1550         q = q.join(Suite).filter(Suite.suite_name.in_(['unstable', 'experimental']))
1551         q = q.order_by(desc('source.version')).limit(1)
1552
1553         r = q.all()
1554
1555         if len(r) != 1:
1556             rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
1557             self.rejects.append(rej)
1558             return
1559
1560         r = r[0]
1561         if not r.dm_upload_allowed:
1562             rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
1563             self.rejects.append(rej)
1564             return
1565
1566         ## the Maintainer: field of the uploaded .changes file corresponds with
1567         ## the owner of the key used (ie, non-developer maintainers may not sponsor
1568         ## uploads)
1569         if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
1570             self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
1571
1572         ## the most recent version of the package uploaded to unstable or
1573         ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
1574         ## non-developer maintainers cannot NMU or hijack packages)
1575
1576         # srcuploaders includes the maintainer
1577         accept = False
1578         for sup in r.srcuploaders:
1579             (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
1580             # Eww - I hope we never have two people with the same name in Debian
1581             if email == fpr.uid.uid or name == fpr.uid.name:
1582                 accept = True
1583                 break
1584
1585         if not accept:
1586             self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
1587             return
1588
1589         ## none of the packages are being taken over from other source packages
1590         for b in self.pkg.changes["binary"].keys():
1591             for suite in self.pkg.changes["distribution"].keys():
1592                 q = session.query(DBSource)
1593                 q = q.join(DBBinary).filter_by(package=b)
1594                 q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)
1595
1596                 for s in q.all():
1597                     if s.source != self.pkg.changes["source"]:
1598                         self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s.source, suite))
1599
1600
1601
1602     def check_transition(self, session):
1603         cnf = Config()
1604
1605         sourcepkg = self.pkg.changes["source"]
1606
1607         # No sourceful upload -> no need to do anything else, direct return.
1608         # We also only check uploads to unstable, not experimental ones or
1609         # those going to some proposed-updates queue
1610         if "source" not in self.pkg.changes["architecture"] or \
1611            "unstable" not in self.pkg.changes["distribution"]:
1612             return
1613
1614         # Also, only check if a file defining the transition checks is
1615         # configured (and actually exists).
1616         transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1617         if transpath == "" or not os.path.exists(transpath):
1618             return
1619
1620         # Parse the yaml file
1621         sourcefile = file(transpath, 'r')
1622         sourcecontent = sourcefile.read()
1623         try:
1624             transitions = yaml.load(sourcecontent)
1625         except yaml.YAMLError, msg:
1626             # This shouldn't happen: there is a wrapper to edit the file which
1627             # checks it, but we prefer to be safe rather than end up rejecting
1628             # everything.
1629             utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1630             return
1631
1632         # Now look through all defined transitions
1633         for trans in transitions:
1634             t = transitions[trans]
1635             source = t["source"]
1636             expected = t["new"]
1637
1638             # Will be None if nothing is in testing.
1639             current = get_source_in_suite(source, "testing", session)
1640             if current is not None:
1641                 compare = apt_pkg.VersionCompare(current.version, expected)
1642
1643             if current is None or compare < 0:
1644                 # This is still valid, the current version in testing is older than
1645                 # the new version we wait for, or there is none in testing yet
1646
1647                 # Check if the source we look at is affected by this.
1648                 if sourcepkg in t['packages']:
1649                     # The source is affected, let's reject it.
1650
1651                     rejectmsg = "%s: part of the %s transition.\n\n" % (
1652                         sourcepkg, trans)
1653
1654                     if current is not None:
1655                         currentlymsg = "at version %s" % (current.version)
1656                     else:
1657                         currentlymsg = "not present in testing"
1658
1659                     rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1660
1661                     rejectmsg += "\n".join(textwrap.wrap("""Your package
1662 is part of a testing transition designed to get %s migrated (it is
1663 currently %s, we need version %s).  This transition is managed by the
1664 Release Team, and %s is the Release-Team member responsible for it.
1665 Please mail debian-release@lists.debian.org or contact %s directly if you
1666 need further assistance.  You might want to upload to experimental until this
1667 transition is done."""
1668                             % (source, currentlymsg, expected, t["rm"], t["rm"])))
1669
1670                     self.rejects.append(rejectmsg)
1671                     return
1672
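         # The Dinstall::Reject::ReleaseTransitions file parsed above is YAML
         # mapping a transition name to its description; the keys read here
         # are 'source', 'new', 'reason', 'rm' and 'packages'.  An
         # illustrative entry:
         #
         #   libfoo2:
         #     source: libfoo
         #     new: 2.0-1
         #     rm: Some Releaseteam Member
         #     reason: "libfoo needs to migrate together with its reverse dependencies"
         #     packages:
         #       - bar
         #       - baz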
1673     ###########################################################################
1674     # End check_signed_by_key checks
1675     ###########################################################################
1676
1677     def build_summaries(self):
1678         """ Build a summary of changes the upload introduces. """
1679
1680         (byhand, new, summary, override_summary) = self.pkg.file_summary()
1681
1682         short_summary = summary
1683
1684         # This is for direport's benefit...
1685         f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1686
1687         if byhand or new:
1688             summary += "Changes: " + f
1689
1690         summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1691
1692         summary += self.announce(short_summary, 0)
1693
1694         return (summary, short_summary)
1695
1696     ###########################################################################
1697
1698     def close_bugs(self, summary, action):
1699         """
1700         Send mail to close bugs as instructed by the closes field in the changes file.
1701         Also add a line to summary if any work was done.
1702
1703         @type summary: string
1704         @param summary: summary text, as given by L{build_summaries}
1705
1706         @type action: bool
1707         @param action: If set to false, no real action will be done.
1708
1709         @rtype: string
1710         @return: summary. If action was taken, extended by the list of closed bugs.
1711
1712         """
1713
1714         template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1715
1716         bugs = self.pkg.changes["closes"].keys()
1717
1718         if not bugs:
1719             return summary
1720
1721         bugs.sort()
1722         summary += "Closing bugs: "
1723         for bug in bugs:
1724             summary += "%s " % (bug)
1725             if action:
1726                 self.update_subst()
1727                 self.Subst["__BUG_NUMBER__"] = bug
1728                 if self.pkg.changes["distribution"].has_key("stable"):
1729                     self.Subst["__STABLE_WARNING__"] = """
1730 Note that this package is not part of the released stable Debian
1731 distribution.  It may have dependencies on other unreleased software,
1732 or other instabilities.  Please take care if you wish to install it.
1733 The update will eventually make its way into the next released Debian
1734 distribution."""
1735                 else:
1736                     self.Subst["__STABLE_WARNING__"] = ""
1737                 mail_message = utils.TemplateSubst(self.Subst, template)
1738                 utils.send_mail(mail_message)
1739
1740                 # Clear up after ourselves
1741                 del self.Subst["__BUG_NUMBER__"]
1742                 del self.Subst["__STABLE_WARNING__"]
1743
1744         if action and self.logger:
1745             self.logger.log(["closing bugs"] + bugs)
1746
1747         summary += "\n"
1748
1749         return summary
1750
1751     ###########################################################################
1752
1753     def announce(self, short_summary, action):
1754         """
1755         Send an announce mail about a new upload.
1756
1757         @type short_summary: string
1758         @param short_summary: Short summary text to include in the mail
1759
1760         @type action: bool
1761         @param action: If set to false, no real action will be done.
1762
1763         @rtype: string
1764         @return: Text string describing the actions taken.
1765
1766         """
1767
1768         cnf = Config()
1769         announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
1770
1771         # Only do announcements for source uploads with a recent dpkg-dev installed
1772         if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
1773            self.pkg.changes["architecture"].has_key("source"):
1774             return ""
1775
1776         lists_done = {}
1777         summary = ""
1778
1779         self.Subst["__SHORT_SUMMARY__"] = short_summary
1780
1781         for dist in self.pkg.changes["distribution"].keys():
1782             announce_list = cnf.Find("Suite::%s::Announce" % (dist))
1783             if announce_list == "" or lists_done.has_key(announce_list):
1784                 continue
1785
1786             lists_done[announce_list] = 1
1787             summary += "Announcing to %s\n" % (announce_list)
1788
1789             if action:
1790                 self.update_subst()
1791                 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
1792                 if cnf.get("Dinstall::TrackingServer") and \
1793                    self.pkg.changes["architecture"].has_key("source"):
1794                     trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
1795                     self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
1796
1797                 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
1798                 utils.send_mail(mail_message)
1799
1800                 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
1801
1802         if cnf.FindB("Dinstall::CloseBugs"):
1803             summary = self.close_bugs(summary, action)
1804
1805         del self.Subst["__SHORT_SUMMARY__"]
1806
1807         return summary
1808
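         # Configuration consulted by announce() above (illustrative values):
         #
         #   Suite::unstable::Announce   "debian-devel-changes@lists.debian.org"
         #   Dinstall::TrackingServer    "packages.qa.debian.org"
         #   Dinstall::CloseBugs         "true"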
1809     ###########################################################################
1810     @session_wrapper
1811     def accept (self, summary, short_summary, session=None):
1812         """
1813         Accept an upload.
1814
1815         This moves all files referenced from the .changes into the pool,
1816         sends the accepted mail, announces to lists, closes bugs and
1817         also checks for override disparities. If enabled it will write out
1818         the version history for the BTS Version Tracking and will finally call
1819         L{queue_build}.
1820
1821         @type summary: string
1822         @param summary: Summary text
1823
1824         @type short_summary: string
1825         @param short_summary: Short summary
1826         """
1827
1828         cnf = Config()
1829         stats = SummaryStats()
1830
1831         print "Installing."
1832         self.logger.log(["installing changes", self.pkg.changes_file])
1833
1834         poolfiles = []
1835
1836         # Add the .dsc file to the DB first
1837         for newfile, entry in self.pkg.files.items():
1838             if entry["type"] == "dsc":
1839                 source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
1840                 for j in pfs:
1841                     poolfiles.append(j)
1842
1843         # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
1844         for newfile, entry in self.pkg.files.items():
1845             if entry["type"] == "deb":
1846                 poolfiles.append(add_deb_to_db(self, newfile, session))
1847
1848         # If this is a sourceful diff only upload that is moving
1849         # cross-component we need to copy the .orig files into the new
1850         # component too for the same reasons as above.
1851         # XXX: mhy: I think this should be in add_dsc_to_db
1852         if self.pkg.changes["architecture"].has_key("source"):
1853             for orig_file in self.pkg.orig_files.keys():
1854                 if not self.pkg.orig_files[orig_file].has_key("id"):
1855                     continue # Skip if it's not in the pool
1856                 orig_file_id = self.pkg.orig_files[orig_file]["id"]
1857                 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
1858                     continue # Skip if the location didn't change
1859
1860                 # Do the move
1861                 oldf = get_poolfile_by_id(orig_file_id, session)
1862                 old_filename = os.path.join(oldf.location.path, oldf.filename)
1863                 old_dat = {'size': oldf.filesize,   'md5sum': oldf.md5sum,
1864                            'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
1865
1866                 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
1867
1868                 # TODO: Care about size/md5sum collisions etc
1869                 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
1870
1871                 # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
1872                 if newf is None:
1873                     utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
1874                     newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
1875
1876                     session.flush()
1877
1878                     # Don't reference the old file from this changes
1879                     # (filter into a new list: removing entries while
1880                     #  iterating over the list would skip some of them)
1881                     poolfiles = [p for p in poolfiles if p.file_id != oldf.file_id]
1882
1883                     poolfiles.append(newf)
1884
1885                     # Fix up the DSC references
1886                     toremove = []
1887
1888                     for df in source.srcfiles:
1889                         if df.poolfile.file_id == oldf.file_id:
1890                             # Add a new DSC entry and mark the old one for deletion
1891                             # Don't do it in the loop so we don't change the thing we're iterating over
1892                             newdscf = DSCFile()
1893                             newdscf.source_id = source.source_id
1894                             newdscf.poolfile_id = newf.file_id
1895                             session.add(newdscf)
1896
1897                             toremove.append(df)
1898
1899                     for df in toremove:
1900                         session.delete(df)
1901
1902                     # Flush our changes
1903                     session.flush()
1904
1905                     # Make sure that our source object is up-to-date
1906                     session.expire(source)
1907
1908         # Add changelog information to the database
1909         self.store_changelog()
1910
1911         # Install the files into the pool
1912         for newfile, entry in self.pkg.files.items():
1913             destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
1914             utils.move(newfile, destination)
1915             self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
1916             stats.accept_bytes += float(entry["size"])
1917
1918         # Copy the .changes file across for suites which need it.
1919         copy_changes = {}
1920         for suite_name in self.pkg.changes["distribution"].keys():
1921             if cnf.has_key("Suite::%s::CopyChanges" % (suite_name)):
1922                 copy_changes[cnf["Suite::%s::CopyChanges" % (suite_name)]] = ""
1923
1924         for dest in copy_changes.keys():
1925             utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
1926
1927         # We're done - commit the database changes
1928         session.commit()
1929         # Our SQL session will automatically start a new transaction after
1930         # the last commit
1931
1932         # Move the .changes into the 'done' directory
1933         utils.move(self.pkg.changes_file,
1934                    os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))
1935
1936         if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
1937             UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
1938
1939         self.update_subst()
1940         self.Subst["__SUITE__"] = ""
1941         self.Subst["__SUMMARY__"] = summary
1942         mail_message = utils.TemplateSubst(self.Subst,
1943                                            os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
1944         utils.send_mail(mail_message)
1945         self.announce(short_summary, 1)
1946
1947         ## Helper stuff for DebBugs Version Tracking
1948         if cnf.Find("Dir::Queue::BTSVersionTrack"):
1949             if self.pkg.changes["architecture"].has_key("source"):
1950                 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
1951                 version_history = os.fdopen(fd, 'w')
1952                 version_history.write(self.pkg.dsc["bts changelog"])
1953                 version_history.close()
1954                 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
1955                                       self.pkg.changes_file[:-8]+".versions")
1956                 os.rename(temp_filename, filename)
1957                 os.chmod(filename, 0644)
1958
1959             # Write out the binary -> source mapping.
1960             (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
1961             debinfo = os.fdopen(fd, 'w')
1962             for name, entry in sorted(self.pkg.files.items()):
1963                 if entry["type"] == "deb":
1964                     line = " ".join([entry["package"], entry["version"],
1965                                      entry["architecture"], entry["source package"],
1966                                      entry["source version"]])
1967                     debinfo.write(line+"\n")
1968             debinfo.close()
1969             filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
1970                                   self.pkg.changes_file[:-8]+".debinfo")
1971             os.rename(temp_filename, filename)
1972             os.chmod(filename, 0644)
1973
1974         session.commit()
1975
1976         # Set up our copy queues (e.g. buildd queues)
1977         for suite_name in self.pkg.changes["distribution"].keys():
1978             suite = get_suite(suite_name, session)
1979             for q in suite.copy_queues:
1980                 for f in poolfiles:
1981                     q.add_file_from_pool(f)
1982
1983         session.commit()
1984
1985         # Finally...
1986         stats.accept_count += 1
1987
1988     def check_override(self):
1989         """
1990         Checks override entries for validity. Mails "Override disparity" warnings,
1991         if that feature is enabled.
1992
1993         Abandons the check if
1994           - override disparity checks are disabled
1995           - mail sending is disabled
1996         """
1997
1998         cnf = Config()
1999
2000         # Abandon the check if override disparity checks have been disabled
2001         if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
2002             return
2003
2004         summary = self.pkg.check_override()
2005
2006         if summary == "":
2007             return
2008
2009         overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2010
2011         self.update_subst()
2012         self.Subst["__SUMMARY__"] = summary
2013         mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2014         utils.send_mail(mail_message)
2015         del self.Subst["__SUMMARY__"]
2016
2017     ###########################################################################
2018
2019     def remove(self, from_dir=None):
2020         """
2021         Used (for instance) in p-u to remove the package from unchecked
2022
2023         Also removes the package from the holding area.
2024         """
2025         if from_dir is None:
2026             from_dir = self.pkg.directory
2027         h = Holding()
2028
2029         for f in self.pkg.files.keys():
2030             os.unlink(os.path.join(from_dir, f))
2031             if os.path.exists(os.path.join(h.holding_dir, f)):
2032                 os.unlink(os.path.join(h.holding_dir, f))
2033
2034         os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2035         if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2036             os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2037
2038     ###########################################################################
2039
2040     def move_to_queue (self, queue):
2041         """
2042         Move files to a destination queue using the permissions in the table
2043         """
2044         h = Holding()
2045         utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2046                    queue.path, perms=int(queue.change_perms, 8))
2047         for f in self.pkg.files.keys():
2048             utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
2049
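         # Note on the perms handling above: the queue table stores modes as
         # octal strings, so int(..., 8) converts them into the numeric mode
         # utils.move() expects.  For example:
         #
         #   int("0660", 8)    # -> 432, i.e. mode rw-rw----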
2050     ###########################################################################
2051
2052     def force_reject(self, reject_files):
2053         """
2054         Forcefully move files from the current directory to the
2055         reject directory.  If any file already exists in the reject
2056         directory it will be moved to the morgue to make way for
2057         the new file.
2058
2059         @type reject_files: list
2060         @param reject_files: names of the files to move to the reject directory
2061
2062         """
2063
2064         cnf = Config()
2065
2066         for file_entry in reject_files:
2067             # Skip any files which don't exist or which we don't have permission to copy.
2068             if os.access(file_entry, os.R_OK) == 0:
2069                 continue
2070
2071             dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
2072
2073             try:
2074                 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
2075             except OSError, e:
2076                 # File exists?  Let's find a new name by adding a number
2077                 if e.errno == errno.EEXIST:
2078                     try:
2079                         dest_file = utils.find_next_free(dest_file, 255)
2080                     except NoFreeFilenameError:
2081                         # Something's either gone badly Pete Tong, or
2082                         # someone is trying to exploit us.
2083                         utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))
2084                         return
2085
2086                     # Make sure we really got it
2087                     try:
2088                         dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2089                     except OSError, e:
2090                         # Likewise
2091                         utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
2092                         return
2093                 else:
2094                     raise
2095             # If we got here, we own the destination file, so we can
2096             # safely overwrite it.
2097             utils.move(file_entry, dest_file, 1, perms=0660)
2098             os.close(dest_fd)
2099
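         # utils.find_next_free (used above) picks an alternative name when
         # dest_file already exists; a sketch of the behaviour, assuming it
         # appends a numeric suffix until it finds a free name (path below
         # is hypothetical):
         #
         #   utils.find_next_free("/srv/queue/reject/foo.changes", 255)
         #   # -> "/srv/queue/reject/foo.changes.1" (then .2, ...), raising
         #   #    NoFreeFilenameError once the limit is exhausted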
2100     ###########################################################################
2101     def do_reject (self, manual=0, reject_message="", notes=""):
2102         """
2103         Reject an upload. If C{manual} is true and no reject message was
2104         given, spawn an editor so the user can write one.
2105
2106         @type manual: bool
2107         @param manual: manual or automated rejection
2108
2109         @type reject_message: string
2110         @param reject_message: A reject message
2111
2112         @return: 0 on success, 1 if the rejection was abandoned
2113
2114         """
2115         # If we weren't given a manual rejection message, spawn an
2116         # editor so the user can add one in...
2117         if manual and not reject_message:
2118             (fd, temp_filename) = utils.temp_filename()
2119             temp_file = os.fdopen(fd, 'w')
2120             if len(notes) > 0:
2121                 for note in notes:
2122                     temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
2123                                     % (note.author, note.version, note.notedate, note.comment))
2124             temp_file.close()
2125             editor = os.environ.get("EDITOR","vi")
2126             answer = 'E'
2127             while answer == 'E':
2128                 os.system("%s %s" % (editor, temp_filename))
2129                 temp_fh = utils.open_file(temp_filename)
2130                 reject_message = "".join(temp_fh.readlines())
2131                 temp_fh.close()
2132                 print "Reject message:"
2133                 print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
2134                 prompt = "[R]eject, Edit, Abandon, Quit ?"
2135                 answer = "XXX"
2136                 while prompt.find(answer) == -1:
2137                     answer = utils.our_raw_input(prompt)
2138                     m = re_default_answer.search(prompt)
2139                     if answer == "":
2140                         answer = m.group(1)
2141                     answer = answer[:1].upper()
2142             os.unlink(temp_filename)
2143             if answer == 'A':
2144                 return 1
2145             elif answer == 'Q':
2146                 sys.exit(0)
2147
2148         print "Rejecting.\n"
2149
2150         cnf = Config()
2151
2152         reason_filename = self.pkg.changes_file[:-8] + ".reason"
2153         reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2154
2155         # Move all the files into the reject directory
2156         reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2157         self.force_reject(reject_files)
2158
2159         # If we fail here someone is probably trying to exploit the race
2160         # so let's just raise an exception ...
2161         if os.path.exists(reason_filename):
2162             os.unlink(reason_filename)
2163         reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2164
2165         rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
2166
2167         self.update_subst()
2168         if not manual:
2169             self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2170             self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2171             self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2172             os.write(reason_fd, reject_message)
2173             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2174         else:
2175             # Build up the rejection email
2176             user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2177             self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2178             self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2179             self.Subst["__REJECT_MESSAGE__"] = ""
2180             self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2181             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2182             # Write the rejection email out as the <foo>.reason file
2183             os.write(reason_fd, reject_mail_message)
2184
2185         del self.Subst["__REJECTOR_ADDRESS__"]
2186         del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2187         del self.Subst["__CC__"]
2188
2189         os.close(reason_fd)
2190
2191         # Send the rejection mail
2192         utils.send_mail(reject_mail_message)
2193
2194         if self.logger:
2195             self.logger.log(["rejected", self.pkg.changes_file])
2196
2197         return 0
2198
2199     ################################################################################
2200     def in_override_p(self, package, component, suite, binary_type, filename, session):
2201         """
2202         Check if a package already has override entries in the DB
2203
2204         @type package: string
2205         @param package: package name
2206
2207         @type component: string
2208         @param component: component name
2209
2210         @type suite: string
2211         @param suite: suite name
2212
2213         @type binary_type: string
2214         @param binary_type: type of the package
2215
2216         @type filename: string
2217         @param filename: filename we check
2218
2219         @return: the database result. But no one cares anyway.
2220
2221         """
2222
2223         cnf = Config()
2224
2225         if binary_type == "": # must be source
2226             file_type = "dsc"
2227         else:
2228             file_type = binary_type
2229
2230         # Override suite name; used for example with proposed-updates
2231         if cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
2232             suite = cnf["Suite::%s::OverrideSuite" % (suite)]
2233
2234         result = get_override(package, suite, component, file_type, session)
2235
2236         # If checking for a source package fall back on the binary override type
2237         if file_type == "dsc" and len(result) < 1:
2238             result = get_override(package, suite, component, ['deb', 'udeb'], session)
2239
2240         # Remember the section and priority so we can check them later if appropriate
2241         if len(result) > 0:
2242             result = result[0]
2243             self.pkg.files[filename]["override section"] = result.section.section
2244             self.pkg.files[filename]["override priority"] = result.priority.priority
2245             return result
2246
2247         return None
2248
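         # Typical call (sketch; a missing override entry is what makes a
         # file NEW):
         #
         #   override = self.in_override_p(entry["package"], component,
         #                                 suite, entry.get("dbtype", ""),
         #                                 filename, session)
         #   if override is None:
         #       pass    # no override entry for this suite -> treat as NEW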
2249     ################################################################################
2250     def get_anyversion(self, sv_list, suite):
2251         """
2252         @type sv_list: list
2253         @param sv_list: list of (suite, version) tuples to check
2254
2255         @type suite: string
2256         @param suite: suite name
2257
2258         Description: TODO
2259         """
2260         Cnf = Config()
2261         anyversion = None
2262         anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
2263         for (s, v) in sv_list:
2264             if s in [ x.lower() for x in anysuite ]:
2265                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2266                     anyversion = v
2267
2268         return anyversion
2269
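         # apt_pkg.VersionCompare (the old python-apt API used throughout
         # this module) returns a negative value, zero or a positive value
         # when the first version is older than, equal to or newer than the
         # second:
         #
         #   apt_pkg.VersionCompare("1.0-1", "1.0-2")    # -> negative
         #   apt_pkg.VersionCompare("1.0-1", "1.0-1")    # -> 0
         #   apt_pkg.VersionCompare("2.0-1", "1.0-2")    # -> positive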
2270     ################################################################################
2271
2272     def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2273         """
2274         @type sv_list: list
2275         @param sv_list: list of (suite, version) tuples to check
2276
2277         @type filename: string
2278         @param filename: XXX
2279
2280         @type new_version: string
2281         @param new_version: XXX
2282
2283         Ensure versions are newer than existing packages in target
2284         suites and that cross-suite version checking rules as
2285         set out in the conf file are satisfied.
2286         """
2287
2288         cnf = Config()
2289
2290         # Check versions for each target suite
2291         for target_suite in self.pkg.changes["distribution"].keys():
2292             must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
2293             must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
2294
2295             # Enforce "must be newer than target suite" even if conffile omits it
2296             if target_suite not in must_be_newer_than:
2297                 must_be_newer_than.append(target_suite)
2298
2299             for (suite, existent_version) in sv_list:
2300                 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
2301
2302                 if suite in must_be_newer_than and sourceful and vercmp < 1:
2303                     self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2304
2305                 if suite in must_be_older_than and vercmp > -1:
2306                     cansave = 0
2307
2308                     if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2309                         # we really use the other suite, ignoring the conflicting one ...
2310                         addsuite = self.pkg.changes["distribution-version"][suite]
2311
2312                         add_version = self.get_anyversion(sv_list, addsuite)
2313                         target_version = self.get_anyversion(sv_list, target_suite)
2314
2315                         if not add_version:
2316                             # not add_version can only happen if we map to a suite
2317                             # that doesn't enhance the suite we're propup'ing from.
2318                             # so "propup-ver x a b c; map a d" is a problem only if
2319                             # d doesn't enhance a.
2320                             #
2321                             # i think we could always propagate in this case, rather
2322                             # than complaining. either way, this isn't a REJECT issue
2323                             #
2324                             # And - we really should complain to the dorks who configured dak
2325                             self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite))
2326                             self.pkg.changes.setdefault("propdistribution", {})
2327                             self.pkg.changes["propdistribution"][addsuite] = 1
2328                             cansave = 1
2329                         elif not target_version:
2330                             # not target_version is true when the package is NEW
2331                             # we could just stick with the "...old version..." REJECT
2332                             # for this, I think.
2333                             self.rejects.append("Won't propagate NEW packages.")
2334                         elif apt_pkg.VersionCompare(new_version, add_version) < 0:
2335                             # propagation would be redundant. no need to reject though.
2336                             self.warnings.append("ignoring version conflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2337                             cansave = 1
2338                         elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2339                              apt_pkg.VersionCompare(add_version, target_version) >= 0:
2340                             # propagate!!
2341                             self.warnings.append("Propagating upload to %s" % (addsuite))
2342                             self.pkg.changes.setdefault("propdistribution", {})
2343                             self.pkg.changes["propdistribution"][addsuite] = 1
2344                             cansave = 1
2345
2346                     if not cansave:
2347                         self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2348
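         # The VersionChecks lists consulted above come from the suite
         # configuration; an illustrative apt.conf-style fragment (suite
         # names hypothetical):
         #
         #   Suite::unstable::VersionChecks
         #   {
         #     MustBeNewerThan { "stable"; "testing"; };
         #     MustBeOlderThan { "experimental"; };
         #     Enhances { "unstable"; };
         #   };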
2349     ################################################################################
2350     def check_binary_against_db(self, filename, session):
2351         # Ensure version is sane
2352         q = session.query(BinAssociation)
2353         q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[filename]["package"])
2354         q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[filename]["architecture"], 'all']))
2355
2356         self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
2357                                        filename, self.pkg.files[filename]["version"], sourceful=False)
2358
2359         # Check for any existing copies of the file
2360         q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2361         q = q.filter_by(version=self.pkg.files[filename]["version"])
2362         q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2363
2364         if q.count() > 0:
2365             self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
2366
2367     ################################################################################
2368
2369     def check_source_against_db(self, filename, session):
2370         source = self.pkg.dsc.get("source")
2371         version = self.pkg.dsc.get("version")
2372
2373         # Ensure version is sane
2374         q = session.query(SrcAssociation)
2375         q = q.join(DBSource).filter(DBSource.source==source)
2376
2377         self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
2378                                        filename, version, sourceful=True)
2379
2380     ################################################################################
2381     def check_dsc_against_db(self, filename, session):
2382         """
2383
2384         @warning: NB: this function can remove entries from the 'files' index [if
2385          the orig tarball is a duplicate of the one in the archive]; if
2386          you're iterating over 'files' and call this function as part of
2387          the loop, be sure to add a check to the top of the loop to
2388          ensure you haven't just tried to dereference the deleted entry.
2389
2390         """
2391
2392         Cnf = Config()
2393         self.pkg.orig_files = {} # XXX: do we need to clear it?
2394         orig_files = self.pkg.orig_files
2395
2396         # Try and find all files mentioned in the .dsc.  This has
2397         # to work harder to cope with the multiple possible
2398         # locations of an .orig.tar.gz.
2399         # The ordering on the select is needed to pick the newest orig
2400         # when it exists in multiple places.
2401         for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2402             found = None
2403             if self.pkg.files.has_key(dsc_name):
2404                 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2405                 actual_size = int(self.pkg.files[dsc_name]["size"])
2406                 found = "%s in incoming" % (dsc_name)
2407
2408                 # Check the file does not already exist in the archive
2409                 ql = get_poolfile_like_name(dsc_name, session)
2410
2411                 # Only keep results whose filename is exactly dsc_name or
2412                 # ends in '/<dsc_name>'.  Filter into a new list: removing
2413                 # items from a list while iterating over it skips entries.
2414                 ql = [i for i in ql if i.filename.endswith(dsc_name)]
2415
2416                 # "[dak] has not broken them.  [dak] has fixed a
2417                 # brokenness.  Your crappy hack exploited a bug in
2418                 # the old dinstall."
2419                 #
2420                 # "(Come on!  I thought it was always obvious that
2421                 # one just doesn't release different files with
2422                 # the same name and version.)"
2423                 #                        -- ajk@ on d-devel@l.d.o

                if len(ql) > 0:
                    # Ignore exact matches for .orig.tar.gz
                    match = 0
                    if re_is_orig_source.match(dsc_name):
                        for i in ql:
                            if self.pkg.files.has_key(dsc_name) and \
                               int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
                               self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
                                self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
                                # TODO: Don't delete the entry, just mark it as not needed
                                # This would fix the stupidity of changing something we often iterate over
                                # whilst we're doing it
                                del self.pkg.files[dsc_name]
                                dsc_entry["files id"] = i.file_id
                                if not orig_files.has_key(dsc_name):
                                    orig_files[dsc_name] = {}
                                orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
                                match = 1

                                # Don't bitch that we couldn't find this file later
                                try:
                                    self.later_check_files.remove(dsc_name)
                                except ValueError:
                                    pass

                    if not match:
                        self.rejects.append("cannot overwrite existing copy of '%s' already in the archive." % (dsc_name))

            elif re_is_orig_source.match(dsc_name):
                # Check in the pool
                ql = get_poolfile_like_name(dsc_name, session)

                # Keep only pool files whose filename ends with dsc_name.
                # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
                ql = [i for i in ql if i.filename.endswith(dsc_name)]

                if len(ql) > 0:
                    # Unfortunately, we may get more than one match here if,
                    # for example, the package was in potato but had an -sa
                    # upload in woody.  So we need to choose the right one.

                    # default to something sane in case we don't match any or have only one
                    x = ql[0]

                    if len(ql) > 1:
                        for i in ql:
                            old_file = os.path.join(i.location.path, i.filename)
                            old_file_fh = utils.open_file(old_file)
                            actual_md5 = apt_pkg.md5sum(old_file_fh)
                            old_file_fh.close()
                            actual_size = os.stat(old_file)[stat.ST_SIZE]
                            if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
                                x = i

                    # Use the chosen match, not the leftover loop variable
                    old_file = os.path.join(x.location.path, x.filename)
                    old_file_fh = utils.open_file(old_file)
                    actual_md5 = apt_pkg.md5sum(old_file_fh)
                    old_file_fh.close()
                    actual_size = os.stat(old_file)[stat.ST_SIZE]
                    found = old_file
                    suite_type = x.location.archive_type
                    # need this for updating dsc_files in install()
                    dsc_entry["files id"] = x.file_id
                    # See install() in process-accepted...
                    if not orig_files.has_key(dsc_name):
                        orig_files[dsc_name] = {}
                    orig_files[dsc_name]["id"] = x.file_id
                    orig_files[dsc_name]["path"] = old_file
                    orig_files[dsc_name]["location"] = x.location.location_id
                else:
                    # TODO: Record the queues and info in the DB so we don't hardcode all this crap
                    # Not there? Check the queue directories...
                    for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                        if not Cnf.has_key("Dir::Queue::%s" % (directory)):
                            continue
                        in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
                        if os.path.exists(in_otherdir):
                            in_otherdir_fh = utils.open_file(in_otherdir)
                            actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
                            in_otherdir_fh.close()
                            actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
                            found = in_otherdir
                            if not orig_files.has_key(dsc_name):
                                orig_files[dsc_name] = {}
                            orig_files[dsc_name]["path"] = in_otherdir

                    if not found:
                        self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
                        continue
            else:
                self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
                continue
            if actual_md5 != dsc_entry["md5sum"]:
                self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
            if actual_size != int(dsc_entry["size"]):
                self.rejects.append("size for %s doesn't match %s." % (found, filename))

    ################################################################################
    # This is used by process-new and process-holding to recheck a changes file
    # at the time we're running.  It mainly wraps various other internal functions
    # and is similar to accepted_checks - these should probably be tidied up
    # and combined.
    def recheck(self, session):
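        """
        Re-run the database-dependent sanity checks for every file in the
        upload; see the comment above for the callers.
        """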
        cnf = Config()
        for f in self.pkg.files.keys():
            # The .orig.tar.gz can disappear out from under us if it's a
            # duplicate of one in the archive.
            if not self.pkg.files.has_key(f):
                continue

            entry = self.pkg.files[f]

            # Check that the source still exists
            if entry["type"] == "deb":
                source_version = entry["source version"]
                source_package = entry["source package"]
                if not self.pkg.changes["architecture"].has_key("source") \
                   and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
                    source_epochless_version = re_no_epoch.sub('', source_version)
                    dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
                    found = False
                    for q in ["Embargoed", "Unembargoed", "Newstage"]:
                        if cnf.has_key("Dir::Queue::%s" % (q)):
                            if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
                                found = True
                    if not found:
                        self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))

            # Version and file overwrite checks
            if entry["type"] == "deb":
                self.check_binary_against_db(f, session)
            elif entry["type"] == "dsc":
                self.check_source_against_db(f, session)
                self.check_dsc_against_db(f, session)

    ################################################################################
    def accepted_checks(self, overwrite_checks, session):
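        """
        Redo the database-backed checks at install time, and decide which
        extra suites (from propdistribution) the upload propagates to.
        """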
        # Recheck anything that relies on the database, since that's not
        # frozen between accept and our run time when called from process-accepted.

        # overwrite_checks is set to False when installing to stable/oldstable

        propagate = {}
        nopropagate = {}

        # Find the .dsc (again)
        dsc_filename = None
        for f in self.pkg.files.keys():
            if self.pkg.files[f]["type"] == "dsc":
                dsc_filename = f

        for checkfile in self.pkg.files.keys():
            # The .orig.tar.gz can disappear out from under us if it's a
            # duplicate of one in the archive.
            if not self.pkg.files.has_key(checkfile):
                continue

            entry = self.pkg.files[checkfile]

            # Check that the source still exists
            if entry["type"] == "deb":
                source_version = entry["source version"]
                source_package = entry["source package"]
                if not self.pkg.changes["architecture"].has_key("source") \
                   and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
                    self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))

            # Version and file overwrite checks
            if overwrite_checks:
                if entry["type"] == "deb":
                    self.check_binary_against_db(checkfile, session)
                elif entry["type"] == "dsc":
                    self.check_source_against_db(checkfile, session)
                    self.check_dsc_against_db(dsc_filename, session)

            # Propagate if the package is in the override tables:
            for suite in self.pkg.changes.get("propdistribution", {}).keys():
                if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
                    propagate[suite] = 1
                else:
                    nopropagate[suite] = 1

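        # Add the upload to a propagation suite only when every file in it
        # had an override entry for that suite.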
        for suite in propagate.keys():
            if suite in nopropagate:
                continue
            self.pkg.changes["distribution"][suite] = 1

        for checkfile in self.pkg.files.keys():
            entry = self.pkg.files[checkfile]
            # Check the package is still in the override tables
            for suite in self.pkg.changes["distribution"].keys():
                if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
                    self.rejects.append("%s is NEW for %s." % (checkfile, suite))

    ################################################################################
    # This is not really a reject, but an unaccept; since a) the code for
    # that is non-trivial (reopen bugs, unannounce etc.) and b) this should be
    # extremely rare, for now we'll go with whining at our admin folks...

    def do_unaccept(self):
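        """
        Send the rejection mail for an upload that has already been accepted
        and record it as a .reason file in the reject queue.
        """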
        cnf = Config()

        self.update_subst()
        self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
        self.Subst["__BCC__"] = "X-DAK: dak process-accepted"
        if cnf.has_key("Dinstall::Bcc"):
            self.Subst["__BCC__"] += "\nBcc: %s" % (cnf["Dinstall::Bcc"])

        template = os.path.join(cnf["Dir::Templates"], "process-accepted.unaccept")

        reject_mail_message = utils.TemplateSubst(self.Subst, template)

        # Write the rejection email out as the <foo>.reason file
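        # ([:-8] strips the 8-character ".changes" extension from the filename)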
        reason_filename = os.path.basename(self.pkg.changes_file[:-8]) + ".reason"
        reject_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reject_filename):
            os.unlink(reject_filename)

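        # O_EXCL makes this open fail if the file has reappeared since the
        # unlink above, rather than silently reusing it.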
        fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
        os.write(fd, reject_mail_message)
        os.close(fd)

        utils.send_mail(reject_mail_message)

        del self.Subst["__REJECTOR_ADDRESS__"]
        del self.Subst["__REJECT_MESSAGE__"]
        del self.Subst["__CC__"]

    ################################################################################
    # If any file of an upload has a recent mtime then chances are good
    # the file is still being uploaded.

    def upload_too_new(self):
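        """
        Return True if any file of the upload has been modified within the
        last Dinstall::SkipTime seconds, in which case the upload may still
        be in progress.
        """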
        cnf = Config()
        too_new = False
        # Move back to the original directory to get accurate time stamps
        cwd = os.getcwd()
        os.chdir(self.pkg.directory)
        file_list = self.pkg.files.keys()
        file_list.extend(self.pkg.dsc_files.keys())
        file_list.append(self.pkg.changes_file)
        for f in file_list:
            try:
                last_modified = time.time() - os.path.getmtime(f)
                if last_modified < int(cnf["Dinstall::SkipTime"]):
                    too_new = True
                    break
            except OSError:
                # The file may have vanished from under us; ignore it.
                pass

        os.chdir(cwd)
        return too_new

    def store_changelog(self):
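        """
        Store the changelog text of a sourceful (or binary NMU) upload in the
        changelogs_text table and link it to the upload's row in the changes
        table.
        """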

        # Skip binary-only upload if it is not a bin-NMU
        if not self.pkg.changes['architecture'].has_key('source'):
            from daklib.regexes import re_bin_only_nmu
            if not re_bin_only_nmu.search(self.pkg.changes['version']):
                return

        session = DBConn().session()

        # Check if upload already has a changelog entry
        query = """SELECT changelog_id FROM changes WHERE source = :source
                   AND version = :version AND architecture = :architecture AND changelog_id != 0"""
        if session.execute(query, {'source': self.pkg.changes['source'], \
                                   'version': self.pkg.changes['version'], \
                                   'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
            session.commit()
            return

        # Add current changelog text into changelogs_text table, return created ID
        query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
        ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]

        # Link ID to the upload available in changes table
        query = """UPDATE changes SET changelog_id = :id WHERE source = :source
                   AND version = :version AND architecture = :architecture"""
        session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
                                'version': self.pkg.changes['version'], \
                                'architecture': " ".join(self.pkg.changes['architecture'].keys())})

        session.commit()