[dak.git] / daklib / queue.py
1 #!/usr/bin/env python
2 # vim:set et sw=4:
3
4 """
5 Queue utility functions for dak
6
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009, 2010  Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
11 """
12
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
17
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
21 # GNU General Public License for more details.
22
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
26
27 ###############################################################################
28
29 import errno
30 import os
31 import stat
32 import sys
33 import time
34 import apt_inst
35 import apt_pkg
36 import utils
37 import commands
38 import shutil
39 import textwrap
40 from types import *
41 from sqlalchemy.sql.expression import desc
42 from sqlalchemy.orm.exc import NoResultFound
43
44 import yaml
45
46 from dak_exceptions import *
47 from changes import *
48 from regexes import *
49 from config import Config
50 from holding import Holding
51 from urgencylog import UrgencyLog
52 from dbconn import *
53 from summarystats import SummaryStats
54 from utils import parse_changes, check_dsc_files
55 from textutils import fix_maintainer
56 from binary import Binary
57 from lintian import parse_lintian_output, generate_reject_messages
58
59 ###############################################################################
60
61 def get_type(f, session):
62     """
63     Get the file type of C{f}
64
65     @type f: dict
66     @param f: file entry from Changes object
67
68     @type session: SQLA Session
69     @param session: SQL Alchemy session object
70
71     @rtype: string
72     @return: filetype
73
74     """
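    # Illustrative call (a sketch; the entry shape is assumed from the files
    # dict built by load_changes, and "dbtype" is only present once
    # binary_file_checks() has set it):
    #
    #   session = DBConn().session()
    #   f = {"type": "dsc", "package": "dak"}
    #   get_type(f, session)    # -> "dsc"
    #   session.close()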
75     # Determine the type
76     if f.has_key("dbtype"):
77         file_type = f["dbtype"]
78     elif re_source_ext.match(f["type"]):
79         file_type = "dsc"
80     else:
81         utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (f["type"]))
82
83     # Validate the override type
84     type_id = get_override_type(file_type, session)
85     if type_id is None:
86         utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))
87
88     return file_type
89
90 ################################################################################
91
92 # Determine what parts in a .changes are NEW
93
94 def determine_new(changes, files, warn=1):
95     """
96     Determine what parts in a C{changes} file are NEW.
97
98     @type changes: Upload.Pkg.changes dict
99     @param changes: Changes dictionary
100
101     @type files: Upload.Pkg.files dict
102     @param files: Files dictionary
103
104     @type warn: bool
105     @param warn: Warn if overrides are added for (old)stable
106
107     @rtype: dict
108     @return: dictionary of NEW components.
109
110     """
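    # Sketch of the result for a hypothetical source-only upload from an
    # Upload u: each NEW package maps to its override candidates plus the
    # files that carry it.
    #
    #   new = determine_new(u.pkg.changes, u.pkg.files)
    #   # e.g. {"dak": {"priority": "source", "section": "admin",
    #   #               "type": "dsc", "component": "main",
    #   #               "files": ["dak_1.0.dsc"]}}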
111     new = {}
112
113     session = DBConn().session()
114
115     # Build up a list of potentially new things
116     for name, f in files.items():
117         # Skip byhand elements
118 #        if f["type"] == "byhand":
119 #            continue
120         pkg = f["package"]
121         priority = f["priority"]
122         section = f["section"]
123         file_type = get_type(f, session)
124         component = f["component"]
125
126         if file_type == "dsc":
127             priority = "source"
128
129         if not new.has_key(pkg):
130             new[pkg] = {}
131             new[pkg]["priority"] = priority
132             new[pkg]["section"] = section
133             new[pkg]["type"] = file_type
134             new[pkg]["component"] = component
135             new[pkg]["files"] = []
136         else:
137             old_type = new[pkg]["type"]
138             if old_type != file_type:
139                 # source gets trumped by deb or udeb
140                 if old_type == "dsc":
141                     new[pkg]["priority"] = priority
142                     new[pkg]["section"] = section
143                     new[pkg]["type"] = file_type
144                     new[pkg]["component"] = component
145
146         new[pkg]["files"].append(name)
147
148         if f.has_key("othercomponents"):
149             new[pkg]["othercomponents"] = f["othercomponents"]
150
151     # Fix up the list of target suites
152     cnf = Config()
153     for suite in changes["suite"].keys():
154         override = cnf.Find("Suite::%s::OverrideSuite" % (suite))
155         if override:
156             (olderr, newerr) = (get_suite(suite, session) is None,
157                                 get_suite(override, session) is None)
158             if olderr or newerr:
159                 (oinv, ninv) = ("", "")
160                 if olderr: oinv = "invalid "
161                 if newerr: ninv = "invalid "
162                 print "warning: overriding %ssuite %s to %ssuite %s" % (
163                         oinv, suite, ninv, override)
164             del changes["suite"][suite]
165             changes["suite"][override] = 1
166
167     for suite in changes["suite"].keys():
168         for pkg in new.keys():
169             ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
170             if len(ql) > 0:
171                 for file_entry in new[pkg]["files"]:
172                     if files[file_entry].has_key("new"):
173                         del files[file_entry]["new"]
174                 del new[pkg]
175
176     if warn:
177         for s in ['stable', 'oldstable']:
178             if changes["suite"].has_key(s):
179                 print "WARNING: overrides will be added for %s!" % s
180         for pkg in new.keys():
181             if new[pkg].has_key("othercomponents"):
182                 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
183
184     session.close()
185
186     return new
187
188 ################################################################################
189
190 def check_valid(new):
191     """
192     Check if section and priority for NEW packages exist in database.
193     Additionally does sanity checks:
194       - debian-installer packages have to be udeb (or source)
195       - non-debian-installer packages cannot be udeb
196       - source priority can only be assigned to dsc file types
197
198     @type new: dict
199     @param new: Dict of new packages with their section, priority and type.
200
201     """
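    # Sketch with a hypothetical entry: a udeb filed outside a
    # debian-installer section is flagged by forcing its section id to -1.
    #
    #   new = {"foo-udeb": {"section": "web", "priority": "optional",
    #                       "type": "udeb"}}
    #   check_valid(new)
    #   # new["foo-udeb"]["section id"] == -1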
202     for pkg in new.keys():
203         section_name = new[pkg]["section"]
204         priority_name = new[pkg]["priority"]
205         file_type = new[pkg]["type"]
206
207         section = get_section(section_name)
208         if section is None:
209             new[pkg]["section id"] = -1
210         else:
211             new[pkg]["section id"] = section.section_id
212
213         priority = get_priority(priority_name)
214         if priority is None:
215             new[pkg]["priority id"] = -1
216         else:
217             new[pkg]["priority id"] = priority.priority_id
218
219         # Sanity checks
220         di = section_name.find("debian-installer") != -1
221
222         # If d-i, we must be udeb and vice-versa
223         if     (di and file_type not in ("udeb", "dsc")) or \
224            (not di and file_type == "udeb"):
225             new[pkg]["section id"] = -1
226
227         # If dsc we need to be source and vice-versa
228         if (priority_name == "source" and file_type != "dsc") or \
229            (priority_name != "source" and file_type == "dsc"):
230             new[pkg]["priority id"] = -1
231
232 ###############################################################################
233
234 # Used by Upload.check_timestamps
235 class TarTime(object):
236     def __init__(self, future_cutoff, past_cutoff):
237         self.reset()
238         self.future_cutoff = future_cutoff
239         self.past_cutoff = past_cutoff
240
241     def reset(self):
242         self.future_files = {}
243         self.ancient_files = {}
244
245     def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
246         if MTime > self.future_cutoff:
247             self.future_files[Name] = MTime
248         if MTime < self.past_cutoff:
249             self.ancient_files[Name] = MTime
250
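# Sketch of how TarTime is driven from Upload.check_timestamps (the callback
# signature matches the old python-apt tar visitor; the cutoff values here
# are hypothetical):
#
#   tar = TarTime(future_cutoff=time.time() + 24 * 3600, past_cutoff=0)
#   apt_inst.debExtract(utils.open_file("foo.deb"), tar.callback, "control.tar.gz")
#   if tar.future_files or tar.ancient_files:
#       pass  # reject: timestamps outside the allowed window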
251 ###############################################################################
252
253 class Upload(object):
254     """
255     Everything that has to do with processing an upload.
256
257     """
258     def __init__(self):
259         self.logger = None
260         self.pkg = Changes()
261         self.reset()
262
263     ###########################################################################
264
265     def reset (self):
266         """ Reset a number of internal variables."""
267
268         # Initialize the substitution template map
269         cnf = Config()
270         self.Subst = {}
271         self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
272         self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
273         self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
274         self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
275
276         self.rejects = []
277         self.warnings = []
278         self.notes = []
279
280         self.later_check_files = []
281
282         self.pkg.reset()
283
284     def package_info(self):
285         """
286         Format various messages from this Upload to send to the maintainer.
287         """
288
289         msgs = (
290             ('Reject Reasons', self.rejects),
291             ('Warnings', self.warnings),
292             ('Notes', self.notes),
293         )
294
295         msg = ''
296         for title, messages in msgs:
297             if messages:
298                 msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
299         msg += '\n\n'
300
301         return msg
302
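    # Example of the formatted block this returns (contents hypothetical):
    #
    #   Reject Reasons:
    #   foo_1.0.dsc: invalid source name 'Foo'
    #
    #   Warnings:
    #   Ignoring experimental as a target suite.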
303     ###########################################################################
304     def update_subst(self):
305         """ Set up the per-package template substitution mappings """
306
307         cnf = Config()
308
309         # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
310         if not self.pkg.changes.has_key("architecture") or not \
311            isinstance(self.pkg.changes["architecture"], dict):
312             self.pkg.changes["architecture"] = { "Unknown" : "" }
313
314         # and maintainer2047 may not exist.
315         if not self.pkg.changes.has_key("maintainer2047"):
316             self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]
317
318         self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
319         self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
320         self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")
321
322         # For source uploads the Changed-By field wins; otherwise Maintainer wins.
323         if self.pkg.changes["architecture"].has_key("source") and \
324            self.pkg.changes["changedby822"] != "" and \
325            (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
326
327             self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
328             self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
329             self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
330         else:
331             self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
332             self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
333             self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")
334
335         if "sponsoremail" in self.pkg.changes:
336             self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
337
338         if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
339             self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
340
341         # Apply any global override of the Maintainer field
342         if cnf.get("Dinstall::OverrideMaintainer"):
343             self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
344             self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]
345
346         self.Subst["__REJECT_MESSAGE__"] = self.package_info()
347         self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
348         self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
349
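    # The __X__ keys set above are consumed by the mail templates; a typical
    # usage sketch for an Upload u (template name illustrative):
    #
    #   u.update_subst()
    #   mail = utils.TemplateSubst(u.Subst, cnf["Dir::Templates"] + "/process-unchecked.new")
    #   utils.send_mail(mail)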
350     ###########################################################################
351     def load_changes(self, filename):
352         """
353         Load a changes file and set up a dictionary around it. Also checks for
354         mandatory fields within.
355
356         @type filename: string
357         @param filename: Changes filename, full path.
358
359         @rtype: boolean
360         @return: whether the changes file was valid or not.  We may want to
361                  reject even if this is True (see what gets put in self.rejects).
362                  This is simply to prevent us even trying things later which will
363                  fail because we couldn't properly parse the file.
364         """
365         Cnf = Config()
366         self.pkg.changes_file = filename
367
368         # Parse the .changes field into a dictionary
369         try:
370             self.pkg.changes.update(parse_changes(filename))
371         except CantOpenError:
372             self.rejects.append("%s: can't read file." % (filename))
373             return False
374         except ParseChangesError, line:
375             self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
376             return False
377         except ChangesUnicodeError:
378             self.rejects.append("%s: changes file not proper utf-8" % (filename))
379             return False
380
381         # Parse the Files field from the .changes into another dictionary
382         try:
383             self.pkg.files.update(utils.build_file_list(self.pkg.changes))
384         except ParseChangesError, line:
385             self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
386             return False
387         except UnknownFormatError, format:
388             self.rejects.append("%s: unknown format '%s'." % (filename, format))
389             return False
390
391         # Check for mandatory fields
392         for i in ("distribution", "source", "binary", "architecture",
393                   "version", "maintainer", "files", "changes", "description"):
394             if not self.pkg.changes.has_key(i):
395                 # Avoid undefined errors later
396                 self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
397                 return False
398
399         # Strip a source version in brackets from the source field
400         if re_strip_srcver.search(self.pkg.changes["source"]):
401             self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])
402
403         # Ensure the source field is a valid package name.
404         if not re_valid_pkg_name.match(self.pkg.changes["source"]):
405             self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))
406
407         # Split multi-value fields into a lower-level dictionary
408         for i in ("architecture", "distribution", "binary", "closes"):
409             o = self.pkg.changes.get(i, "")
410             if o != "":
411                 del self.pkg.changes[i]
412
413             self.pkg.changes[i] = {}
414
415             for j in o.split():
416                 self.pkg.changes[i][j] = 1
417
418         # Fix the Maintainer: field to be RFC822/2047 compatible
419         try:
420             (self.pkg.changes["maintainer822"],
421              self.pkg.changes["maintainer2047"],
422              self.pkg.changes["maintainername"],
423              self.pkg.changes["maintaineremail"]) = \
424                    fix_maintainer (self.pkg.changes["maintainer"])
425         except ParseMaintError, msg:
426             self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
427                    % (filename, self.pkg.changes["maintainer"], msg))
428
429         # ...likewise for the Changed-By: field if it exists.
430         try:
431             (self.pkg.changes["changedby822"],
432              self.pkg.changes["changedby2047"],
433              self.pkg.changes["changedbyname"],
434              self.pkg.changes["changedbyemail"]) = \
435                    fix_maintainer (self.pkg.changes.get("changed-by", ""))
436         except ParseMaintError, msg:
437             self.pkg.changes["changedby822"] = ""
438             self.pkg.changes["changedby2047"] = ""
439             self.pkg.changes["changedbyname"] = ""
440             self.pkg.changes["changedbyemail"] = ""
441
442             self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
443                    % (filename, self.pkg.changes["changed-by"], msg))
444
445         # Ensure all the values in Closes: are numbers
446         if self.pkg.changes.has_key("closes"):
447             for i in self.pkg.changes["closes"].keys():
448                 if re_isanum.match(i) is None:
449                     self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))
450
451         # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
452         self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
453         self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])
454
455         # Check the .changes is non-empty
456         if not self.pkg.files:
457             self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
458             return False
459
460         # The changes file was syntactically valid, even if we will reject it later
461         return True
462
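    # Typical driver sketch (filename hypothetical): parse first, then layer
    # the deeper checks on top, collecting rejects as we go.
    #
    #   u = Upload()
    #   if u.load_changes("/srv/queue/unchecked/foo_1.0_amd64.changes"):
    #       u.check_distributions()
    #       u.check_files(action=False)
    #   print u.package_info()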
463     ###########################################################################
464
465     def check_distributions(self):
466         "Check and map the Distribution field"
467
468         Cnf = Config()
469
470         # Handle suite mappings
471         for m in Cnf.ValueList("SuiteMappings"):
472             args = m.split()
473             mtype = args[0]
474             if mtype == "map" or mtype == "silent-map":
475                 (source, dest) = args[1:3]
476                 if self.pkg.changes["distribution"].has_key(source):
477                     del self.pkg.changes["distribution"][source]
478                     self.pkg.changes["distribution"][dest] = 1
479                     if mtype != "silent-map":
480                         self.notes.append("Mapping %s to %s." % (source, dest))
481                 if self.pkg.changes.has_key("distribution-version"):
482                     if self.pkg.changes["distribution-version"].has_key(source):
483                         self.pkg.changes["distribution-version"][source]=dest
484             elif mtype == "map-unreleased":
485                 (source, dest) = args[1:3]
486                 if self.pkg.changes["distribution"].has_key(source):
487                     for arch in self.pkg.changes["architecture"].keys():
488                         if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
489                             self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
490                             del self.pkg.changes["distribution"][source]
491                             self.pkg.changes["distribution"][dest] = 1
492                             break
493             elif mtype == "ignore":
494                 suite = args[1]
495                 if self.pkg.changes["distribution"].has_key(suite):
496                     del self.pkg.changes["distribution"][suite]
497                     self.warnings.append("Ignoring %s as a target suite." % (suite))
498             elif mtype == "reject":
499                 suite = args[1]
500                 if self.pkg.changes["distribution"].has_key(suite):
501                     self.rejects.append("Uploads to %s are not accepted." % (suite))
502             elif mtype == "propup-version":
503                 # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
504                 #
505                 # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
506                 if self.pkg.changes["distribution"].has_key(args[1]):
507                     self.pkg.changes.setdefault("distribution-version", {})
508                     for suite in args[2:]:
509                         self.pkg.changes["distribution-version"][suite] = suite
510
511         # Ensure there is (still) a target distribution
512         if len(self.pkg.changes["distribution"].keys()) < 1:
513             self.rejects.append("No valid distribution remaining.")
514
515         # Ensure target distributions exist
516         for suite in self.pkg.changes["distribution"].keys():
517             if not Cnf.has_key("Suite::%s" % (suite)):
518                 self.rejects.append("Unknown distribution `%s'." % (suite))
519
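    # Illustrative SuiteMappings entries in the apt-style configuration (suite
    # names hypothetical; the first word selects the branch handled above):
    #
    #   SuiteMappings {
    #     "map stable proposed-updates";
    #     "map-unreleased stable unstable";
    #     "propup-version testing-proposed-updates testing";
    #     "reject frozen";
    #   };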
520     ###########################################################################
521
522     def binary_file_checks(self, f, session):
523         cnf = Config()
524         entry = self.pkg.files[f]
525
526         # Extract package control information
527         deb_file = utils.open_file(f)
528         try:
529             control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
530         except:
531             self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
532             deb_file.close()
533             # Can't continue, none of the checks on control would work.
534             return
535
536         # Check for mandatory "Description:"
537         deb_file.seek(0)
538         try:
539             apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
540         except:
541             self.rejects.append("%s: Missing Description in binary package" % (f))
542             return
543
544         deb_file.close()
545
546         # Check for mandatory fields
547         for field in [ "Package", "Architecture", "Version" ]:
548             if control.Find(field) == None:
549                 # Can't continue
550                 self.rejects.append("%s: No %s field in control." % (f, field))
551                 return
552
553         # Ensure the package name matches the one given in the .changes
554         if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
555             self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
556
557         # Validate the package field
558         package = control.Find("Package")
559         if not re_valid_pkg_name.match(package):
560             self.rejects.append("%s: invalid package name '%s'." % (f, package))
561
562         # Validate the version field
563         version = control.Find("Version")
564         if not re_valid_version.match(version):
565             self.rejects.append("%s: invalid version number '%s'." % (f, version))
566
567         # Ensure the architecture of the .deb is one we know about.
568         default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
569         architecture = control.Find("Architecture")
570         upload_suite = self.pkg.changes["distribution"].keys()[0]
571
572         if      architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session)] \
573             and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session)]:
574             self.rejects.append("Unknown architecture '%s'." % (architecture))
575
576         # Ensure the architecture of the .deb is one of the ones
577         # listed in the .changes.
578         if not self.pkg.changes["architecture"].has_key(architecture):
579             self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
580
581         # Sanity-check the Depends field
582         depends = control.Find("Depends")
583         if depends == '':
584             self.rejects.append("%s: Depends field is empty." % (f))
585
586         # Sanity-check the Provides field
587         provides = control.Find("Provides")
588         if provides:
589             provide = re_spacestrip.sub('', provides)
590             if provide == '':
591                 self.rejects.append("%s: Provides field is empty." % (f))
592             prov_list = provide.split(",")
593             for prov in prov_list:
594                 if not re_valid_pkg_name.match(prov):
595                     self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
596
597         # Check the section & priority match those given in the .changes (non-fatal)
598         if     control.Find("Section") and entry["section"] != "" \
599            and entry["section"] != control.Find("Section"):
600             self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
601                                 (f, control.Find("Section", ""), entry["section"]))
602         if control.Find("Priority") and entry["priority"] != "" \
603            and entry["priority"] != control.Find("Priority"):
604             self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
605                                 (f, control.Find("Priority", ""), entry["priority"]))
606
607         entry["package"] = package
608         entry["architecture"] = architecture
609         entry["version"] = version
610         entry["maintainer"] = control.Find("Maintainer", "")
611
612         if f.endswith(".udeb"):
613             self.pkg.files[f]["dbtype"] = "udeb"
614         elif f.endswith(".deb"):
615             self.pkg.files[f]["dbtype"] = "deb"
616         else:
617             self.rejects.append("%s is neither a .deb nor a .udeb." % (f))
618
619         entry["source"] = control.Find("Source", entry["package"])
620
621         # Get the source version
622         source = entry["source"]
623         source_version = ""
624
625         if source.find("(") != -1:
626             m = re_extract_src_version.match(source)
627             source = m.group(1)
628             source_version = m.group(2)
629
630         if not source_version:
631             source_version = self.pkg.files[f]["version"]
632
633         entry["source package"] = source
634         entry["source version"] = source_version
635
636         # Ensure the filename matches the contents of the .deb
637         m = re_isadeb.match(f)
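        # re_isadeb groups, as used below (e.g. "dak_1.0-1_amd64.deb"):
        #   group(1) -> package ("dak"), group(2) -> epochless version
        #   ("1.0-1"), group(3) -> architecture ("amd64")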
638
639         #  package name
640         file_package = m.group(1)
641         if entry["package"] != file_package:
642             self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
643                                 (f, file_package, entry["dbtype"], entry["package"]))
644         epochless_version = re_no_epoch.sub('', control.Find("Version"))
645
646         #  version
647         file_version = m.group(2)
648         if epochless_version != file_version:
649             self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
650                                 (f, file_version, entry["dbtype"], epochless_version))
651
652         #  architecture
653         file_architecture = m.group(3)
654         if entry["architecture"] != file_architecture:
655             self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
656                                 (f, file_architecture, entry["dbtype"], entry["architecture"]))
657
658         # Check that a matching source exists
659         source_version = entry["source version"]
660         source_package = entry["source package"]
661         if self.pkg.changes["architecture"].has_key("source"):
662             if source_version != self.pkg.changes["version"]:
663                 self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
664                                     (source_version, f, self.pkg.changes["version"]))
665         else:
666             # Check in the SQL database
667             if not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
668                 # Check in one of the other directories
669                 source_epochless_version = re_no_epoch.sub('', source_version)
670                 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
671                 if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
672                     entry["byhand"] = 1
673                 elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
674                     entry["new"] = 1
675                 else:
676                     dsc_file_exists = False
677                     for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
678                         if cnf.has_key("Dir::Queue::%s" % (myq)):
679                             if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
680                                 dsc_file_exists = True
681                                 break
682
683                     if not dsc_file_exists:
684                         self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
685
686         # Check the version and for file overwrites
687         self.check_binary_against_db(f, session)
688
689         # Temporarily disable contents generation until we change the table storage layout
690         #b = Binary(f)
691         #b.scan_package()
692         #if len(b.rejects) > 0:
693         #    for j in b.rejects:
694         #        self.rejects.append(j)
695
696     def source_file_checks(self, f, session):
697         entry = self.pkg.files[f]
698
699         m = re_issource.match(f)
700         if not m:
701             return
702
703         entry["package"] = m.group(1)
704         entry["version"] = m.group(2)
705         entry["type"] = m.group(3)
706
707         # Ensure the source package name matches the Source field in the .changes
708         if self.pkg.changes["source"] != entry["package"]:
709             self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
710
711         # Ensure the source version matches the version in the .changes file
712         if re_is_orig_source.match(f):
713             changes_version = self.pkg.changes["chopversion2"]
714         else:
715             changes_version = self.pkg.changes["chopversion"]
716
717         if changes_version != entry["version"]:
718             self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))
719
720         # Ensure the .changes lists source in the Architecture field
721         if not self.pkg.changes["architecture"].has_key("source"):
722             self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))
723
724         # Check the signature of a .dsc file
725         if entry["type"] == "dsc":
726             # check_signature returns either:
727             #  (None, [list, of, rejects]) or (signature, [])
728             (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
729             for j in rejects:
730                 self.rejects.append(j)
731
732         entry["architecture"] = "source"
733
734     def per_suite_file_checks(self, f, suite, session):
735         cnf = Config()
736         entry = self.pkg.files[f]
737
738         # Skip byhand
739         if entry.has_key("byhand"):
740             return
741
742         # Check we have fields we need to do these checks
743         oktogo = True
744         for m in ['component', 'package', 'priority', 'size', 'md5sum']:
745             if not entry.has_key(m):
746                 self.rejects.append("file '%s' does not have field %s set" % (f, m))
747                 oktogo = False
748
749         if not oktogo:
750             return
751
752         # Handle component mappings
753         for m in cnf.ValueList("ComponentMappings"):
754             (source, dest) = m.split()
755             if entry["component"] == source:
756                 entry["original component"] = source
757                 entry["component"] = dest
758
759         # Ensure the component is valid for the target suite
760         if cnf.has_key("Suite::%s::Components" % (suite)) and \
761            entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
762             self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
763             return
764
765         # Validate the component
766         if not get_component(entry["component"], session):
767             self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
768             return
769
770         # See if the package is NEW
771         if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
772             entry["new"] = 1
773
774         # Validate the priority
775         if entry["priority"].find('/') != -1:
776             self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))
777
778         # Determine the location
779         location = cnf["Dir::Pool"]
780         l = get_location(location, entry["component"], session=session)
781         if l is None:
782             self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
783             entry["location id"] = -1
784         else:
785             entry["location id"] = l.location_id
786
787         # Check the md5sum & size against existing files (if any)
788         entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])
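        # poolify sketch: source "dak" in component "main" yields a directory
        # like "main/d/dak/"; "lib" sources use a four-character prefix, e.g.
        # "libfoo" -> "main/libf/libfoo/" (the exact root depends on Dir::Pool).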
789
790         found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
791                                          entry["size"], entry["md5sum"], entry["location id"])
792
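        # check_poolfile return convention, as interpreted below (sketch):
        #   found is None             -> multiple DB matches (internal error)
        #   found False, poolfile set -> an existing copy differs in size/md5sum
        #   poolfile is None          -> no existing file; id filled in later
        #   otherwise                 -> reuse the existing poolfile's id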
793         if found is None:
794             self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
795         elif found is False and poolfile is not None:
796             self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
797         else:
798             if poolfile is None:
799                 entry["files id"] = None
800             else:
801                 entry["files id"] = poolfile.file_id
802
803         # Check for packages that have moved from one component to another
804         entry['suite'] = suite
805         res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
806         if res.rowcount > 0:
807             entry["othercomponents"] = res.fetchone()[0]
808
809     def check_files(self, action=True):
810         file_keys = self.pkg.files.keys()
811         holding = Holding()
812         cnf = Config()
813
814         if action:
815             cwd = os.getcwd()
816             os.chdir(self.pkg.directory)
817             for f in file_keys:
818                 ret = holding.copy_to_holding(f)
819                 if ret is not None:
820                     self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)
821
822             os.chdir(cwd)
823
824         # Check whether we already know this changes file
825         # [NB: this check must be done post-suite mapping]
826         base_filename = os.path.basename(self.pkg.changes_file)
827
828         session = DBConn().session()
829
830         try:
831             dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
832             # if in the pool or in a queue other than unchecked, reject
833             if (dbc.in_queue is None) \
834                    or (dbc.in_queue is not None
835                        and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
836                 self.rejects.append("%s file already known to dak" % base_filename)
837         except NoResultFound, e:
838             # not known, good
839             pass
840
841         has_binaries = False
842         has_source = False
843
844         for f, entry in self.pkg.files.items():
845             # Ensure the file does not already exist in one of the accepted directories
846             for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
847                 if not cnf.has_key("Dir::Queue::%s" % (d)): continue
848                 if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
849                     self.rejects.append("%s file already exists in the %s directory." % (f, d))
850
851             if not re_taint_free.match(f):
852                 self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))
853
854             # Check the file is readable
855             if os.access(f, os.R_OK) == 0:
856                 # When running in -n, copy_to_holding() won't have
857                 # generated the reject_message, so we need to.
858                 if action:
859                     if os.path.exists(f):
860                         self.rejects.append("Can't read `%s'. [permission denied]" % (f))
861                     else:
862                         # Don't directly reject, mark to check later to deal with orig's
863                         # we can find in the pool
864                         self.later_check_files.append(f)
865                 entry["type"] = "unreadable"
866                 continue
867
868             # If it's byhand skip remaining checks
869             if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
870                 entry["byhand"] = 1
871                 entry["type"] = "byhand"
872
873             # Checks for a binary package...
874             elif re_isadeb.match(f):
875                 has_binaries = True
876                 entry["type"] = "deb"
877
878                 # This routine appends to self.rejects/warnings as appropriate
879                 self.binary_file_checks(f, session)
880
881             # Checks for a source package...
882             elif re_issource.match(f):
883                 has_source = True
884
885                 # This routine appends to self.rejects/warnings as appropriate
886                 self.source_file_checks(f, session)
887
888             # Not a binary or source package?  Assume byhand...
889             else:
890                 entry["byhand"] = 1
891                 entry["type"] = "byhand"
892
893             # Per-suite file checks
894             entry["oldfiles"] = {}
895             for suite in self.pkg.changes["distribution"].keys():
896                 self.per_suite_file_checks(f, suite, session)
897
898         session.close()
899
900         # If the .changes file says it has source, it must have source.
901         if self.pkg.changes["architecture"].has_key("source"):
902             if not has_source:
903                 self.rejects.append("no source found, but the Architecture line in the changes file mentions source.")
904
905             if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
906                 self.rejects.append("source only uploads are not supported.")
907
908     ###########################################################################
909     def check_dsc(self, action=True, session=None):
910         """Returns bool indicating whether or not the source changes are valid"""
911         # Ensure there is source to check
912         if not self.pkg.changes["architecture"].has_key("source"):
913             return True
914
915         # Find the .dsc
916         dsc_filename = None
917         for f, entry in self.pkg.files.items():
918             if entry["type"] == "dsc":
919                 if dsc_filename:
920                     self.rejects.append("cannot process a .changes file with multiple .dsc's.")
921                     return False
922                 else:
923                     dsc_filename = f
924
925         # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
926         if not dsc_filename:
927             self.rejects.append("source uploads must contain a dsc file")
928             return False
929
930         # Parse the .dsc file
931         try:
932             self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1, dsc_file=1))
933         except CantOpenError:
934             # if not -n copy_to_holding() will have done this for us...
935             if not action:
936                 self.rejects.append("%s: can't read file." % (dsc_filename))
937         except ParseChangesError, line:
938             self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
939         except InvalidDscError, line:
940             self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
941         except ChangesUnicodeError:
942             self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))
943
944         # Build up the file list of files mentioned by the .dsc
945         try:
946             self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
947         except NoFilesFieldError:
948             self.rejects.append("%s: no Files: field." % (dsc_filename))
949             return False
950         except UnknownFormatError, format:
951             self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
952             return False
953         except ParseChangesError, line:
954             self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
955             return False
956
957         # Enforce mandatory fields
958         for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
959             if not self.pkg.dsc.has_key(i):
960                 self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
961                 return False
962
963         # Validate the source and version fields
964         if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
965             self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
966         if not re_valid_version.match(self.pkg.dsc["version"]):
967             self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))
968
969         # Only a limited list of source formats are allowed in each suite
970         for dist in self.pkg.changes["distribution"].keys():
971             allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
972             if self.pkg.dsc["format"] not in allowed:
973                 self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))
974
975         # Validate the Maintainer field
976         try:
977             # We ignore the return value
978             fix_maintainer(self.pkg.dsc["maintainer"])
979         except ParseMaintError, msg:
980             self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
981                                  % (dsc_filename, self.pkg.dsc["maintainer"], msg))
982
983         # Validate the build-depends field(s)
984         for field_name in [ "build-depends", "build-depends-indep" ]:
985             field = self.pkg.dsc.get(field_name)
986             if field:
987                 # Have apt try to parse them...
988                 try:
989                     apt_pkg.ParseSrcDepends(field)
990                 except:
991                     self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))
992
993         # Ensure the version number in the .dsc matches the version number in the .changes
994         epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
995         changes_version = self.pkg.files[dsc_filename]["version"]
996
997         if epochless_dsc_version != changes_version:
998             self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
999
1000         # Ensure the Files field contains only what's expected
1001         self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))
1002
1003         # Ensure source is newer than existing source in target suites
1004         session = DBConn().session()
1005         self.check_source_against_db(dsc_filename, session)
1006         self.check_dsc_against_db(dsc_filename, session)
1007         session.close()
1008
1009         # Finally, check if we're missing any files
1010         for f in self.later_check_files:
1011             self.rejects.append("Could not find file %s referenced in changes" % f)
1012
1013         return True
1014
1015     ###########################################################################
1016
1017     def get_changelog_versions(self, source_dir):
1018         """Extracts the source package and (optionally) grabs the
1019         version history out of debian/changelog for the BTS."""
1020
1021         cnf = Config()
1022
1023         # Find the .dsc (again)
1024         dsc_filename = None
1025         for f in self.pkg.files.keys():
1026             if self.pkg.files[f]["type"] == "dsc":
1027                 dsc_filename = f
1028
1029         # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1030         if not dsc_filename:
1031             return
1032
1033         # Create a symlink mirror of the source files in our temporary directory
1034         for f in self.pkg.files.keys():
1035             m = re_issource.match(f)
1036             if m:
1037                 src = os.path.join(source_dir, f)
1038                 # If a file is missing for whatever reason, give up.
1039                 if not os.path.exists(src):
1040                     return
1041                 ftype = m.group(3)
1042                 if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
1043                    self.pkg.orig_files[f].has_key("path"):
1044                     continue
1045                 dest = os.path.join(os.getcwd(), f)
1046                 os.symlink(src, dest)
1047
1048         # If the orig files are not a part of the upload, create symlinks to the
1049         # existing copies.
1050         for orig_file in self.pkg.orig_files.keys():
1051             if not self.pkg.orig_files[orig_file].has_key("path"):
1052                 continue
1053             dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
1054             os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
1055
1056         # Extract the source
1057         cmd = "dpkg-source -sn -x %s" % (dsc_filename)
1058         (result, output) = commands.getstatusoutput(cmd)
1059         if (result != 0):
1060             self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
1061             self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
1062             return
1063
1064         if not cnf.Find("Dir::Queue::BTSVersionTrack"):
1065             return
1066
1067         # Get the upstream version
1068         upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1069         if re_strip_revision.search(upstr_version):
1070             upstr_version = re_strip_revision.sub('', upstr_version)
1071
1072         # Ensure the changelog file exists
1073         changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
1074         if not os.path.exists(changelog_filename):
1075             self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
1076             return
1077
1078         # Parse the changelog
1079         self.pkg.dsc["bts changelog"] = ""
1080         changelog_file = utils.open_file(changelog_filename)
1081         for line in changelog_file.readlines():
1082             m = re_changelog_versions.match(line)
1083             if m:
1084                 self.pkg.dsc["bts changelog"] += line
1085         changelog_file.close()
1086
1087         # Check we found at least one revision in the changelog
1088         if not self.pkg.dsc["bts changelog"]:
1089             self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1090
1091     def check_source(self):
1092         # Bail out if:
1093         #    a) there's no source
1094         if not self.pkg.changes["architecture"].has_key("source"):
1095             return
1096
1097         tmpdir = utils.temp_dirname()
1098
1099         # Move into the temporary directory
1100         cwd = os.getcwd()
1101         os.chdir(tmpdir)
1102
1103         # Get the changelog version history
1104         self.get_changelog_versions(cwd)
1105
1106         # Move back and cleanup the temporary tree
1107         os.chdir(cwd)
1108
1109         try:
1110             shutil.rmtree(tmpdir)
1111         except OSError, e:
1112             if e.errno != errno.EACCES:
1113                 print "unexpected error removing %s: %s" % (tmpdir, e)
1114                 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1115
1116             self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1117             # We probably have u-r or u-w directories so chmod everything
1118             # and try again.
1119             cmd = "chmod -R u+rwx %s" % (tmpdir)
1120             result = os.system(cmd)
1121             if result != 0:
1122                 utils.fubar("'%s' failed with result %s." % (cmd, result))
1123             shutil.rmtree(tmpdir)
1124         except Exception, e:
1125             print "unexpected exception removing %s: %s" % (tmpdir, e)
1126             utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1127
1128     ###########################################################################
1129     def ensure_hashes(self):
1130         # Make sure we recognise the format of the Files: field in the .changes
1131         format = self.pkg.changes.get("format", "0.0").split(".", 1)
1132         if len(format) == 2:
1133             format = int(format[0]), int(format[1])
1134         else:
1135             format = int(float(format[0])), 0
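        # Example: a "Format: 1.8" changes file yields (1, 8); a bare
        # "Format: 1" takes the else branch and yields (1, 0).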
1136
1137         # We need to deal with the original changes blob, as the fields we need
1138         # might not be in the changes dict serialised into the .dak anymore.
1139         orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1140
1141         # Copy the checksums over to the current changes dict.  This will keep
1142         # the existing modifications to it intact.
1143         for field in orig_changes:
1144             if field.startswith('checksums-'):
1145                 self.pkg.changes[field] = orig_changes[field]
1146
1147         # Check for unsupported hashes
1148         for j in utils.check_hash_fields(".changes", self.pkg.changes):
1149             self.rejects.append(j)
1150
1151         for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1152             self.rejects.append(j)
1153
1154         # We have to calculate a hash ourselves if the changes format predates the
1155         # one the hash first appeared in, rather than requiring it in the changes file
1156         for hashname, hashfunc, version in utils.known_hashes:
1157             # TODO: Move _ensure_changes_hash into this class
1158             for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1159                 self.rejects.append(j)
1160             if "source" in self.pkg.changes["architecture"]:
1161                 # TODO: Move _ensure_dsc_hash into this class
1162                 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1163                     self.rejects.append(j)
1164
1165     def check_hashes(self):
1166         for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1167             self.rejects.append(m)
1168
1169         for m in utils.check_size(".changes", self.pkg.files):
1170             self.rejects.append(m)
1171
1172         for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1173             self.rejects.append(m)
1174
1175         for m in utils.check_size(".dsc", self.pkg.dsc_files):
1176             self.rejects.append(m)
1177
1178         self.ensure_hashes()
1179
1180     ###########################################################################
1181
1182     def ensure_orig(self, target_dir='.', session=None):
1183         """
1184         Ensures that all orig files mentioned in the changes file are present
1185         in target_dir. If they do not exist, they are symlinked into place.
1186
1187         A list containing the symlinks that were created is returned (so they
1188         can be removed).
1189         """
1190
1191         symlinked = []
1192         cnf = Config()
1193
1194         for filename, entry in self.pkg.dsc_files.iteritems():
1195             if not re_is_orig_source.match(filename):
1196                 # File is not an orig; ignore
1197                 continue
1198
1199             if os.path.exists(filename):
1200                 # File exists, no need to continue
1201                 continue
1202
1203             def symlink_if_valid(path):
1204                 f = utils.open_file(path)
1205                 md5sum = apt_pkg.md5sum(f)
1206                 f.close()
1207
1208                 fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
1209                 expected = (int(entry['size']), entry['md5sum'])
1210
1211                 if fingerprint != expected:
1212                     return False
1213
1214                 dest = os.path.join(target_dir, filename)
1215
1216                 os.symlink(path, dest)
1217                 symlinked.append(dest)
1218
1219                 return True
1220
1221             session_ = session
1222             if session is None:
1223                 session_ = DBConn().session()
1224
1225             found = False
1226
1227             # Look in the pool
1228             for poolfile in get_poolfile_like_name('%s' % filename, session_):
1229                 poolfile_path = os.path.join(
1230                     poolfile.location.path, poolfile.filename
1231                 )
1232
1233                 if symlink_if_valid(poolfile_path):
1234                     found = True
1235                     break
1236
1237             if session is None:
1238                 session_.close()
1239
1240             if found:
1241                 continue
1242
1243             # Look in some other queues for the file
1244             queues = ('New', 'Byhand', 'ProposedUpdates',
1245                 'OldProposedUpdates', 'Embargoed', 'Unembargoed')
1246
1247             for queue in queues:
1248                 if not cnf.get('Dir::Queue::%s' % queue):
1249                     continue
1250
1251                 queuefile_path = os.path.join(
1252                     cnf['Dir::Queue::%s' % queue], filename
1253                 )
1254
1255                 if not os.path.exists(queuefile_path):
1256                     # Does not exist in this queue
1257                     continue
1258
1259                 if symlink_if_valid(queuefile_path):
1260                     break
1261
1262         return symlinked
1263
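    # Typical use of ensure_orig() (a sketch modelled on check_lintian
    # below): create the symlinks, do the work, then clean them up.
    #
    #   symlinked = self.ensure_orig()
    #   try:
    #       pass  # ... whatever needs the orig tarballs in place ...
    #   finally:
    #       for link in symlinked:
    #           os.unlink(link)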
1264     ###########################################################################
1265
1266     def check_lintian(self):
1267         """
1268         Extends self.rejects by checking the output of lintian against tags
1269         specified in Dinstall::LintianTags.
1270         """
1271
1272         cnf = Config()
1273
1274         # Don't reject binary uploads
1275         if not self.pkg.changes['architecture'].has_key('source'):
1276             return
1277
1278         # Only check some distributions
1279         for dist in ('unstable', 'experimental'):
1280             if dist in self.pkg.changes['distribution']:
1281                 break
1282         else:
1283             return
1284
1285         # If we do not have a tagfile, don't do anything
1286         tagfile = cnf.get("Dinstall::LintianTags")
1287         if tagfile is None:
1288             return
1289
1290         # Parse the yaml file
1291         sourcefile = file(tagfile, 'r')
1292         sourcecontent = sourcefile.read()
1293         sourcefile.close()
1294
1295         try:
1296             lintiantags = yaml.load(sourcecontent)['lintian']
1297         except yaml.YAMLError, msg:
1298             utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
1299             return
1300
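        # The tag file parsed above is assumed to map severity groups to
        # lists of lintian tags, along these lines (the group and tag
        # names are illustrative):
        #
        #   lintian:
        #     fatal:
        #       - binary-from-other-architecture
        #     nonfatal:
        #       - debian-changelog-file-missing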
1301         # Try and find all orig mentioned in the .dsc
1302         symlinked = self.ensure_orig()
1303
1304         # Setup the input file for lintian
1305         fd, temp_filename = utils.temp_filename()
1306         temptagfile = os.fdopen(fd, 'w')
1307         for tags in lintiantags.values():
1308             temptagfile.writelines(['%s\n' % x for x in tags])
1309         temptagfile.close()
1310
1311         try:
1312             cmd = "lintian --show-overrides --tags-from-file %s %s" % \
1313                 (temp_filename, self.pkg.changes_file)
1314
1315             result, output = commands.getstatusoutput(cmd)
1316         finally:
1317             # Remove our tempfile and any symlinks we created
1318             os.unlink(temp_filename)
1319
1320             for symlink in symlinked:
1321                 os.unlink(symlink)
1322
1323         if result == 2:
1324             utils.warn("lintian failed for %s [return code: %s]." % \
1325                 (self.pkg.changes_file, result))
1326             utils.warn(utils.prefix_multi_line_string(output, \
1327                 " [possible output:] "))
1328
1329         def log(*txt):
1330             if self.logger:
1331                 self.logger.log(
1332                     [self.pkg.changes_file, "check_lintian"] + list(txt)
1333                 )
1334
1335         # Generate messages
1336         parsed_tags = parse_lintian_output(output)
1337         self.rejects.extend(
1338             generate_reject_messages(parsed_tags, lintiantags, log=log)
1339         )
1340
1341     ###########################################################################
1342     def check_urgency(self):
1343         cnf = Config()
1344         if self.pkg.changes["architecture"].has_key("source"):
1345             if not self.pkg.changes.has_key("urgency"):
1346                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1347             self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1348             if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1349                 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1350                                      (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1351                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1352
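    # Example of the urgency handling above (the authoritative list lives
    # in Urgency::Valid; the values here are assumptions):
    #
    #   "HIGH"   -> "high"                    (valid after lowercasing)
    #   "wibble" -> cnf["Urgency::Default"]   (invalid; a warning is added)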
1353     ###########################################################################
1354
1355     # Sanity check the time stamps of files inside debs.
1356     # [Files in the near future cause ugly warnings and extreme time
1357     #  travel can cause errors on extraction]
1358
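    # Sketch of the cutoff arithmetic used below (the option names are the
    # real ones, the example values are made up):
    #
    #   future_cutoff = time.time() + 86400                     # one day of grace
    #   past_cutoff = time.mktime(time.strptime("1984", "%Y"))  # reject pre-1984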
1359     def check_timestamps(self):
1360         Cnf = Config()
1361
1362         future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1363         past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1364         tar = TarTime(future_cutoff, past_cutoff)
1365
1366         for filename, entry in self.pkg.files.items():
1367             if entry["type"] == "deb":
1368                 tar.reset()
1369                 try:
1370                     deb_file = utils.open_file(filename)
1371                     apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
1372                     deb_file.seek(0)
1373                     try:
1374                         apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
1375                     except SystemError, e:
1376                         # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
1377                         if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
1378                             raise
1379                         deb_file.seek(0)
1380                         apt_inst.debExtract(deb_file, tar.callback, "data.tar.bz2")
1381
1382                     deb_file.close()
1383
1384                     future_files = tar.future_files.keys()
1385                     if future_files:
1386                         num_future_files = len(future_files)
1387                         future_file = future_files[0]
1388                         future_date = tar.future_files[future_file]
1389                         self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1390                                % (filename, num_future_files, future_file, time.ctime(future_date)))
1391
1392                     ancient_files = tar.ancient_files.keys()
1393                     if ancient_files:
1394                         num_ancient_files = len(ancient_files)
1395                         ancient_file = ancient_files[0]
1396                         ancient_date = tar.ancient_files[ancient_file]
1397                         self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1398                                % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1399                 except:
1400                     self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1401
1402     def check_if_upload_is_sponsored(self, uid_email, uid_name):
1403         if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1404             sponsored = False
1405         elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1406             sponsored = False
1407             if uid_name == "":
1408                 sponsored = True
1409         else:
1410             sponsored = True
1411             if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1412                 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1413                 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1414                     self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1415                         self.pkg.changes["sponsoremail"] = uid_email
1416
1417         return sponsored
1418
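    # Summary of the decision above:
    #   uid email matches Maintainer/Changed-By email      -> not sponsored
    #   uid name matches either name (and is non-empty)    -> not sponsored
    #   anything else                                      -> sponsored
    # and for sponsored sourceful uploads via an email alias, the sponsor's
    # address is recorded in changes["sponsoremail"].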
1419
1420     ###########################################################################
1421     # check_signed_by_key checks
1422     ###########################################################################
1423
1424     def check_signed_by_key(self):
1425         """Ensure the .changes is signed by an authorized uploader."""
1426         session = DBConn().session()
1427
1428         # First of all we check that the person has proper upload permissions
1429         # and that this upload isn't blocked
1430         fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1431
1432         if fpr is None:
1433             self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1434             return
1435
1436         # TODO: Check that import-keyring adds UIDs properly
1437         if not fpr.uid:
1438             self.rejects.append("Cannot find uid for fingerprint %s.  Please contact ftpmaster@debian.org" % fpr.fingerprint)
1439             return
1440
1441         # Check that the fingerprint which uploaded has permission to do so
1442         self.check_upload_permissions(fpr, session)
1443
1444         # Check that this package is not in a transition
1445         self.check_transition(session)
1446
1447         session.close()
1448
1449
1450     def check_upload_permissions(self, fpr, session):
1451         # Check any one-off upload blocks
1452         self.check_upload_blocks(fpr, session)
1453
1454         # Start with DM as a special case
1455         # DM is a special case unfortunately, so we check it first
1456         # (keys with no source access get more access than DMs in one
1457         #  way; DMs can only upload for their packages whether source
1458         #  or binary, whereas keys with no access might be able to
1459         #  upload some binaries)
1460         if fpr.source_acl.access_level == 'dm':
1461             self.check_dm_upload(fpr, session)
1462         else:
1463             # Check source-based permissions for other types
1464             if self.pkg.changes["architecture"].has_key("source") and \
1465                 fpr.source_acl.access_level is None:
1466                 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1467                 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1468                 self.rejects.append(rej)
1469                 return
1470             # If not a DM, we allow full upload rights
1471             uid_email = "%s@debian.org" % (fpr.uid.uid)
1472             self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1473
1474
1475         # Check binary upload permissions
1476         # By this point we know that DMs can't have got here unless they
1477         # are allowed to deal with the package concerned so just apply
1478         # normal checks
1479         if fpr.binary_acl.access_level == 'full':
1480             return
1481
1482         # Otherwise we're in the map case
1483         tmparches = self.pkg.changes["architecture"].copy()
1484         tmparches.pop('source', None)
1485
1486         for bam in fpr.binary_acl_map:
1487             tmparches.pop(bam.architecture.arch_string, None)
1488
1489         if len(tmparches.keys()) > 0:
1490             if fpr.binary_reject:
1491                 rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1492                 rej += "\narchitectures involved are: %s" % ",".join(tmparches.keys())
1493                 self.rejects.append(rej)
1494             else:
1495                 # TODO: This is where we'll implement reject vs throw away binaries later
1496                 rej = "Uhm.  I'm meant to throw away the binaries now but that's not implemented yet"
1497                 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1498                 rej += "\nFingerprint: %s" % (fpr.fingerprint)
1499                 self.rejects.append(rej)
1500
1501
1502     def check_upload_blocks(self, fpr, session):
1503         """Check whether any upload blocks apply to this source, source
1504            version, uid / fpr combination"""
1505
1506         def block_rej_template(fb):
1507             rej = 'Manual upload block in place for package %s' % fb.source
1508             if fb.version is not None:
1509                 rej += ', version %s' % fb.version
1510             return rej
1511
1512         for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1513             # version is None if the block applies to all versions
1514             if fb.version is None or fb.version == self.pkg.changes['version']:
1515                 # Check both fpr and uid - either is enough to cause a reject
1516                 if fb.fpr is not None:
1517                     if fb.fpr.fingerprint == fpr.fingerprint:
1518                         self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1519                 if fb.uid is not None:
1520                     if fb.uid == fpr.uid:
1521                         self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1522
1523
1524     def check_dm_upload(self, fpr, session):
1525         # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
1526         ## none of the uploaded packages are NEW
1527         rej = False
1528         for f in self.pkg.files.keys():
1529             if self.pkg.files[f].has_key("byhand"):
1530                 self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
1531                 rej = True
1532             if self.pkg.files[f].has_key("new"):
1533                 self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
1534                 rej = True
1535
1536         if rej:
1537             return
1538
1539         ## the most recent version of the package uploaded to unstable or
1540         ## experimental includes the field "DM-Upload-Allowed: yes" in the source
1541         ## section of its control file
1542         q = session.query(DBSource).filter_by(source=self.pkg.changes["source"])
1543         q = q.join(SrcAssociation)
1544         q = q.join(Suite).filter(Suite.suite_name.in_(['unstable', 'experimental']))
1545         q = q.order_by(desc('source.version')).limit(1)
1546
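        # Roughly the SQL the query above is assumed to boil down to
        # (table and column names are illustrative):
        #
        #   SELECT source.* FROM source
        #     JOIN src_associations ON src_associations.source = source.id
        #     JOIN suite ON suite.id = src_associations.suite
        #    WHERE source.source = :package
        #      AND suite.suite_name IN ('unstable', 'experimental')
        #    ORDER BY source.version DESC LIMIT 1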
1547         r = q.all()
1548
1549         if len(r) != 1:
1550             rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
1551             self.rejects.append(rej)
1552             return
1553
1554         r = r[0]
1555         if not r.dm_upload_allowed:
1556             rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
1557             self.rejects.append(rej)
1558             return
1559
1560         ## the Maintainer: field of the uploaded .changes file corresponds with
1561         ## the owner of the key used (ie, non-developer maintainers may not sponsor
1562         ## uploads)
1563         if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
1564             self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
1565
1566         ## the most recent version of the package uploaded to unstable or
1567         ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
1568         ## non-developer maintainers cannot NMU or hijack packages)
1569
1570         # srcuploaders includes the maintainer
1571         accept = False
1572         for sup in r.srcuploaders:
1573             (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
1574             # Eww - I hope we never have two people with the same name in Debian
1575             if email == fpr.uid.uid or name == fpr.uid.name:
1576                 accept = True
1577                 break
1578
1579         if not accept:
1580             self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
1581             return
1582
1583         ## none of the packages are being taken over from other source packages
1584         for b in self.pkg.changes["binary"].keys():
1585             for suite in self.pkg.changes["distribution"].keys():
1586                 q = session.query(DBSource)
1587                 q = q.join(DBBinary).filter_by(package=b)
1588                 q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)
1589
1590                 for s in q.all():
1591                     if s.source != self.pkg.changes["source"]:
1592                         self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s.source, suite))
1593
1594
1595
1596     def check_transition(self, session):
1597         cnf = Config()
1598
1599         sourcepkg = self.pkg.changes["source"]
1600
1601         # No sourceful upload -> no need to do anything else, direct return
1602         # We only check unstable uploads here, not experimental ones or those
1603         # going to some proposed-updates queue
1604         if "source" not in self.pkg.changes["architecture"] or \
1605            "unstable" not in self.pkg.changes["distribution"]:
1606             return
1607
1608         # Also only check if there is a file defined (and existent) with
1609         # checks.
1610         transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1611         if transpath == "" or not os.path.exists(transpath):
1612             return
1613
1614         # Parse the yaml file
1615         sourcefile = file(transpath, 'r')
1616         sourcecontent = sourcefile.read()
1617         try:
1618             transitions = yaml.load(sourcecontent)
1619         except yaml.YAMLError, msg:
1620             # This shouldn't happen, there is a wrapper to edit the file which
1621             # checks it, but we prefer to be safe rather than end up rejecting
1622             # everything.
1623             utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1624             return
1625
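        # A transitions entry is assumed to look like this (only the keys
        # read below are shown; the values are illustrative):
        #
        #   apt:
        #       reason: "apt 0.7 is heading to testing"
        #       source: apt
        #       new: 0.7.20
        #       rm: Some Release-Team Member
        #       packages:
        #           - apt
        #           - python-apt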
1626         # Now look through all defined transitions
1627         for trans in transitions:
1628             t = transitions[trans]
1629             source = t["source"]
1630             expected = t["new"]
1631
1632             # Will be None if nothing is in testing.
1633             current = get_source_in_suite(source, "testing", session)
1634             if current is not None:
1635                 compare = apt_pkg.VersionCompare(current.version, expected)
1636
1637             if current is None or compare < 0:
1638                 # This is still valid, the current version in testing is older than
1639                 # the new version we wait for, or there is none in testing yet
1640
1641                 # Check if the source we look at is affected by this.
1642                 if sourcepkg in t['packages']:
1643                     # The source is affected, let's reject it.
1644
1645                     rejectmsg = "%s: part of the %s transition.\n\n" % (
1646                         sourcepkg, trans)
1647
1648                     if current is not None:
1649                         currentlymsg = "at version %s" % (current.version)
1650                     else:
1651                         currentlymsg = "not present in testing"
1652
1653                     rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1654
1655                     rejectmsg += "\n".join(textwrap.wrap("""Your package
1656 is part of a testing transition designed to get %s migrated (it is
1657 currently %s, we need version %s).  This transition is managed by the
1658 Release Team, and %s is the Release-Team member responsible for it.
1659 Please mail debian-release@lists.debian.org or contact %s directly if you
1660 need further assistance.  You might want to upload to experimental until this
1661 transition is done."""
1662                             % (source, currentlymsg, expected, t["rm"], t["rm"])))
1663
1664                     self.rejects.append(rejectmsg)
1665                     return
1666
1667     ###########################################################################
1668     # End check_signed_by_key checks
1669     ###########################################################################
1670
1671     def build_summaries(self):
1672         """ Build a summary of changes the upload introduces. """
1673
1674         (byhand, new, summary, override_summary) = self.pkg.file_summary()
1675
1676         short_summary = summary
1677
1678         # This is for direport's benefit...
1679         f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1680
1681         if byhand or new:
1682             summary += "Changes: " + f
1683
1684         summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1685
1686         summary += self.announce(short_summary, 0)
1687
1688         return (summary, short_summary)
1689
1690     ###########################################################################
1691
1692     def close_bugs(self, summary, action):
1693         """
1694         Send mail to close bugs as instructed by the closes field in the changes file.
1695         Also add a line to summary if any work was done.
1696
1697         @type summary: string
1698         @param summary: summary text, as given by L{build_summaries}
1699
1700         @type action: bool
1701         @param action: If set to false, no real action will be done.
1702
1703         @rtype: string
1704         @return: summary. If action was taken, extended by the list of closed bugs.
1705
1706         """
1707
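        # utils.TemplateSubst is assumed to do plain token substitution:
        # every "__KEY__" in the template is replaced by Subst["__KEY__"].
        # An illustrative sketch (not dak's actual implementation):
        #
        #   def template_subst_sketch(subst, template_path):
        #       message = open(template_path).read()
        #       for key, value in subst.items():
        #           message = message.replace(key, str(value))
        #       return message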
1708         template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1709
1710         bugs = self.pkg.changes["closes"].keys()
1711
1712         if not bugs:
1713             return summary
1714
1715         bugs.sort()
1716         summary += "Closing bugs: "
1717         for bug in bugs:
1718             summary += "%s " % (bug)
1719             if action:
1720                 self.update_subst()
1721                 self.Subst["__BUG_NUMBER__"] = bug
1722                 if self.pkg.changes["distribution"].has_key("stable"):
1723                     self.Subst["__STABLE_WARNING__"] = """
1724 Note that this package is not part of the released stable Debian
1725 distribution.  It may have dependencies on other unreleased software,
1726 or other instabilities.  Please take care if you wish to install it.
1727 The update will eventually make its way into the next released Debian
1728 distribution."""
1729                 else:
1730                     self.Subst["__STABLE_WARNING__"] = ""
1731                 mail_message = utils.TemplateSubst(self.Subst, template)
1732                 utils.send_mail(mail_message)
1733
1734                 # Clear up after ourselves
1735                 del self.Subst["__BUG_NUMBER__"]
1736                 del self.Subst["__STABLE_WARNING__"]
1737
1738         if action and self.logger:
1739             self.logger.log(["closing bugs"] + bugs)
1740
1741         summary += "\n"
1742
1743         return summary
1744
1745     ###########################################################################
1746
1747     def announce(self, short_summary, action):
1748         """
1749         Send an announce mail about a new upload.
1750
1751         @type short_summary: string
1752         @param short_summary: Short summary text to include in the mail
1753
1754         @type action: bool
1755         @param action: If set to false, no real action will be done.
1756
1757         @rtype: string
1758         @return: Text string describing the action taken.
1759
1760         """
1761
1762         cnf = Config()
1763         announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
1764
1765         # Only do announcements for source uploads with a recent dpkg-dev installed
1766         if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
1767            self.pkg.changes["architecture"].has_key("source"):
1768             return ""
1769
1770         lists_done = {}
1771         summary = ""
1772
1773         self.Subst["__SHORT_SUMMARY__"] = short_summary
1774
1775         for dist in self.pkg.changes["distribution"].keys():
1776             announce_list = cnf.Find("Suite::%s::Announce" % (dist))
1777             if announce_list == "" or lists_done.has_key(announce_list):
1778                 continue
1779
1780             lists_done[announce_list] = 1
1781             summary += "Announcing to %s\n" % (announce_list)
1782
1783             if action:
1784                 self.update_subst()
1785                 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
1786                 if cnf.get("Dinstall::TrackingServer") and \
1787                    self.pkg.changes["architecture"].has_key("source"):
1788                     trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
1789                     self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
1790
1791                 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
1792                 utils.send_mail(mail_message)
1793
1794                 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
1795
1796         if cnf.FindB("Dinstall::CloseBugs"):
1797             summary = self.close_bugs(summary, action)
1798
1799         del self.Subst["__SHORT_SUMMARY__"]
1800
1801         return summary
1802
1803     ###########################################################################
1804     @session_wrapper
1805     def accept (self, summary, short_summary, session=None):
1806         """
1807         Accept an upload.
1808
1809         This moves all files referenced from the .changes into the pool,
1810         sends the accepted mail, announces to lists, closes bugs and
1811         also checks for override disparities. If enabled it will write out
1812         the version history for the BTS Version Tracking and will finally call
1813         L{queue_build}.
1814
1815         @type summary: string
1816         @param summary: Summary text
1817
1818         @type short_summary: string
1819         @param short_summary: Short summary
1820         """
1821
1822         cnf = Config()
1823         stats = SummaryStats()
1824
1825         print "Installing."
1826         self.logger.log(["installing changes", self.pkg.changes_file])
1827
1828         poolfiles = []
1829
1830         # Add the .dsc file to the DB first
1831         for newfile, entry in self.pkg.files.items():
1832             if entry["type"] == "dsc":
1833                 source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
1834                 for j in pfs:
1835                     poolfiles.append(j)
1836
1837         # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
1838         for newfile, entry in self.pkg.files.items():
1839             if entry["type"] == "deb":
1840                 poolfiles.append(add_deb_to_db(self, newfile, session))
1841
1842         # If this is a sourceful diff only upload that is moving
1843         # cross-component we need to copy the .orig files into the new
1844         # component too for the same reasons as above.
1845         # XXX: mhy: I think this should be in add_dsc_to_db
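        # For reference, utils.poolify() is assumed to map a (source,
        # component) pair onto the standard pool layout, e.g.:
        #
        #   poolify("dak", "main")    -> "pool/main/d/dak/"
        #   poolify("libfoo", "main") -> "pool/main/libf/libfoo/"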
1846         if self.pkg.changes["architecture"].has_key("source"):
1847             for orig_file in self.pkg.orig_files.keys():
1848                 if not self.pkg.orig_files[orig_file].has_key("id"):
1849                     continue # Skip if it's not in the pool
1850                 orig_file_id = self.pkg.orig_files[orig_file]["id"]
1851                 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
1852                     continue # Skip if the location didn't change
1853
1854                 # Do the move
1855                 oldf = get_poolfile_by_id(orig_file_id, session)
1856                 old_filename = os.path.join(oldf.location.path, oldf.filename)
1857                 old_dat = {'size': oldf.filesize,   'md5sum': oldf.md5sum,
1858                            'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
1859
1860                 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
1861
1862                 # TODO: Care about size/md5sum collisions etc
1863                 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
1864
1865                 # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
1866                 if newf is None:
1867                     utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
1868                     newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
1869
1870                     session.flush()
1871
1872                     # Don't reference the old file from this changes
1873                     for p in poolfiles:
1874                         if p.file_id == oldf.file_id:
1875                             poolfiles.remove(p)
1876
1877                     poolfiles.append(newf)
1878
1879                     # Fix up the DSC references
1880                     toremove = []
1881
1882                     for df in source.srcfiles:
1883                         if df.poolfile.file_id == oldf.file_id:
1884                             # Add a new DSC entry and mark the old one for deletion
1885                             # Don't do it in the loop so we don't change the thing we're iterating over
1886                             newdscf = DSCFile()
1887                             newdscf.source_id = source.source_id
1888                             newdscf.poolfile_id = newf.file_id
1889                             session.add(newdscf)
1890
1891                             toremove.append(df)
1892
1893                     for df in toremove:
1894                         session.delete(df)
1895
1896                     # Flush our changes
1897                     session.flush()
1898
1899                     # Make sure that our source object is up-to-date
1900                     session.expire(source)
1901
1902         # Add changelog information to the database
1903         self.store_changelog()
1904
1905         # Install the files into the pool
1906         for newfile, entry in self.pkg.files.items():
1907             destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
1908             utils.move(newfile, destination)
1909             self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
1910             stats.accept_bytes += float(entry["size"])
1911
1912         # Copy the .changes file across for suites which need it.
1913         copy_changes = {}
1914         for suite_name in self.pkg.changes["distribution"].keys():
1915             if cnf.has_key("Suite::%s::CopyChanges" % (suite_name)):
1916                 copy_changes[cnf["Suite::%s::CopyChanges" % (suite_name)]] = ""
1917
1918         for dest in copy_changes.keys():
1919             utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
1920
1921         # We're done - commit the database changes
1922         session.commit()
1923         # Our SQL session will automatically start a new transaction after
1924         # the last commit
1925
1926         # Move the .changes into the 'done' directory
1927         utils.move(self.pkg.changes_file,
1928                    os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))
1929
1930         if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
1931             UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
1932
1933         self.update_subst()
1934         self.Subst["__SUITE__"] = ""
1935         self.Subst["__SUMMARY__"] = summary
1936         mail_message = utils.TemplateSubst(self.Subst,
1937                                            os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
1938         utils.send_mail(mail_message)
1939         self.announce(short_summary, 1)
1940
1941         ## Helper stuff for DebBugs Version Tracking
1942         if cnf.Find("Dir::Queue::BTSVersionTrack"):
1943             if self.pkg.changes["architecture"].has_key("source"):
1944                 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
1945                 version_history = os.fdopen(fd, 'w')
1946                 version_history.write(self.pkg.dsc["bts changelog"])
1947                 version_history.close()
1948                 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
1949                                       self.pkg.changes_file[:-8]+".versions")
1950                 os.rename(temp_filename, filename)
1951                 os.chmod(filename, 0644)
1952
1953             # Write out the binary -> source mapping.
1954             (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
1955             debinfo = os.fdopen(fd, 'w')
1956             for name, entry in sorted(self.pkg.files.items()):
1957                 if entry["type"] == "deb":
1958                     line = " ".join([entry["package"], entry["version"],
1959                                      entry["architecture"], entry["source package"],
1960                                      entry["source version"]])
1961                     debinfo.write(line+"\n")
1962             debinfo.close()
1963             filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
1964                                   self.pkg.changes_file[:-8]+".debinfo")
1965             os.rename(temp_filename, filename)
1966             os.chmod(filename, 0644)
1967
1968         session.commit()
1969
1970         # Set up our copy queues (e.g. buildd queues)
1971         for suite_name in self.pkg.changes["distribution"].keys():
1972             suite = get_suite(suite_name, session)
1973             for q in suite.copy_queues:
1974                 for f in poolfiles:
1975                     q.add_file_from_pool(f)
1976
1977         session.commit()
1978
1979         # Finally...
1980         stats.accept_count += 1
1981
1982     def check_override(self):
1983         """
1984         Checks override entries for validity. Mails "Override disparity" warnings,
1985         if that feature is enabled.
1986
1987         Abandons the check if
1988           - override disparity checks are disabled
1989           - mail sending is disabled
1990         """
1991
1992         cnf = Config()
1993
1994         # Abandon the check if override disparity checks have been disabled
1995         if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
1996             return
1997
1998         summary = self.pkg.check_override()
1999
2000         if summary == "":
2001             return
2002
2003         overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2004
2005         self.update_subst()
2006         self.Subst["__SUMMARY__"] = summary
2007         mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2008         utils.send_mail(mail_message)
2009         del self.Subst["__SUMMARY__"]
2010
2011     ###########################################################################
2012
2013     def remove(self, from_dir=None):
2014         """
2015         Used (for instance) in p-u to remove the package from unchecked
2016
2017         Also removes the package from holding area.
2018         """
2019         if from_dir is None:
2020             from_dir = self.pkg.directory
2021         h = Holding()
2022
2023         for f in self.pkg.files.keys():
2024             os.unlink(os.path.join(from_dir, f))
2025             if os.path.exists(os.path.join(h.holding_dir, f)):
2026                 os.unlink(os.path.join(h.holding_dir, f))
2027
2028         os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2029         if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2030             os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2031
2032     ###########################################################################
2033
2034     def move_to_queue (self, queue):
2035         """
2036         Move files to a destination queue using the permissions in the table
2037         """
2038         h = Holding()
2039         utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2040                    queue.path, perms=int(queue.change_perms, 8))
2041         for f in self.pkg.files.keys():
2042             utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
2043
2044     ###########################################################################
2045
2046     def force_reject(self, reject_files):
2047         """
2048         Forcefully move files from the current directory to the
2049         reject directory.  If any file already exists in the reject
2050         directory it will be moved to the morgue to make way for
2051         the new file.
2052
2053         @type reject_files: dict
2054         @param reject_files: file dictionary
2055
2056         """
2057
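        # A note on the os.open() flags used below: O_CREAT | O_EXCL makes
        # the call fail with EEXIST instead of silently reusing a file
        # someone else created, which is what lets us detect collisions:
        #
        #   fd = os.open(dest, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
        #   # raises OSError with e.errno == errno.EEXIST if dest exists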
2058         cnf = Config()
2059
2060         for file_entry in reject_files:
2061             # Skip any files which don't exist or which we don't have permission to copy.
2062             if os.access(file_entry, os.R_OK) == 0:
2063                 continue
2064
2065             dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
2066
2067             try:
2068                 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
2069             except OSError, e:
2070                 # File exists?  Let's find a new name by adding a number
2071                 if e.errno == errno.EEXIST:
2072                     try:
2073                         dest_file = utils.find_next_free(dest_file, 255)
2074                     except NoFreeFilenameError:
2075                         # Something's either gone badly Pete Tong, or
2076                         # someone is trying to exploit us.
2077                         utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))
2078                         return
2079
2080                     # Make sure we really got it
2081                     try:
2082                         dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2083                     except OSError, e:
2084                         # Likewise
2085                         utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
2086                         return
2087                 else:
2088                     raise
2089             # If we got here, we own the destination file, so we can
2090             # safely overwrite it.
2091             utils.move(file_entry, dest_file, 1, perms=0660)
2092             os.close(dest_fd)
2093
2094     ###########################################################################
2095     def do_reject (self, manual=0, reject_message="", notes=""):
2096         """
2097         Reject an upload. If C{manual} is true and no reject message was
2098         given, spawn an editor so the user can write one.
2099
2100         @type manual: bool
2101         @param manual: manual or automated rejection
2102
2103         @type reject_message: string
2104         @param reject_message: A reject message
2105
2106         @return: 0 on success; 1 if a manual rejection was abandoned
2107
2108         """
2109         # If we weren't given a manual rejection message, spawn an
2110         # editor so the user can add one in...
2111         if manual and not reject_message:
2112             (fd, temp_filename) = utils.temp_filename()
2113             temp_file = os.fdopen(fd, 'w')
2114             if len(notes) > 0:
2115                 for note in notes:
2116                     temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
2117                                     % (note.author, note.version, note.notedate, note.comment))
2118             temp_file.close()
2119             editor = os.environ.get("EDITOR","vi")
2120             answer = 'E'
2121             while answer == 'E':
2122                 os.system("%s %s" % (editor, temp_filename))
2123                 temp_fh = utils.open_file(temp_filename)
2124                 reject_message = "".join(temp_fh.readlines())
2125                 temp_fh.close()
2126                 print "Reject message:"
2127                 print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
2128                 prompt = "[R]eject, Edit, Abandon, Quit ?"
2129                 answer = "XXX"
2130                 while prompt.find(answer) == -1:
2131                     answer = utils.our_raw_input(prompt)
2132                     m = re_default_answer.search(prompt)
2133                     if answer == "":
2134                         answer = m.group(1)
2135                     answer = answer[:1].upper()
2136             os.unlink(temp_filename)
2137             if answer == 'A':
2138                 return 1
2139             elif answer == 'Q':
2140                 sys.exit(0)
2141
2142         print "Rejecting.\n"
2143
2144         cnf = Config()
2145
2146         reason_filename = self.pkg.changes_file[:-8] + ".reason"
2147         reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2148
2149         # Move all the files into the reject directory
2150         reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2151         self.force_reject(reject_files)
2152
2153         # If we fail here someone is probably trying to exploit the race
2154         # so let's just raise an exception ...
2155         if os.path.exists(reason_filename):
2156             os.unlink(reason_filename)
2157         reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2158
2159         rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
2160
2161         self.update_subst()
2162         if not manual:
2163             self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2164             self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2165             self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2166             os.write(reason_fd, reject_message)
2167             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2168         else:
2169             # Build up the rejection email
2170             user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2171             self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2172             self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2173             self.Subst["__REJECT_MESSAGE__"] = ""
2174             self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2175             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2176             # Write the rejection email out as the <foo>.reason file
2177             os.write(reason_fd, reject_mail_message)
2178
2179         del self.Subst["__REJECTOR_ADDRESS__"]
2180         del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2181         del self.Subst["__CC__"]
2182
2183         os.close(reason_fd)
2184
2185         # Send the rejection mail
2186         utils.send_mail(reject_mail_message)
2187
2188         if self.logger:
2189             self.logger.log(["rejected", self.pkg.changes_file])
2190
2191         return 0
2192
2193     ################################################################################
2194     def in_override_p(self, package, component, suite, binary_type, filename, session):
2195         """
2196         Check if a package already has override entries in the DB
2197
2198         @type package: string
2199         @param package: package name
2200
2201         @type component: string
2202         @param component: component name
2203
2204         @type suite: string
2205         @param suite: suite name
2206
2207         @type binary_type: string
2208         @param binary_type: type of the package
2209
2210         @type filename: string
2211         @param filename: filename we check
2212
2213         @return: the database result. But no one cares anyway.
2214
2215         """
2216
2217         cnf = Config()
2218
2219         if binary_type == "": # must be source
2220             file_type = "dsc"
2221         else:
2222             file_type = binary_type
2223
2224         # Override suite name; used for example with proposed-updates
2225         if cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
2226             suite = cnf["Suite::%s::OverrideSuite" % (suite)]
2227
2228         result = get_override(package, suite, component, file_type, session)
2229
2230         # If checking for a source package fall back on the binary override type
2231         if file_type == "dsc" and len(result) < 1:
2232             result = get_override(package, suite, component, ['deb', 'udeb'], session)
2233
2234         # Remember the section and priority so we can check them later if appropriate
2235         if len(result) > 0:
2236             result = result[0]
2237             self.pkg.files[filename]["override section"] = result.section.section
2238             self.pkg.files[filename]["override priority"] = result.priority.priority
2239             return result
2240
2241         return None
2242
2243     ################################################################################
2244     def get_anyversion(self, sv_list, suite):
2245         """
2246         @type sv_list: list
2247         @param sv_list: list of (suite, version) tuples to check
2248
2249         @type suite: string
2250         @param suite: suite name
2251
2252         Return the highest version in C{sv_list} for C{suite} or any suite it enhances, or None.
2253         """
2254         Cnf = Config()
2255         anyversion = None
2256         anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
2257         for (s, v) in sv_list:
2258             if s in [ x.lower() for x in anysuite ]:
2259                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2260                     anyversion = v
2261
2262         return anyversion
2263
2264     ################################################################################
2265
2266     def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2267         """
2268         @type sv_list: list
2269         @param sv_list: list of (suite, version) tuples to check
2270
2271         @type filename: string
2272         @param filename: XXX
2273
2274         @type new_version: string
2275         @param new_version: XXX
2276
2277         Ensure versions are newer than existing packages in target
2278         suites and that cross-suite version checking rules as
2279         set out in the conf file are satisfied.
2280         """
2281
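        # The rules come from apt-style configuration along these lines
        # (the suite names are illustrative):
        #
        #   Suite::unstable::VersionChecks {
        #     MustBeNewerThan { stable; testing; };
        #     MustBeOlderThan { experimental; };
        #   };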
2282         cnf = Config()
2283
2284         # Check versions for each target suite
2285         for target_suite in self.pkg.changes["distribution"].keys():
2286             must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
2287             must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
2288
2289             # Enforce "must be newer than target suite" even if conffile omits it
2290             if target_suite not in must_be_newer_than:
2291                 must_be_newer_than.append(target_suite)
2292
2293             for (suite, existent_version) in sv_list:
2294                 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
2295
2296                 if suite in must_be_newer_than and sourceful and vercmp < 1:
2297                     self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2298
2299                 if suite in must_be_older_than and vercmp > -1:
2300                     cansave = 0
2301
2302                     if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2303                         # we really use the other suite, ignoring the conflicting one ...
2304                         addsuite = self.pkg.changes["distribution-version"][suite]
2305
2306                         add_version = self.get_anyversion(sv_list, addsuite)
2307                         target_version = self.get_anyversion(sv_list, target_suite)
2308
2309                         if not add_version:
2310                             # not add_version can only happen if we map to a suite
2311                             # that doesn't enhance the suite we're propup'ing from.
2312                             # so "propup-ver x a b c; map a d" is a problem only if
2313                             # d doesn't enhance a.
2314                             #
2315                             # i think we could always propagate in this case, rather
2316                             # than complaining. either way, this isn't a REJECT issue
2317                             #
2318                             # And - we really should complain to the dorks who configured dak
2319                             self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyway" % (suite, addsuite))
2320                             self.pkg.changes.setdefault("propdistribution", {})
2321                             self.pkg.changes["propdistribution"][addsuite] = 1
2322                             cansave = 1
2323                         elif not target_version:
2324                             # not target_version is true when the package is NEW
2325                             # we could just stick with the "...old version..." REJECT
2326                             # for this, I think.
2327                             self.rejects.append("Won't propagate NEW packages.")
2328                         elif apt_pkg.VersionCompare(new_version, add_version) < 0:
2329                             # propagation would be redundant. no need to reject though.
2330                             self.warnings.append("ignoring version conflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2331                             cansave = 1
2332                         elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2333                              apt_pkg.VersionCompare(add_version, target_version) >= 0:
2334                             # propagate!!
2335                             self.warnings.append("Propagating upload to %s" % (addsuite))
2336                             self.pkg.changes.setdefault("propdistribution", {})
2337                             self.pkg.changes["propdistribution"][addsuite] = 1
2338                             cansave = 1
2339
2340                     if not cansave:
2341                         self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2342
2343     ################################################################################
2344     def check_binary_against_db(self, filename, session):
2345         # Ensure version is sane
2346         q = session.query(BinAssociation)
2347         q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[filename]["package"])
2348         q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[filename]["architecture"], 'all']))
2349
2350         self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
2351                                        filename, self.pkg.files[filename]["version"], sourceful=False)
2352
2353         # Check for any existing copies of the file
2354         q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2355         q = q.filter_by(version=self.pkg.files[filename]["version"])
2356         q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2357
2358         if q.count() > 0:
2359             self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
2360
2361     ################################################################################
2362
2363     def check_source_against_db(self, filename, session):
2364         source = self.pkg.dsc.get("source")
2365         version = self.pkg.dsc.get("version")
2366
2367         # Ensure version is sane
2368         q = session.query(SrcAssociation)
2369         q = q.join(DBSource).filter(DBSource.source==source)
2370
2371         self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
2372                                        filename, version, sourceful=True)
2373
2374     ################################################################################
2375     def check_dsc_against_db(self, filename, session):
2376         """
2377
2378         @warning: NB: this function can remove entries from the 'files' index [if
2379          the orig tarball is a duplicate of the one in the archive]; if
2380          you're iterating over 'files' and call this function as part of
2381          the loop, be sure to add a check to the top of the loop to
2382          ensure you haven't just tried to dereference the deleted entry.
2383
2384         """
2385
2386         Cnf = Config()
2387         self.pkg.orig_files = {} # XXX: do we need to clear it?
2388         orig_files = self.pkg.orig_files
2389
2390         # Try and find all files mentioned in the .dsc.  This has
2391         # to work harder to cope with the multiple possible
2392         # locations of an .orig.tar.gz.
2393         # The ordering on the select is needed to pick the newest orig
2394         # when it exists in multiple places.
2395         for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2396             found = None
2397             if self.pkg.files.has_key(dsc_name):
2398                 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2399                 actual_size = int(self.pkg.files[dsc_name]["size"])
2400                 found = "%s in incoming" % (dsc_name)
2401
2402                 # Check the file does not already exist in the archive
2403                 ql = get_poolfile_like_name(dsc_name, session)
2404
2405                 # Strip out anything that isn't '%s' or '/%s$'
2406                 # Rebuild the list rather than remove() while iterating over
2407                 # it, which would skip entries.
2408                 ql = [i for i in ql if i.filename.endswith(dsc_name)]
2409
2410                 # "[dak] has not broken them.  [dak] has fixed a
2411                 # brokenness.  Your crappy hack exploited a bug in
2412                 # the old dinstall."
2413                 #
2414                 # "(Come on!  I thought it was always obvious that
2415                 # one just doesn't release different files with
2416                 # the same name and version.)"
2417                 #                        -- ajk@ on d-devel@l.d.o
2418
2419                 if len(ql) > 0:
2420                     # Ignore exact matches for .orig.tar.gz
2421                     match = 0
2422                     if re_is_orig_source.match(dsc_name):
2423                         for i in ql:
2424                             if self.pkg.files.has_key(dsc_name) and \
2425                                int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2426                                self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2427                                 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2428                                 # TODO: Don't delete the entry, just mark it as not needed
2429                                 # This would fix the stupidity of changing something we often iterate over
2430                                 # whilst we're doing it
2431                                 del self.pkg.files[dsc_name]
2432                                 dsc_entry["files id"] = i.file_id
2433                                 if not orig_files.has_key(dsc_name):
2434                                     orig_files[dsc_name] = {}
2435                                 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2436                                 match = 1
2437
2438                                 # Don't bitch that we couldn't find this file later
2439                                 try:
2440                                     self.later_check_files.remove(dsc_name)
2441                                 except ValueError:
2442                                     pass
2443
2444
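                    # One way to realise the TODO above (a sketch, not what
                    # the code currently does): keep the entry but flag it,
                    #
                    #     self.pkg.files[dsc_name]["skip"] = 1
                    #
                    # and have the iteration sites test that flag instead of
                    # re-checking has_key().
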
                    if not match:
                        self.rejects.append("cannot overwrite existing copy of '%s' already in the archive." % (dsc_name))
2447
2448             elif re_is_orig_source.match(dsc_name):
2449                 # Check in the pool
2450                 ql = get_poolfile_like_name(dsc_name, session)
2451
                # Keep only matches whose filename actually ends with
                # dsc_name, filtering into a new list to avoid the
                # remove()-while-iterating bug.
                # TODO: Shouldn't we just search for things which end with
                # our string explicitly in the SQL?
                ql = [ i for i in ql if i.filename.endswith(dsc_name) ]
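                # A sketch of that TODO (untested; assumes the PoolFile
                # mapping that daklib.dbconn exports):
                #
                #     ql = session.query(PoolFile).filter(
                #              PoolFile.filename.endswith(dsc_name)).all()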
2457
2458                 if len(ql) > 0:
2459                     # Unfortunately, we may get more than one match here if,
2460                     # for example, the package was in potato but had an -sa
2461                     # upload in woody.  So we need to choose the right one.
2462
2463                     # default to something sane in case we don't match any or have only one
2464                     x = ql[0]
2465
2466                     if len(ql) > 1:
2467                         for i in ql:
2468                             old_file = os.path.join(i.location.path, i.filename)
2469                             old_file_fh = utils.open_file(old_file)
2470                             actual_md5 = apt_pkg.md5sum(old_file_fh)
2471                             old_file_fh.close()
2472                             actual_size = os.stat(old_file)[stat.ST_SIZE]
2473                             if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2474                                 x = i
2475
                    # NB: use the chosen entry 'x' here, not the stale loop
                    # variable 'i'
                    old_file = os.path.join(x.location.path, x.filename)
2477                     old_file_fh = utils.open_file(old_file)
2478                     actual_md5 = apt_pkg.md5sum(old_file_fh)
2479                     old_file_fh.close()
2480                     actual_size = os.stat(old_file)[stat.ST_SIZE]
2481                     found = old_file
2482                     suite_type = x.location.archive_type
2483                     # need this for updating dsc_files in install()
2484                     dsc_entry["files id"] = x.file_id
2485                     # See install() in process-accepted...
2486                     if not orig_files.has_key(dsc_name):
2487                         orig_files[dsc_name] = {}
2488                     orig_files[dsc_name]["id"] = x.file_id
2489                     orig_files[dsc_name]["path"] = old_file
2490                     orig_files[dsc_name]["location"] = x.location.location_id
2491                 else:
2492                     # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2493                     # Not there? Check the queue directories...
2494                     for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2495                         if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2496                             continue
2497                         in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2498                         if os.path.exists(in_otherdir):
2499                             in_otherdir_fh = utils.open_file(in_otherdir)
2500                             actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2501                             in_otherdir_fh.close()
2502                             actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2503                             found = in_otherdir
2504                             if not orig_files.has_key(dsc_name):
2505                                 orig_files[dsc_name] = {}
2506                             orig_files[dsc_name]["path"] = in_otherdir
2507
2508                     if not found:
2509                         self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2510                         continue
2511             else:
2512                 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2513                 continue
2514             if actual_md5 != dsc_entry["md5sum"]:
2515                 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2516             if actual_size != int(dsc_entry["size"]):
2517                 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2518
2519     ################################################################################
2520     # This is used by process-new and process-holding to recheck a changes file
2521     # at the time we're running.  It mainly wraps various other internal functions
2522     # and is similar to accepted_checks - these should probably be tidied up
2523     # and combined
2524     def recheck(self, session):
2525         cnf = Config()
2526         for f in self.pkg.files.keys():
            # The .orig.tar.gz can disappear out from under us if it's a
            # duplicate of one in the archive.
2529             if not self.pkg.files.has_key(f):
2530                 continue
2531
2532             entry = self.pkg.files[f]
2533
2534             # Check that the source still exists
2535             if entry["type"] == "deb":
2536                 source_version = entry["source version"]
2537                 source_package = entry["source package"]
2538                 if not self.pkg.changes["architecture"].has_key("source") \
2539                    and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
2540                     source_epochless_version = re_no_epoch.sub('', source_version)
2541                     dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2542                     found = False
2543                     for q in ["Embargoed", "Unembargoed", "Newstage"]:
2544                         if cnf.has_key("Dir::Queue::%s" % (q)):
                            if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (q)], dsc_filename)):
2546                                 found = True
2547                     if not found:
2548                         self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2549
2550             # Version and file overwrite checks
2551             if entry["type"] == "deb":
2552                 self.check_binary_against_db(f, session)
2553             elif entry["type"] == "dsc":
2554                 self.check_source_against_db(f, session)
2555                 self.check_dsc_against_db(f, session)
2556
2557     ################################################################################
2558     def accepted_checks(self, overwrite_checks, session):
        # Recheck anything that relies on the database, since that's not
        # frozen between accept and our run time when called from p-a.
2561
2562         # overwrite_checks is set to False when installing to stable/oldstable
2563
        propagate = {}
        nopropagate = {}
2566
2567         # Find the .dsc (again)
2568         dsc_filename = None
2569         for f in self.pkg.files.keys():
2570             if self.pkg.files[f]["type"] == "dsc":
2571                 dsc_filename = f
2572
2573         for checkfile in self.pkg.files.keys():
            # The .orig.tar.gz can disappear out from under us if it's a
            # duplicate of one in the archive.
2576             if not self.pkg.files.has_key(checkfile):
2577                 continue
2578
2579             entry = self.pkg.files[checkfile]
2580
2581             # Check that the source still exists
2582             if entry["type"] == "deb":
2583                 source_version = entry["source version"]
2584                 source_package = entry["source package"]
            if not self.pkg.changes["architecture"].has_key("source") \
               and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
2587                     self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2588
2589             # Version and file overwrite checks
2590             if overwrite_checks:
2591                 if entry["type"] == "deb":
2592                     self.check_binary_against_db(checkfile, session)
2593                 elif entry["type"] == "dsc":
2594                     self.check_source_against_db(checkfile, session)
2595                     self.check_dsc_against_db(dsc_filename, session)
2596
            # Propagate to each suite listed in propdistribution, but only
            # if the package is in the override tables there:
            for suite in self.pkg.changes.get("propdistribution", {}).keys():
                if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
                    propagate[suite] = 1
                else:
                    nopropagate[suite] = 1
2603
        for suite in propagate.keys():
            if suite in nopropagate:
2606                 continue
2607             self.pkg.changes["distribution"][suite] = 1
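        # Net effect (illustration; the suite name is hypothetical): an
        # upload whose changes carry
        # propdistribution = { 'testing-proposed-updates': 1 } is also
        # installed into that suite, but a single file without an override
        # there vetoes the whole suite.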
2608
        for checkfile in self.pkg.files.keys():
            # Look up this file's own entry; the previous loop left 'entry'
            # pointing at whichever file happened to come last.
            entry = self.pkg.files[checkfile]
            # Check the package is still in the override tables
            for suite in self.pkg.changes["distribution"].keys():
                if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
                    self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2614
2615     ################################################################################
    # This is not really a reject, but an unaccept; however, since a) the
    # code for that is non-trivial (reopen bugs, unannounce etc.) and b) this
    # should be extremely rare, for now we'll go with whining at our admin
    # folks...
2619
2620     def do_unaccept(self):
2621         cnf = Config()
2622
2623         self.update_subst()
2624         self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2625         self.Subst["__REJECT_MESSAGE__"] = self.package_info()
2626         self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2627         self.Subst["__BCC__"] = "X-DAK: dak process-accepted"
2628         if cnf.has_key("Dinstall::Bcc"):
2629             self.Subst["__BCC__"] += "\nBcc: %s" % (cnf["Dinstall::Bcc"])
2630
2631         template = os.path.join(cnf["Dir::Templates"], "process-accepted.unaccept")
2632
2633         reject_mail_message = utils.TemplateSubst(self.Subst, template)
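        # TemplateSubst() performs simple placeholder substitution: each
        # __KEY__ set in self.Subst above is replaced by its value in the
        # process-accepted.unaccept template (e.g. __REJECT_MESSAGE__
        # becomes the output of package_info()).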
2634
2635         # Write the rejection email out as the <foo>.reason file
2636         reason_filename = os.path.basename(self.pkg.changes_file[:-8]) + ".reason"
2637         reject_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2638
        # If the file reappears between the unlink and the O_EXCL open
        # below, someone is probably trying to exploit the race, so let's
        # just let the open raise an exception ...
2641         if os.path.exists(reject_filename):
2642             os.unlink(reject_filename)
2643
2644         fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2645         os.write(fd, reject_mail_message)
2646         os.close(fd)
2647
2648         utils.send_mail(reject_mail_message)
2649
2650         del self.Subst["__REJECTOR_ADDRESS__"]
2651         del self.Subst["__REJECT_MESSAGE__"]
2652         del self.Subst["__CC__"]
2653
2654     ################################################################################
2655     # If any file of an upload has a recent mtime then chances are good
2656     # the file is still being uploaded.
2657
2658     def upload_too_new(self):
2659         cnf = Config()
2660         too_new = False
2661         # Move back to the original directory to get accurate time stamps
2662         cwd = os.getcwd()
2663         os.chdir(self.pkg.directory)
2664         file_list = self.pkg.files.keys()
2665         file_list.extend(self.pkg.dsc_files.keys())
2666         file_list.append(self.pkg.changes_file)
2667         for f in file_list:
            try:
                last_modified = time.time() - os.path.getmtime(f)
                if last_modified < int(cnf["Dinstall::SkipTime"]):
                    too_new = True
                    break
            except OSError:
                # The file may have vanished from under us; that's fine,
                # it just doesn't count as "too new"
                pass
2675
2676         os.chdir(cwd)
2677         return too_new
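
    # Sketch of the intended use (hypothetical caller):
    #
    #     if upload.upload_too_new():
    #         continue    # still being uploaded; leave it for a later run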
2678
2679     def store_changelog(self):
2680
2681         # Skip binary-only upload if it is not a bin-NMU
2682         if not self.pkg.changes['architecture'].has_key('source'):
2683             from daklib.regexes import re_bin_only_nmu
2684             if not re_bin_only_nmu.search(self.pkg.changes['version']):
2685                 return
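
        # (A bin-NMU is recognisable by its version, e.g. '1.2-3+b1'; the
        # '+bN' suffix is what re_bin_only_nmu is expected to match, so
        # binary-only uploads without it are skipped above.)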
2686
2687         session = DBConn().session()
2688
2689         # Check if upload already has a changelog entry
2690         query = """SELECT changelog_id FROM changes WHERE source = :source
2691                    AND version = :version AND architecture = :architecture AND changelog_id != 0"""
2692         if session.execute(query, {'source': self.pkg.changes['source'], \
2693                                    'version': self.pkg.changes['version'], \
2694                                    'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
2695             session.commit()
2696             return
2697
2698         # Add current changelog text into changelogs_text table, return created ID
2699         query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
2700         ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]
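        # (RETURNING id is PostgreSQL syntax; it hands back the primary key
        # of the freshly inserted row without needing a second query.)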
2701
2702         # Link ID to the upload available in changes table
2703         query = """UPDATE changes SET changelog_id = :id WHERE source = :source
2704                    AND version = :version AND architecture = :architecture"""
2705         session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
2706                                 'version': self.pkg.changes['version'], \
2707                                 'architecture': " ".join(self.pkg.changes['architecture'].keys())})
2708
2709         session.commit()