#!/usr/bin/env python
# vim:set et sw=4:

"""
Queue utility functions for dak

@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2001 - 2006 James Troup <james@nocrew.org>
@copyright: 2009, 2010  Joerg Jaspert <joerg@debian.org>
@license: GNU General Public License version 2 or later
"""

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

###############################################################################

import errno
import os
import stat
import sys
import time
import apt_inst
import apt_pkg
import utils
import commands
import shutil
import textwrap
from types import *
from sqlalchemy.sql.expression import desc
from sqlalchemy.orm.exc import NoResultFound

import yaml

from dak_exceptions import *
from changes import *
from regexes import *
from config import Config
from holding import Holding
from urgencylog import UrgencyLog
from dbconn import *
from summarystats import SummaryStats
from utils import parse_changes, check_dsc_files
from textutils import fix_maintainer
from binary import Binary
from lintian import parse_lintian_output, generate_reject_messages

###############################################################################
def get_type(f, session):
    """
    Get the file type of C{f}

    @type f: dict
    @param f: file entry from Changes object

    @type session: SQLA Session
    @param session: SQL Alchemy session object

    @rtype: string
    @return: filetype

    """
    # Determine the type
    if f.has_key("dbtype"):
        file_type = f["dbtype"]
    elif re_source_ext.match(f["type"]):
        file_type = "dsc"
    else:
        file_type = f["type"]
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (file_type))

    # Validate the override type
    type_id = get_override_type(file_type, session)
    if type_id is None:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))

    return file_type

################################################################################

# Determine what parts in a .changes are NEW

def determine_new(changes, files, warn=1, session=None):
    """
    Determine what parts in a C{changes} file are NEW.

    @type changes: Upload.Pkg.changes dict
    @param changes: Changes dictionary

    @type files: Upload.Pkg.files dict
    @param files: Files dictionary

    @type warn: bool
    @param warn: Warn if overrides are added for (old)stable

    @rtype: dict
    @return: dictionary of NEW components.

    """
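    # Illustrative shape of the returned dict (derived from the loop below;
    # the package and file names are made up):
    #   { "foo": { "priority": "optional", "section": "utils", "type": "deb",
    #              "component": "main", "files": ["foo_1.0-1_amd64.deb"] } }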
    new = {}

    # Build up a list of potentially new things
    for name, f in files.items():
        # Skip byhand elements
#        if f["type"] == "byhand":
#            continue
        pkg = f["package"]
        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f, session)
        component = f["component"]

        if file_type == "dsc":
            priority = "source"

        if not new.has_key(pkg):
            new[pkg] = {}
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        else:
            old_type = new[pkg]["type"]
            if old_type != file_type:
                # source gets trumped by deb or udeb
                if old_type == "dsc":
                    new[pkg]["priority"] = priority
                    new[pkg]["section"] = section
                    new[pkg]["type"] = file_type
                    new[pkg]["component"] = component

        new[pkg]["files"].append(name)

        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]

    # Fix up the list of target suites
    cnf = Config()
    for suite in changes["suite"].keys():
        override = cnf.Find("Suite::%s::OverrideSuite" % (suite))
        if override:
            (olderr, newerr) = (get_suite(suite, session) is None,
                                get_suite(override, session) is None)
            if olderr or newerr:
                (oinv, ninv) = ("", "")
                if olderr: oinv = "invalid "
                if newerr: ninv = "invalid "
                print "warning: overriding %ssuite %s to %ssuite %s" % (
                        oinv, suite, ninv, override)
            del changes["suite"][suite]
            changes["suite"][override] = 1

    for suite in changes["suite"].keys():
        for pkg in new.keys():
            ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
            if len(ql) > 0:
                for file_entry in new[pkg]["files"]:
                    if files[file_entry].has_key("new"):
                        del files[file_entry]["new"]
                del new[pkg]

    if warn:
        for s in ['stable', 'oldstable']:
            if changes["suite"].has_key(s):
                print "WARNING: overrides will be added for %s!" % s
        for pkg in new.keys():
            if new[pkg].has_key("othercomponents"):
                print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])

    return new

################################################################################
def check_valid(new, session=None):
    """
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages can not be udeb
      - source priority can only be assigned to dsc file types

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.

    """
    for pkg in new.keys():
        section_name = new[pkg]["section"]
        priority_name = new[pkg]["priority"]
        file_type = new[pkg]["type"]

        section = get_section(section_name, session)
        if section is None:
            new[pkg]["section id"] = -1
        else:
            new[pkg]["section id"] = section.section_id

        priority = get_priority(priority_name, session)
        if priority is None:
            new[pkg]["priority id"] = -1
        else:
            new[pkg]["priority id"] = priority.priority_id

        # Sanity checks
        di = section_name.find("debian-installer") != -1

        # If d-i, we must be udeb and vice-versa
        if (di and file_type not in ("udeb", "dsc")) or \
           (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1
        # If dsc we need to be source and vice-versa
        if (priority_name == "source" and file_type != "dsc") or \
           (priority_name != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1

###############################################################################

# Used by Upload.check_timestamps
class TarTime(object):
    def __init__(self, future_cutoff, past_cutoff):
        self.reset()
        self.future_cutoff = future_cutoff
        self.past_cutoff = past_cutoff

    def reset(self):
        self.future_files = {}
        self.ancient_files = {}

    def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
        if MTime > self.future_cutoff:
            self.future_files[Name] = MTime
        if MTime < self.past_cutoff:
            self.ancient_files[Name] = MTime
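
# Usage sketch (illustrative; mirrors how Upload.check_timestamps drives this
# class elsewhere in dak -- the exact apt_inst call is an assumption about the
# old python-apt API):
#   tar = TarTime(future_cutoff, past_cutoff)
#   apt_inst.debExtract(utils.open_file(deb_filename), tar.callback, "data.tar.gz")
#   if tar.future_files or tar.ancient_files:
#       ...reject the upload...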

###############################################################################

class Upload(object):
    """
    Everything that has to do with processing an upload.

    """
    def __init__(self):
        self.logger = None
        self.pkg = Changes()
        self.reset()

    ###########################################################################

    def reset(self):
        """ Reset a number of internal variables."""

        # Initialize the substitution template map
        cnf = Config()
        self.Subst = {}
        self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
        self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
        self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
        self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]

        self.rejects = []
        self.warnings = []
        self.notes = []

        self.later_check_files = []

        self.pkg.reset()

    def package_info(self):
        """
        Format various messages from this Upload to send to the maintainer.
        """

        msgs = (
            ('Reject Reasons', self.rejects),
            ('Warnings', self.warnings),
            ('Notes', self.notes),
        )

        msg = ''
        for title, messages in msgs:
            if messages:
                msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
        msg += '\n\n'

        return msg

    ###########################################################################
    def update_subst(self):
        """ Set up the per-package template substitution mappings """

        cnf = Config()

        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not self.pkg.changes.has_key("architecture") or not \
           isinstance(self.pkg.changes["architecture"], dict):
            self.pkg.changes["architecture"] = { "Unknown" : "" }

        # and maintainer2047 may not exist.
        if not self.pkg.changes.has_key("maintainer2047"):
            self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]

        self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
        self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if self.pkg.changes["architecture"].has_key("source") and \
           self.pkg.changes["changedby822"] != "" and \
           (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):

            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
        else:
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")

        # Process policy doesn't set the fingerprint field and I don't want to make it
        # do it for now as I don't want to have to deal with the case where we accepted
        # the package into PU-NEW, but the fingerprint has gone away from the keyring in
        # the meantime so the package will be remarked as rejectable.  Urgh.
        # TODO: Fix this properly
        if self.pkg.changes.has_key('fingerprint'):
            session = DBConn().session()
            fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
            if self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
                if self.pkg.changes.has_key("sponsoremail"):
                    self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
            session.close()

        if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
            self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if cnf.get("Dinstall::OverrideMaintainer"):
            self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
            self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]

        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
        self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
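
        # Sketch of how this map is consumed (an assumption about callers
        # elsewhere in dak, not something this method does itself):
        #   mail_message = utils.TemplateSubst(self.Subst, template_filename)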

    ###########################################################################
    def load_changes(self, filename):
        """
        Load a changes file and set up a dictionary around it. Also checks for
        mandatory fields within.

        @type filename: string
        @param filename: Changes filename, full path.

        @rtype: boolean
        @return: whether the changes file was valid or not.  We may want to
                 reject even if this is True (see what gets put in self.rejects).
                 This is simply to prevent us even trying things later which will
                 fail because we couldn't properly parse the file.
        """
        Cnf = Config()
        self.pkg.changes_file = filename

        # Parse the .changes file into a dictionary
        try:
            self.pkg.changes.update(parse_changes(filename))
        except CantOpenError:
            self.rejects.append("%s: can't read file." % (filename))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except ChangesUnicodeError:
            self.rejects.append("%s: changes file not proper utf-8" % (filename))
            return False

        # Parse the Files field from the .changes into another dictionary
        try:
            self.pkg.files.update(utils.build_file_list(self.pkg.changes))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (filename, format))
            return False

        # Check for mandatory fields
        for i in ("distribution", "source", "binary", "architecture",
                  "version", "maintainer", "files", "changes", "description"):
            if not self.pkg.changes.has_key(i):
                # Avoid undefined errors later
                self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
                return False

        # Strip a source version in brackets from the source field
        if re_strip_srcver.search(self.pkg.changes["source"]):
            self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])

        # Ensure the source field is a valid package name.
        if not re_valid_pkg_name.match(self.pkg.changes["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))

        # Split multi-value fields into a lower-level dictionary
        for i in ("architecture", "distribution", "binary", "closes"):
            o = self.pkg.changes.get(i, "")
            if o != "":
                del self.pkg.changes[i]

            self.pkg.changes[i] = {}

            for j in o.split():
                self.pkg.changes[i][j] = 1
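
        # e.g. (illustrative): "Architecture: source amd64" has now become
        #   self.pkg.changes["architecture"] == { "source": 1, "amd64": 1 }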

        # Fix the Maintainer: field to be RFC822/2047 compatible
        try:
            (self.pkg.changes["maintainer822"],
             self.pkg.changes["maintainer2047"],
             self.pkg.changes["maintainername"],
             self.pkg.changes["maintaineremail"]) = \
                   fix_maintainer(self.pkg.changes["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["maintainer"], msg))

        # ...likewise for the Changed-By: field if it exists.
        try:
            (self.pkg.changes["changedby822"],
             self.pkg.changes["changedby2047"],
             self.pkg.changes["changedbyname"],
             self.pkg.changes["changedbyemail"]) = \
                   fix_maintainer(self.pkg.changes.get("changed-by", ""))
        except ParseMaintError, msg:
            self.pkg.changes["changedby822"] = ""
            self.pkg.changes["changedby2047"] = ""
            self.pkg.changes["changedbyname"] = ""
            self.pkg.changes["changedbyemail"] = ""

            self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["changed-by"], msg))

        # Ensure all the values in Closes: are numbers
        if self.pkg.changes.has_key("closes"):
            for i in self.pkg.changes["closes"].keys():
                if re_isanum.match(i) is None:
                    self.rejects.append("%s: `%s' from Closes field isn't a number." % (filename, i))

        # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
        self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
        self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])

        # Check the .changes is non-empty
        if not self.pkg.files:
            self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
            return False

        # Changes was syntactically valid even if we'll reject
        return True

    ###########################################################################

    def check_distributions(self):
        "Check and map the Distribution field"

        Cnf = Config()

        # Handle suite mappings
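        # Illustrative dak.conf entries (an assumed syntax, matching the
        # parsing in the loop below):
        #   SuiteMappings { "map stable proposed-updates";
        #                   "silent-map stable-security proposed-updates";
        #                   "propup-version stable-security testing"; };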
        for m in Cnf.ValueList("SuiteMappings"):
            args = m.split()
            mtype = args[0]
            if mtype == "map" or mtype == "silent-map":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    del self.pkg.changes["distribution"][source]
                    self.pkg.changes["distribution"][dest] = 1
                    if mtype != "silent-map":
                        self.notes.append("Mapping %s to %s." % (source, dest))
                if self.pkg.changes.has_key("distribution-version"):
                    if self.pkg.changes["distribution-version"].has_key(source):
                        self.pkg.changes["distribution-version"][source] = dest
            elif mtype == "map-unreleased":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    for arch in self.pkg.changes["architecture"].keys():
                        if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
                            self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
                            del self.pkg.changes["distribution"][source]
                            self.pkg.changes["distribution"][dest] = 1
                            break
            elif mtype == "ignore":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    del self.pkg.changes["distribution"][suite]
                    self.warnings.append("Ignoring %s as a target suite." % (suite))
            elif mtype == "reject":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    self.rejects.append("Uploads to %s are not accepted." % (suite))
            elif mtype == "propup-version":
                # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
                #
                # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
                if self.pkg.changes["distribution"].has_key(args[1]):
                    self.pkg.changes.setdefault("distribution-version", {})
                    for suite in args[2:]:
                        self.pkg.changes["distribution-version"][suite] = suite

        # Ensure there is (still) a target distribution
        if len(self.pkg.changes["distribution"].keys()) < 1:
            self.rejects.append("No valid distribution remaining.")

        # Ensure target distributions exist
        for suite in self.pkg.changes["distribution"].keys():
            if not Cnf.has_key("Suite::%s" % (suite)):
                self.rejects.append("Unknown distribution `%s'." % (suite))

    ###########################################################################

    def binary_file_checks(self, f, session):
        cnf = Config()
        entry = self.pkg.files[f]

        # Extract package control information
        deb_file = utils.open_file(f)
        try:
            control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
        except:
            self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
            deb_file.close()
            # Can't continue, none of the checks on control would work.
            return

        # Check for mandatory "Description:"
        deb_file.seek(0)
        try:
            apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
        except:
            self.rejects.append("%s: Missing Description in binary package" % (f))
            deb_file.close()
            return

        deb_file.close()

        # Check for mandatory fields
        for field in [ "Package", "Architecture", "Version" ]:
            if control.Find(field) is None:
                # Can't continue
                self.rejects.append("%s: No %s field in control." % (f, field))
                return

        # Ensure the package name matches the one given in the .changes
        if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
            self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))

        # Validate the package field
        package = control.Find("Package")
        if not re_valid_pkg_name.match(package):
            self.rejects.append("%s: invalid package name '%s'." % (f, package))

        # Validate the version field
        version = control.Find("Version")
        if not re_valid_version.match(version):
            self.rejects.append("%s: invalid version number '%s'." % (f, version))

        # Ensure the architecture of the .deb is one we know about.
        default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
        architecture = control.Find("Architecture")
        upload_suite = self.pkg.changes["distribution"].keys()[0]

        if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session=session)] \
           and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session=session)]:
            self.rejects.append("Unknown architecture '%s'." % (architecture))

        # Ensure the architecture of the .deb is one of the ones
        # listed in the .changes.
        if not self.pkg.changes["architecture"].has_key(architecture):
            self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))

        # Sanity-check the Depends field
        depends = control.Find("Depends")
        if depends == '':
            self.rejects.append("%s: Depends field is empty." % (f))

        # Sanity-check the Provides field
        provides = control.Find("Provides")
        if provides:
            provide = re_spacestrip.sub('', provides)
            if provide == '':
                self.rejects.append("%s: Provides field is empty." % (f))
            prov_list = provide.split(",")
            for prov in prov_list:
                if not re_valid_pkg_name.match(prov):
                    self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))

        # Check the section & priority match those given in the .changes (non-fatal)
        if control.Find("Section") and entry["section"] != "" \
           and entry["section"] != control.Find("Section"):
            self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
                                (f, control.Find("Section", ""), entry["section"]))
        if control.Find("Priority") and entry["priority"] != "" \
           and entry["priority"] != control.Find("Priority"):
            self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
                                (f, control.Find("Priority", ""), entry["priority"]))

        entry["package"] = package
        entry["architecture"] = architecture
        entry["version"] = version
        entry["maintainer"] = control.Find("Maintainer", "")

        if f.endswith(".udeb"):
            self.pkg.files[f]["dbtype"] = "udeb"
        elif f.endswith(".deb"):
            self.pkg.files[f]["dbtype"] = "deb"
        else:
            self.rejects.append("%s is neither a .deb nor a .udeb." % (f))

        entry["source"] = control.Find("Source", entry["package"])

        # Get the source version
        source = entry["source"]
        source_version = ""

        if source.find("(") != -1:
            m = re_extract_src_version.match(source)
            source = m.group(1)
            source_version = m.group(2)

        if not source_version:
            source_version = self.pkg.files[f]["version"]

        entry["source package"] = source
        entry["source version"] = source_version

        # Ensure the filename matches the contents of the .deb
        m = re_isadeb.match(f)

        #  package name
        file_package = m.group(1)
        if entry["package"] != file_package:
            self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
                                (f, file_package, entry["dbtype"], entry["package"]))
        epochless_version = re_no_epoch.sub('', control.Find("Version"))

        #  version
        file_version = m.group(2)
        if epochless_version != file_version:
            self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
                                (f, file_version, entry["dbtype"], epochless_version))

        #  architecture
        file_architecture = m.group(3)
        if entry["architecture"] != file_architecture:
            self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
                                (f, file_architecture, entry["dbtype"], entry["architecture"]))

        # Check that the source package exists
        source_version = entry["source version"]
        source_package = entry["source package"]
        if self.pkg.changes["architecture"].has_key("source"):
            if source_version != self.pkg.changes["version"]:
                self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
                                    (source_version, f, self.pkg.changes["version"]))
        else:
            # Check in the SQL database
            if not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
                # Check in one of the other directories
                source_epochless_version = re_no_epoch.sub('', source_version)
                dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
                if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
                    entry["byhand"] = 1
                elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
                    entry["new"] = 1
                else:
                    dsc_file_exists = False
                    for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates", "Lenny-Volatile-Proposed-Updates"]:
                        if cnf.has_key("Dir::Queue::%s" % (myq)):
                            if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
                                dsc_file_exists = True
                                break

                    if not dsc_file_exists:
                        self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))

        # Check the version and check for file overwrites
        self.check_binary_against_db(f, session)

        # Temporarily disable contents generation until we change the table storage layout
        #b = Binary(f)
        #b.scan_package()
        #if len(b.rejects) > 0:
        #    for j in b.rejects:
        #        self.rejects.append(j)

    def source_file_checks(self, f, session):
        entry = self.pkg.files[f]

        m = re_issource.match(f)
        if not m:
            return

        entry["package"] = m.group(1)
        entry["version"] = m.group(2)
        entry["type"] = m.group(3)

        # Ensure the source package name matches the Source field in the .changes
        if self.pkg.changes["source"] != entry["package"]:
            self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))

        # Ensure the source version matches the version in the .changes file
        if re_is_orig_source.match(f):
            changes_version = self.pkg.changes["chopversion2"]
        else:
            changes_version = self.pkg.changes["chopversion"]

        if changes_version != entry["version"]:
            self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))

        # Ensure the .changes lists source in the Architecture field
        if not self.pkg.changes["architecture"].has_key("source"):
            self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))

        # Check the signature of a .dsc file
        if entry["type"] == "dsc":
            # check_signature returns either:
            #  (None, [list, of, rejects]) or (signature, [])
            (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
            for j in rejects:
                self.rejects.append(j)

        entry["architecture"] = "source"

    def per_suite_file_checks(self, f, suite, session):
        cnf = Config()
        entry = self.pkg.files[f]

        # Skip byhand
        if entry.has_key("byhand"):
            return

        # Check we have fields we need to do these checks
        oktogo = True
        for m in ['component', 'package', 'priority', 'size', 'md5sum']:
            if not entry.has_key(m):
                self.rejects.append("file '%s' does not have field %s set" % (f, m))
                oktogo = False

        if not oktogo:
            return

        # Handle component mappings
        for m in cnf.ValueList("ComponentMappings"):
            (source, dest) = m.split()
            if entry["component"] == source:
                entry["original component"] = source
                entry["component"] = dest
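        # e.g. (illustrative): a ComponentMappings entry of "non-US/main main"
        # would rewrite entry["component"] from "non-US/main" to "main".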

        # Ensure the component is valid for the target suite
        if cnf.has_key("Suite::%s::Components" % (suite)) and \
           entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
            self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
            return

        # Validate the component
        if not get_component(entry["component"], session):
            self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
            return

        # See if the package is NEW
        if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
            entry["new"] = 1

        # Validate the priority
        if entry["priority"].find('/') != -1:
            self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))

        # Determine the location
        location = cnf["Dir::Pool"]
        l = get_location(location, entry["component"], session=session)
        if l is None:
            self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
            entry["location id"] = -1
        else:
            entry["location id"] = l.location_id

        # Check the md5sum & size against existing files (if any)
        entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])

        found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
                                         entry["size"], entry["md5sum"], entry["location id"])

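        # check_poolfile() contract, as relied on below: found is None when the
        # lookup was ambiguous (multiple matches), False when an existing copy
        # differs in size/md5sum, and True otherwise; poolfile is the matching
        # database row, if any.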
        if found is None:
            self.rejects.append("INTERNAL ERROR, check_poolfile() returned multiple matches for %s." % (f))
        elif found is False and poolfile is not None:
            self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
        else:
            if poolfile is None:
                entry["files id"] = None
            else:
                entry["files id"] = poolfile.file_id

        # Check for packages that have moved from one component to another
        entry['suite'] = suite
        res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
        if res.rowcount > 0:
            entry["othercomponents"] = res.fetchone()[0]

    def check_files(self, action=True):
        file_keys = self.pkg.files.keys()
        holding = Holding()
        cnf = Config()

        if action:
            cwd = os.getcwd()
            os.chdir(self.pkg.directory)
            for f in file_keys:
                ret = holding.copy_to_holding(f)
                if ret is not None:
                    self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)

            os.chdir(cwd)

        # Check whether we already know the changes file
        # [NB: this check must be done post-suite mapping]
        base_filename = os.path.basename(self.pkg.changes_file)

        session = DBConn().session()

        try:
            dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
            # if in the pool or in a queue other than unchecked, reject
            if (dbc.in_queue is None) \
                   or (dbc.in_queue is not None
                       and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
                self.rejects.append("%s file already known to dak" % base_filename)
        except NoResultFound, e:
            # not known, good
            pass

        has_binaries = False
        has_source = False

        for f, entry in self.pkg.files.items():
            # Ensure the file does not already exist in one of the accepted directories
            for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                if not cnf.has_key("Dir::Queue::%s" % (d)): continue
                if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d)], f)):
                    self.rejects.append("%s file already exists in the %s directory." % (f, d))

            if not re_taint_free.match(f):
                self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))

            # Check the file is readable
            if not os.access(f, os.R_OK):
                # When running in -n, copy_to_holding() won't have
                # generated the reject_message, so we need to.
                if action:
                    if os.path.exists(f):
                        self.rejects.append("Can't read `%s'. [permission denied]" % (f))
                    else:
                        # Don't directly reject, mark to check later to deal with orig's
                        # we can find in the pool
                        self.later_check_files.append(f)
                entry["type"] = "unreadable"
                continue

            # If it's byhand skip remaining checks
            if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Checks for a binary package...
            elif re_isadeb.match(f):
                has_binaries = True
                entry["type"] = "deb"

                # This routine appends to self.rejects/warnings as appropriate
                self.binary_file_checks(f, session)

            # Checks for a source package...
            elif re_issource.match(f):
                has_source = True

                # This routine appends to self.rejects/warnings as appropriate
                self.source_file_checks(f, session)

            # Not a binary or source package?  Assume byhand...
            else:
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Per-suite file checks
            entry["oldfiles"] = {}
            for suite in self.pkg.changes["distribution"].keys():
                self.per_suite_file_checks(f, suite, session)

        session.close()

        # If the .changes file says it has source, it must have source.
        if self.pkg.changes["architecture"].has_key("source"):
            if not has_source:
                self.rejects.append("no source found although the Architecture line in the changes file mentions source.")

            if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
                self.rejects.append("source only uploads are not supported.")

    ###########################################################################
    def check_dsc(self, action=True, session=None):
        """Returns bool indicating whether or not the source changes are valid"""
        # Ensure there is source to check
        if not self.pkg.changes["architecture"].has_key("source"):
            return True

        # Find the .dsc
        dsc_filename = None
        for f, entry in self.pkg.files.items():
            if entry["type"] == "dsc":
                if dsc_filename:
                    self.rejects.append("can not process a .changes file with multiple .dsc's.")
                    return False
                else:
                    dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            self.rejects.append("source uploads must contain a .dsc file")
            return False

        # Parse the .dsc file
        try:
            self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1, dsc_file=1))
        except CantOpenError:
            # if not -n copy_to_holding() will have done this for us...
            if not action:
                self.rejects.append("%s: can't read file." % (dsc_filename))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
        except InvalidDscError, line:
            self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
        except ChangesUnicodeError:
            self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))

        # Build up the file list of files mentioned by the .dsc
        try:
            self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
        except NoFilesFieldError:
            self.rejects.append("%s: no Files: field." % (dsc_filename))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
            return False

        # Enforce mandatory fields
        for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
            if not self.pkg.dsc.has_key(i):
                self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
                return False

        # Validate the source and version fields
        if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
        if not re_valid_version.match(self.pkg.dsc["version"]):
            self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))

        # Only a limited set of source formats is allowed in each suite
        for dist in self.pkg.changes["distribution"].keys():
            allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
            if self.pkg.dsc["format"] not in allowed:
                self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))

        # Validate the Maintainer field
        try:
            # We ignore the return value
            fix_maintainer(self.pkg.dsc["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                                 % (dsc_filename, self.pkg.dsc["maintainer"], msg))

        # Validate the build-depends field(s)
        for field_name in [ "build-depends", "build-depends-indep" ]:
            field = self.pkg.dsc.get(field_name)
            if field:
                # Have apt try to parse them...
                try:
                    apt_pkg.ParseSrcDepends(field)
                except:
                    self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))

        # Ensure the version number in the .dsc matches the version number in the .changes
        epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        changes_version = self.pkg.files[dsc_filename]["version"]

        if epochless_dsc_version != changes_version:
            self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))

        # Ensure the Files field contains only what's expected
        self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))

        # Ensure source is newer than existing source in target suites
        session = DBConn().session()
        self.check_source_against_db(dsc_filename, session)
        self.check_dsc_against_db(dsc_filename, session)
        session.close()

        # Finally, check if we're missing any files
        for f in self.later_check_files:
            self.rejects.append("Could not find file %s referenced in changes" % f)

        return True

    ###########################################################################

    def get_changelog_versions(self, source_dir):
        """Extracts the source package and (optionally) grabs the
        version history out of debian/changelog for the BTS."""

        cnf = Config()

        # Find the .dsc (again)
        dsc_filename = None
        for f in self.pkg.files.keys():
            if self.pkg.files[f]["type"] == "dsc":
                dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            return

        # Create a symlink mirror of the source files in our temporary directory
        for f in self.pkg.files.keys():
            m = re_issource.match(f)
            if m:
                src = os.path.join(source_dir, f)
                # If a file is missing for whatever reason, give up.
                if not os.path.exists(src):
                    return
                ftype = m.group(3)
                if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
                   self.pkg.orig_files[f].has_key("path"):
                    continue
                dest = os.path.join(os.getcwd(), f)
                os.symlink(src, dest)

        # If the orig files are not a part of the upload, create symlinks to the
        # existing copies.
        for orig_file in self.pkg.orig_files.keys():
            if not self.pkg.orig_files[orig_file].has_key("path"):
                continue
            dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
            os.symlink(self.pkg.orig_files[orig_file]["path"], dest)

        # Extract the source
        cmd = "dpkg-source -sn -x %s" % (dsc_filename)
        (result, output) = commands.getstatusoutput(cmd)
        if (result != 0):
            self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
            self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
            return

        if not cnf.Find("Dir::Queue::BTSVersionTrack"):
            return

        # Get the upstream version
        upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        if re_strip_revision.search(upstr_version):
            upstr_version = re_strip_revision.sub('', upstr_version)

        # Ensure the changelog file exists
        changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
        if not os.path.exists(changelog_filename):
            self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
            return

        # Parse the changelog
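        # We keep only the stanza header lines, e.g. (illustrative):
        #   hello (2.4-1) unstable; urgency=low
        # which is the sort of line re_changelog_versions is meant to match.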
        self.pkg.dsc["bts changelog"] = ""
        changelog_file = utils.open_file(changelog_filename)
        for line in changelog_file.readlines():
            m = re_changelog_versions.match(line)
            if m:
                self.pkg.dsc["bts changelog"] += line
        changelog_file.close()

        # Check we found at least one revision in the changelog
        if not self.pkg.dsc["bts changelog"]:
            self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))

    def check_source(self):
        # Bail out if:
        #    a) there's no source
        if not self.pkg.changes["architecture"].has_key("source"):
            return

        tmpdir = utils.temp_dirname()

        # Move into the temporary directory
        cwd = os.getcwd()
        os.chdir(tmpdir)

        # Get the changelog version history
        self.get_changelog_versions(cwd)

        # Move back and clean up the temporary tree
        os.chdir(cwd)

        try:
            shutil.rmtree(tmpdir)
        except OSError, e:
            if e.errno != errno.EACCES:
                print "foobar"
                utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))

            self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
            # We probably have u-r or u-w directories so chmod everything
            # and try again.
            cmd = "chmod -R u+rwx %s" % (tmpdir)
            result = os.system(cmd)
            if result != 0:
                utils.fubar("'%s' failed with result %s." % (cmd, result))
            shutil.rmtree(tmpdir)
        except Exception, e:
            print "foobar2 (%s)" % e
            utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))

    ###########################################################################
    def ensure_hashes(self):
        # Make sure we recognise the format of the Files: field in the .changes
        format = self.pkg.changes.get("format", "0.0").split(".", 1)
        if len(format) == 2:
            format = int(format[0]), int(format[1])
        else:
            format = int(float(format[0])), 0
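        # e.g. (illustrative): a Format of "1.8" parses to (1, 8); a bare "1"
        # would parse to (1, 0).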

        # We need to deal with the original changes blob, as the fields we need
        # might not be in the changes dict serialised into the .dak anymore.
        orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])

        # Copy the checksums over to the current changes dict.  This will keep
        # the existing modifications to it intact.
        for field in orig_changes:
            if field.startswith('checksums-'):
                self.pkg.changes[field] = orig_changes[field]

        # Check for unsupported hashes
        for j in utils.check_hash_fields(".changes", self.pkg.changes):
            self.rejects.append(j)

        for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
            self.rejects.append(j)

        # For changes files older than the version a given hash first appeared
        # in, calculate the hash ourselves rather than requiring it to be
        # present in the changes file.
        for hashname, hashfunc, version in utils.known_hashes:
            # TODO: Move _ensure_changes_hash into this class
            for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
                self.rejects.append(j)
            if "source" in self.pkg.changes["architecture"]:
                # TODO: Move _ensure_dsc_hash into this class
                for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
                    self.rejects.append(j)

    def check_hashes(self):
        for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
            self.rejects.append(m)

        for m in utils.check_size(".changes", self.pkg.files):
            self.rejects.append(m)

        for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
            self.rejects.append(m)

        for m in utils.check_size(".dsc", self.pkg.dsc_files):
            self.rejects.append(m)

        self.ensure_hashes()

    ###########################################################################

    def ensure_orig(self, target_dir='.', session=None):
        """
        Ensures that all orig files mentioned in the changes file are present
        in target_dir. If they do not exist, they are symlinked into place.

        A list of the symlinks that were created is returned (so they can be
        removed later).
        """
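
        # Usage sketch (illustrative): callers pair this with removal of the
        # created links once they are done with them, as check_lintian() below
        # does:
        #   symlinked = self.ensure_orig()
        #   try:
        #       ... work that needs the orig tarballs ...
        #   finally:
        #       for link in symlinked:
        #           os.unlink(link)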

        symlinked = []
        cnf = Config()

        for filename, entry in self.pkg.dsc_files.iteritems():
            if not re_is_orig_source.match(filename):
                # File is not an orig; ignore
                continue

            if os.path.exists(filename):
                # File exists, no need to continue
                continue

            def symlink_if_valid(path):
                f = utils.open_file(path)
                md5sum = apt_pkg.md5sum(f)
                f.close()

                fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
                expected = (int(entry['size']), entry['md5sum'])

                if fingerprint != expected:
                    return False

                dest = os.path.join(target_dir, filename)

                os.symlink(path, dest)
                symlinked.append(dest)

                return True

            session_ = session
            if session is None:
                session_ = DBConn().session()

            found = False

            # Look in the pool
            for poolfile in get_poolfile_like_name('%s' % filename, session_):
                poolfile_path = os.path.join(
                    poolfile.location.path, poolfile.filename
                )

                if symlink_if_valid(poolfile_path):
                    found = True
                    break

            if session is None:
                session_.close()

            if found:
                continue

            # Look in some other queues for the file
            queues = ('New', 'Byhand', 'ProposedUpdates',
                'OldProposedUpdates', 'Embargoed', 'Unembargoed')

            for queue in queues:
                if not cnf.get('Dir::Queue::%s' % queue):
                    continue

                queuefile_path = os.path.join(
                    cnf['Dir::Queue::%s' % queue], filename
                )

                if not os.path.exists(queuefile_path):
                    # Does not exist in this queue
                    continue

                if symlink_if_valid(queuefile_path):
                    break

        return symlinked
1270
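    # Illustrative note: symlink_if_valid above uses (size, md5sum) as a cheap
    # fingerprint before symlinking.  The same check in isolation, with path
    # and entry standing in for the loop variables above:
    #
    #   f = utils.open_file(path)
    #   fingerprint = (os.stat(path)[stat.ST_SIZE], apt_pkg.md5sum(f))
    #   f.close()
    #   ok = fingerprint == (int(entry['size']), entry['md5sum'])
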
1271     ###########################################################################
1272
1273     def check_lintian(self):
1274         """
1275         Extends self.rejects by checking the output of lintian against tags
1276         specified in Dinstall::LintianTags.
1277         """
1278
1279         cnf = Config()
1280
1281         # Don't reject binary uploads
1282         if not self.pkg.changes['architecture'].has_key('source'):
1283             return
1284
1285         # Only check some distributions
1286         for dist in ('unstable', 'experimental'):
1287             if dist in self.pkg.changes['distribution']:
1288                 break
1289         else:
1290             return
1291
1292         # If we do not have a tagfile, don't do anything
1293         tagfile = cnf.get("Dinstall::LintianTags")
1294         if tagfile is None:
1295             return
1296
1297         # Parse the yaml file
1298         sourcefile = file(tagfile, 'r')
1299         sourcecontent = sourcefile.read()
1300         sourcefile.close()
1301
1302         try:
1303             lintiantags = yaml.safe_load(sourcecontent)['lintian']
1304         except yaml.YAMLError, msg:
1305             utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
1306             return
1307
1308         # Try and find all orig files mentioned in the .dsc
1309         symlinked = self.ensure_orig()
1310
1311         # Setup the input file for lintian
1312         fd, temp_filename = utils.temp_filename()
1313         temptagfile = os.fdopen(fd, 'w')
1314         for tags in lintiantags.values():
1315             temptagfile.writelines(['%s\n' % x for x in tags])
1316         temptagfile.close()
1317
1318         try:
1319             cmd = "lintian --show-overrides --tags-from-file %s %s" % \
1320                 (temp_filename, self.pkg.changes_file)
1321
1322             result, output = commands.getstatusoutput(cmd)
1323         finally:
1324             # Remove our tempfile and any symlinks we created
1325             os.unlink(temp_filename)
1326
1327             for symlink in symlinked:
1328                 os.unlink(symlink)
1329
1330         if result == 2:
1331             utils.warn("lintian failed for %s [return code: %s]." % \
1332                 (self.pkg.changes_file, result))
1333             utils.warn(utils.prefix_multi_line_string(output, \
1334                 " [possible output:] "))
1335
1336         def log(*txt):
1337             if self.logger:
1338                 self.logger.log(
1339                     [self.pkg.changes_file, "check_lintian"] + list(txt)
1340                 )
1341
1342         # Generate messages
1343         parsed_tags = parse_lintian_output(output)
1344         self.rejects.extend(
1345             generate_reject_messages(parsed_tags, lintiantags, log=log)
1346         )
1347
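    # For reference, a sketch of what the Dinstall::LintianTags file is
    # expected to contain: a YAML document with a top-level 'lintian' key
    # mapping category names to lists of tags (only the lists are used above;
    # the category and tag names below are illustrative assumptions):
    #
    #   lintian:
    #     fatal:
    #       - binary-in-etc
    #     nonfatal:
    #       - debian-control-file-uses-obsolete-national-encoding
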
1348     ###########################################################################
1349     def check_urgency(self):
1350         cnf = Config()
1351         if self.pkg.changes["architecture"].has_key("source"):
1352             if not self.pkg.changes.has_key("urgency"):
1353                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1354             self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1355             if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1356                 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1357                                      (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1358                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1359
1360     ###########################################################################
1361
1362     # Sanity check the time stamps of files inside debs.
1363     # [Files in the near future cause ugly warnings and extreme time
1364     #  travel can cause errors on extraction]
1365
1366     def check_timestamps(self):
1367         Cnf = Config()
1368
1369         future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1370         past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1371         tar = TarTime(future_cutoff, past_cutoff)
1372
1373         for filename, entry in self.pkg.files.items():
1374             if entry["type"] == "deb":
1375                 tar.reset()
1376                 try:
1377                     deb_file = utils.open_file(filename)
1378                     apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
1379                     deb_file.seek(0)
1380                     try:
1381                         apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
1382                     except SystemError, e:
1383                         # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
1384                         if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
1385                             raise
1386                         deb_file.seek(0)
1387                         apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")
1388
1389                     deb_file.close()
1390
1391                     future_files = tar.future_files.keys()
1392                     if future_files:
1393                         num_future_files = len(future_files)
1394                         future_file = future_files[0]
1395                         future_date = tar.future_files[future_file]
1396                         self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1397                                % (filename, num_future_files, future_file, time.ctime(future_date)))
1398
1399                     ancient_files = tar.ancient_files.keys()
1400                     if ancient_files:
1401                         num_ancient_files = len(ancient_files)
1402                         ancient_file = ancient_files[0]
1403                         ancient_date = tar.ancient_files[ancient_file]
1404                         self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1405                                % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1406                 except:
1407                     self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1408
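    # Worked example for the cutoffs above (values are illustrative): with
    # Dinstall::PastCutoffYear = "1975" and Dinstall::FutureTimeTravelGrace =
    # 86400,
    #
    #   past_cutoff   = time.mktime(time.strptime("1975", "%Y"))
    #   future_cutoff = time.time() + 86400    # one day of clock skew allowed
    #
    # any tar member whose mtime falls outside [past_cutoff, future_cutoff]
    # lands in TarTime's ancient_files/future_files and is rejected above.
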
1409     def check_if_upload_is_sponsored(self, uid_email, uid_name):
1410         if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1411             sponsored = False
1412         elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1413             sponsored = False
1414             if uid_name == "":
1415                 sponsored = True
1416         else:
1417             sponsored = True
1418             if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1419                 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1420                 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1421                     self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1422                         self.pkg.changes["sponsoremail"] = uid_email
1423
1424         return sponsored
1425
1426
1427     ###########################################################################
1428     # check_signed_by_key checks
1429     ###########################################################################
1430
1431     def check_signed_by_key(self):
1432         """Ensure the .changes is signed by an authorized uploader."""
1433         session = DBConn().session()
1434
1435         # First of all we check that the person has proper upload permissions
1436         # and that this upload isn't blocked
1437         fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1438
1439         if fpr is None:
1440             self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1441             return
1442
1443         # TODO: Check that import-keyring adds UIDs properly
1444         if not fpr.uid:
1445             self.rejects.append("Cannot find uid for fingerprint %s.  Please contact ftpmaster@debian.org" % fpr.fingerprint)
1446             return
1447
1448         # Check that the fingerprint which uploaded has permission to do so
1449         self.check_upload_permissions(fpr, session)
1450
1451         # Check that this package is not in a transition
1452         self.check_transition(session)
1453
1454         session.close()
1455
1456
1457     def check_upload_permissions(self, fpr, session):
1458         # Check any one-off upload blocks
1459         self.check_upload_blocks(fpr, session)
1460
1461         # Start with DM as a special case
1462         # DM is a special case unfortunately, so we check it first
1463         # (keys with no source access get more access than DMs in one
1464         #  way; DMs can only upload for their packages whether source
1465         #  or binary, whereas keys with no access might be able to
1466         #  upload some binaries)
1467         if fpr.source_acl.access_level == 'dm':
1468             self.check_dm_upload(fpr, session)
1469         else:
1470             # Check source-based permissions for other types
1471             if self.pkg.changes["architecture"].has_key("source") and \
1472                 fpr.source_acl.access_level is None:
1473                 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1474                 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1475                 self.rejects.append(rej)
1476                 return
1477             # If not a DM, we allow full upload rights
1478             uid_email = "%s@debian.org" % (fpr.uid.uid)
1479             self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1480
1481
1482         # Check binary upload permissions
1483         # By this point we know that DMs can't have got here unless they
1484         # are allowed to deal with the package concerned so just apply
1485         # normal checks
1486         if fpr.binary_acl.access_level == 'full':
1487             return
1488
1489         # Otherwise we're in the map case
1490         tmparches = self.pkg.changes["architecture"].copy()
1491         tmparches.pop('source', None)
1492
1493         for bam in fpr.binary_acl_map:
1494             tmparches.pop(bam.architecture.arch_string, None)
1495
1496         if len(tmparches.keys()) > 0:
1497             if fpr.binary_reject:
1498                 rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1499                 rej += "\narchitectures involved are: %s" % ",".join(tmparches.keys())
1500                 self.rejects.append(rej)
1501             else:
1502                 # TODO: This is where we'll implement reject vs throw away binaries later
1503                 rej = "Uhm.  I'm meant to throw away the binaries now but that's not implemented yet"
1504                 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1505                 rej += "\nFingerprint: %s" % (fpr.fingerprint)
1506                 self.rejects.append(rej)
1507
1508
1509     def check_upload_blocks(self, fpr, session):
1510         """Check whether any upload blocks apply to this source, source
1511            version, uid / fpr combination"""
1512
1513         def block_rej_template(fb):
1514             rej = 'Manual upload block in place for package %s' % fb.source
1515             if fb.version is not None:
1516                 rej += ', version %s' % fb.version
1517             return rej
1518
1519         for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1520             # version is None if the block applies to all versions
1521             if fb.version is None or fb.version == self.pkg.changes['version']:
1522                 # Check both fpr and uid - either is enough to cause a reject
1523                 if fb.fpr is not None:
1524                     if fb.fpr.fingerprint == fpr.fingerprint:
1525                         self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1526                 if fb.uid is not None:
1527                     if fb.uid == fpr.uid:
1528                         self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1529
1530
1531     def check_dm_upload(self, fpr, session):
1532         # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
1533         ## none of the uploaded packages are NEW
1534         rej = False
1535         for f in self.pkg.files.keys():
1536             if self.pkg.files[f].has_key("byhand"):
1537                 self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
1538                 rej = True
1539             if self.pkg.files[f].has_key("new"):
1540                 self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
1541                 rej = True
1542
1543         if rej:
1544             return
1545
1546         ## the most recent version of the package uploaded to unstable or
1547         ## experimental includes the field "DM-Upload-Allowed: yes" in the source
1548         ## section of its control file
1549         q = session.query(DBSource).filter_by(source=self.pkg.changes["source"])
1550         q = q.join(SrcAssociation)
1551         q = q.join(Suite).filter(Suite.suite_name.in_(['unstable', 'experimental']))
1552         q = q.order_by(desc('source.version')).limit(1)
1553
1554         r = q.all()
1555
1556         if len(r) != 1:
1557             rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
1558             self.rejects.append(rej)
1559             return
1560
1561         r = r[0]
1562         if not r.dm_upload_allowed:
1563             rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
1564             self.rejects.append(rej)
1565             return
1566
1567         ## the Maintainer: field of the uploaded .changes file corresponds with
1568         ## the owner of the key used (ie, non-developer maintainers may not sponsor
1569         ## uploads)
1570         if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
1571             self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
1572
1573         ## the most recent version of the package uploaded to unstable or
1574         ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
1575         ## non-developer maintainers cannot NMU or hijack packages)
1576
1577         # srcuploaders includes the maintainer
1578         accept = False
1579         for sup in r.srcuploaders:
1580             (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
1581             # Eww - I hope we never have two people with the same name in Debian
1582             if email == fpr.uid.uid or name == fpr.uid.name:
1583                 accept = True
1584                 break
1585
1586         if not accept:
1587             self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
1588             return
1589
1590         ## none of the packages are being taken over from other source packages
1591         for b in self.pkg.changes["binary"].keys():
1592             for suite in self.pkg.changes["distribution"].keys():
1593                 q = session.query(DBSource)
1594                 q = q.join(DBBinary).filter_by(package=b)
1595                 q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)
1596
1597                 for s in q.all():
1598                     if s.source != self.pkg.changes["source"]:
1599                         self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s.source, suite))
1600
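    # A hedged sketch of the "most recent version in unstable/experimental"
    # query used above, shown on its own ("hello" is a placeholder source
    # package name):
    #
    #   q = session.query(DBSource).filter_by(source="hello") \
    #              .join(SrcAssociation) \
    #              .join(Suite).filter(Suite.suite_name.in_(['unstable', 'experimental'])) \
    #              .order_by(desc('source.version')).limit(1)
    #   r = q.all()    # empty list if the source was never uploaded there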
1601
1602
1603     def check_transition(self, session):
1604         cnf = Config()
1605
1606         sourcepkg = self.pkg.changes["source"]
1607
1608         # No sourceful upload -> no need to do anything else, return directly.
1609         # We also only care about uploads to unstable; experimental and the
1610         # proposed-updates queues are not checked.
1611         if "source" not in self.pkg.changes["architecture"] or \
1612            "unstable" not in self.pkg.changes["distribution"]:
1613             return
1614
1615         # Also, only check if a transitions file is defined (and actually
1616         # exists).
1617         transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1618         if transpath == "" or not os.path.exists(transpath):
1619             return
1620
1621         # Parse the yaml file
1622         sourcefile = file(transpath, 'r')
1623         sourcecontent = sourcefile.read()
1624         try:
1625             transitions = yaml.safe_load(sourcecontent)
1626         except yaml.YAMLError, msg:
1627             # This shouldn't happen, as there is a wrapper to edit the file
1628             # which checks it, but we prefer to be safe rather than end up
1629             # rejecting everything.
1630             utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1631             return
1632
1633         # Now look through all defined transitions
1634         for trans in transitions:
1635             t = transitions[trans]
1636             source = t["source"]
1637             expected = t["new"]
1638
1639             # Will be None if nothing is in testing.
1640             current = get_source_in_suite(source, "testing", session)
1641             if current is not None:
1642                 compare = apt_pkg.VersionCompare(current.version, expected)
1643
1644             if current is None or compare < 0:
1645                 # This is still valid, the current version in testing is older than
1646                 # the new version we wait for, or there is none in testing yet
1647
1648                 # Check if the source we look at is affected by this.
1649                 if sourcepkg in t['packages']:
1650                     # The source is affected, let's reject it.
1651
1652                     rejectmsg = "%s: part of the %s transition.\n\n" % (
1653                         sourcepkg, trans)
1654
1655                     if current is not None:
1656                         currentlymsg = "at version %s" % (current.version)
1657                     else:
1658                         currentlymsg = "not present in testing"
1659
1660                     rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1661
1662                     rejectmsg += "\n".join(textwrap.wrap("""Your package
1663 is part of a testing transition designed to get %s migrated (it is
1664 currently %s, we need version %s).  This transition is managed by the
1665 Release Team, and %s is the Release-Team member responsible for it.
1666 Please mail debian-release@lists.debian.org or contact %s directly if you
1667 need further assistance.  You might want to upload to experimental until this
1668 transition is done."""
1669                             % (source, currentlymsg, expected,t["rm"], t["rm"])))
1670
1671                     self.rejects.append(rejectmsg)
1672                     return
1673
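    # For reference, a sketch of the ReleaseTransitions YAML this method
    # parses.  The field names match the lookups above; the values are
    # illustrative assumptions:
    #
    #   perl_5.10:
    #     source: perl
    #     new: 5.10.0-1
    #     rm: "Joe Releaser"
    #     reason: "perl 5.10 transition"
    #     packages:
    #       - libfoo-perl
    #       - libbar-perl
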
1674     ###########################################################################
1675     # End check_signed_by_key checks
1676     ###########################################################################
1677
1678     def build_summaries(self):
1679         """ Build a summary of changes the upload introduces. """
1680
1681         (byhand, new, summary, override_summary) = self.pkg.file_summary()
1682
1683         short_summary = summary
1684
1685         # This is for direport's benefit...
1686         f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1687
1688         if byhand or new:
1689             summary += "Changes: " + f
1690
1691         summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1692
1693         summary += self.announce(short_summary, 0)
1694
1695         return (summary, short_summary)
1696
1697     ###########################################################################
1698
1699     def close_bugs(self, summary, action):
1700         """
1701         Send mail to close bugs as instructed by the closes field in the changes file.
1702         Also add a line to summary if any work was done.
1703
1704         @type summary: string
1705         @param summary: summary text, as given by L{build_summaries}
1706
1707         @type action: bool
1708         @param action: If set to false, no real action will be done.
1709
1710         @rtype: string
1711         @return: summary. If action was taken, extended by the list of closed bugs.
1712
1713         """
1714
1715         template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1716
1717         bugs = self.pkg.changes["closes"].keys()
1718
1719         if not bugs:
1720             return summary
1721
1722         bugs.sort()
1723         summary += "Closing bugs: "
1724         for bug in bugs:
1725             summary += "%s " % (bug)
1726             if action:
1727                 self.update_subst()
1728                 self.Subst["__BUG_NUMBER__"] = bug
1729                 if self.pkg.changes["distribution"].has_key("stable"):
1730                     self.Subst["__STABLE_WARNING__"] = """
1731 Note that this package is not part of the released stable Debian
1732 distribution.  It may have dependencies on other unreleased software,
1733 or other instabilities.  Please take care if you wish to install it.
1734 The update will eventually make its way into the next released Debian
1735 distribution."""
1736                 else:
1737                     self.Subst["__STABLE_WARNING__"] = ""
1738                 mail_message = utils.TemplateSubst(self.Subst, template)
1739                 utils.send_mail(mail_message)
1740
1741                 # Clear up after ourselves
1742                 del self.Subst["__BUG_NUMBER__"]
1743                 del self.Subst["__STABLE_WARNING__"]
1744
1745         if action and self.logger:
1746             self.logger.log(["closing bugs"] + bugs)
1747
1748         summary += "\n"
1749
1750         return summary
1751
1752     ###########################################################################
1753
1754     def announce(self, short_summary, action):
1755         """
1756         Send an announce mail about a new upload.
1757
1758         @type short_summary: string
1759         @param short_summary: Short summary text to include in the mail
1760
1761         @type action: bool
1762         @param action: If set to false, no real action will be done.
1763
1764         @rtype: string
1765         @return: Textstring about action taken.
1766
1767         """
1768
1769         cnf = Config()
1770         announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
1771
1772         # Only do announcements for source uploads with a recent dpkg-dev installed
1773         if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
1774            self.pkg.changes["architecture"].has_key("source"):
1775             return ""
1776
1777         lists_done = {}
1778         summary = ""
1779
1780         self.Subst["__SHORT_SUMMARY__"] = short_summary
1781
1782         for dist in self.pkg.changes["distribution"].keys():
1783             announce_list = cnf.Find("Suite::%s::Announce" % (dist))
1784             if announce_list == "" or lists_done.has_key(announce_list):
1785                 continue
1786
1787             lists_done[announce_list] = 1
1788             summary += "Announcing to %s\n" % (announce_list)
1789
1790             if action:
1791                 self.update_subst()
1792                 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
1793                 if cnf.get("Dinstall::TrackingServer") and \
1794                    self.pkg.changes["architecture"].has_key("source"):
1795                     trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
1796                     self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
1797
1798                 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
1799                 utils.send_mail(mail_message)
1800
1801                 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
1802
1803         if cnf.FindB("Dinstall::CloseBugs"):
1804             summary = self.close_bugs(summary, action)
1805
1806         del self.Subst["__SHORT_SUMMARY__"]
1807
1808         return summary
1809
1810     ###########################################################################
1811     @session_wrapper
1812     def accept (self, summary, short_summary, session=None):
1813         """
1814         Accept an upload.
1815
1816         This moves all files referenced from the .changes into the pool,
1817         sends the accepted mail, announces to lists, closes bugs and
1818         also checks for override disparities. If enabled it will write out
1819         the version history for the BTS Version Tracking and will finally call
1820         L{queue_build}.
1821
1822         @type summary: string
1823         @param summary: Summary text
1824
1825         @type short_summary: string
1826         @param short_summary: Short summary
1827         """
1828
1829         cnf = Config()
1830         stats = SummaryStats()
1831
1832         print "Installing."
1833         self.logger.log(["installing changes", self.pkg.changes_file])
1834
1835         poolfiles = []
1836
1837         # Add the .dsc file to the DB first
1838         for newfile, entry in self.pkg.files.items():
1839             if entry["type"] == "dsc":
1840                 source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
1841                 for j in pfs:
1842                     poolfiles.append(j)
1843
1844         # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
1845         for newfile, entry in self.pkg.files.items():
1846             if entry["type"] == "deb":
1847                 poolfiles.append(add_deb_to_db(self, newfile, session))
1848
1849         # If this is a sourceful diff-only upload that is moving
1850         # cross-component we need to copy the .orig files into the new
1851         # component too for the same reasons as above.
1852         # XXX: mhy: I think this should be in add_dsc_to_db
1853         if self.pkg.changes["architecture"].has_key("source"):
1854             for orig_file in self.pkg.orig_files.keys():
1855                 if not self.pkg.orig_files[orig_file].has_key("id"):
1856                     continue # Skip if it's not in the pool
1857                 orig_file_id = self.pkg.orig_files[orig_file]["id"]
1858                 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
1859                     continue # Skip if the location didn't change
1860
1861                 # Do the move
1862                 oldf = get_poolfile_by_id(orig_file_id, session)
1863                 old_filename = os.path.join(oldf.location.path, oldf.filename)
1864                 old_dat = {'size': oldf.filesize,   'md5sum': oldf.md5sum,
1865                            'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
1866
1867                 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
1868
1869                 # TODO: Care about size/md5sum collisions etc
1870                 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
1871
1872                 # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
1873                 if newf is None:
1874                     utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
1875                     newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
1876
1877                     session.flush()
1878
1879                     # Don't reference the old file from this changes; rebuild
1880                     # the list rather than removing entries while iterating
1881                     # over it, which would skip items.
1882                     poolfiles = [p for p in poolfiles if p.file_id != oldf.file_id]
1883
1884                     poolfiles.append(newf)
1885
1886                     # Fix up the DSC references
1887                     toremove = []
1888
1889                     for df in source.srcfiles:
1890                         if df.poolfile.file_id == oldf.file_id:
1891                             # Add a new DSC entry and mark the old one for deletion
1892                             # Don't do it in the loop so we don't change the thing we're iterating over
1893                             newdscf = DSCFile()
1894                             newdscf.source_id = source.source_id
1895                             newdscf.poolfile_id = newf.file_id
1896                             session.add(newdscf)
1897
1898                             toremove.append(df)
1899
1900                     for df in toremove:
1901                         session.delete(df)
1902
1903                     # Flush our changes
1904                     session.flush()
1905
1906                     # Make sure that our source object is up-to-date
1907                     session.expire(source)
1908
1909         # Add changelog information to the database
1910         self.store_changelog()
1911
1912         # Install the files into the pool
1913         for newfile, entry in self.pkg.files.items():
1914             destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
1915             utils.move(newfile, destination)
1916             self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
1917             stats.accept_bytes += float(entry["size"])
1918
1919         # Copy the .changes file across for suites which need it.
1920         copy_changes = {}
1921         for suite_name in self.pkg.changes["distribution"].keys():
1922             if cnf.has_key("Suite::%s::CopyChanges" % (suite_name)):
1923                 copy_changes[cnf["Suite::%s::CopyChanges" % (suite_name)]] = ""
1924
1925         for dest in copy_changes.keys():
1926             utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
1927
1928         # We're done - commit the database changes
1929         session.commit()
1930         # Our SQL session will automatically start a new transaction after
1931         # the last commit
1932
1933         # Move the .changes into the 'done' directory
1934         utils.move(self.pkg.changes_file,
1935                    os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))
1936
1937         if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
1938             UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
1939
1940         self.update_subst()
1941         self.Subst["__SUITE__"] = ""
1942         self.Subst["__SUMMARY__"] = summary
1943         mail_message = utils.TemplateSubst(self.Subst,
1944                                            os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
1945         utils.send_mail(mail_message)
1946         self.announce(short_summary, 1)
1947
1948         ## Helper stuff for DebBugs Version Tracking
1949         if cnf.Find("Dir::Queue::BTSVersionTrack"):
1950             if self.pkg.changes["architecture"].has_key("source"):
1951                 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
1952                 version_history = os.fdopen(fd, 'w')
1953                 version_history.write(self.pkg.dsc["bts changelog"])
1954                 version_history.close()
1955                 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
1956                                       self.pkg.changes_file[:-8]+".versions")
1957                 os.rename(temp_filename, filename)
1958                 os.chmod(filename, 0644)
1959
1960             # Write out the binary -> source mapping.
1961             (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
1962             debinfo = os.fdopen(fd, 'w')
1963             for name, entry in sorted(self.pkg.files.items()):
1964                 if entry["type"] == "deb":
1965                     line = " ".join([entry["package"], entry["version"],
1966                                      entry["architecture"], entry["source package"],
1967                                      entry["source version"]])
1968                     debinfo.write(line+"\n")
1969             debinfo.close()
1970             filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
1971                                   self.pkg.changes_file[:-8]+".debinfo")
1972             os.rename(temp_filename, filename)
1973             os.chmod(filename, 0644)
1974
1975         session.commit()
1976
1977         # Set up our copy queues (e.g. buildd queues)
1978         for suite_name in self.pkg.changes["distribution"].keys():
1979             suite = get_suite(suite_name, session)
1980             for q in suite.copy_queues:
1981                 for f in poolfiles:
1982                     q.add_file_from_pool(f)
1983
1984         session.commit()
1985
1986         # Finally...
1987         stats.accept_count += 1
1988
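    # For reference: the .debinfo file written above holds one line per binary
    # in the upload, of the form
    #
    #   package version architecture source-package source-version
    #
    # e.g. (illustrative values): "hello 2.4-1 amd64 hello 2.4-1".
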
1989     def check_override(self):
1990         """
1991         Checks override entries for validity. Mails "Override disparity" warnings,
1992         if that feature is enabled.
1993
1994         Abandons the check if
1995           - override disparity checks are disabled
1996           - mail sending is disabled
1997         """
1998
1999         cnf = Config()
2000
2001         # Abandon the check if override disparity checks have been disabled
2002         if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
2003             return
2004
2005         summary = self.pkg.check_override()
2006
2007         if summary == "":
2008             return
2009
2010         overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2011
2012         self.update_subst()
2013         self.Subst["__SUMMARY__"] = summary
2014         mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2015         utils.send_mail(mail_message)
2016         del self.Subst["__SUMMARY__"]
2017
2018     ###########################################################################
2019
2020     def remove(self, from_dir=None):
2021         """
2022         Used (for instance) in p-u to remove the package from unchecked
2023
2024         Also removes the package from holding area.
2025         """
2026         if from_dir is None:
2027             from_dir = self.pkg.directory
2028         h = Holding()
2029
2030         for f in self.pkg.files.keys():
2031             os.unlink(os.path.join(from_dir, f))
2032             if os.path.exists(os.path.join(h.holding_dir, f)):
2033                 os.unlink(os.path.join(h.holding_dir, f))
2034
2035         os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2036         if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2037             os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2038
2039     ###########################################################################
2040
2041     def move_to_queue (self, queue):
2042         """
2043         Move files to a destination queue using the permissions in the table
2044         """
2045         h = Holding()
2046         utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2047                    queue.path, perms=int(queue.change_perms, 8))
2048         for f in self.pkg.files.keys():
2049             utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
2050
2051     ###########################################################################
2052
2053     def force_reject(self, reject_files):
2054         """
2055         Forcefully move files from the current directory to the
2056         reject directory.  If any file already exists in the reject
2057         directory it will be moved to the morgue to make way for
2058         the new file.
2059
2060         @type reject_files: dict
2061         @param reject_files: file dictionary
2062
2063         """
2064
2065         cnf = Config()
2066
2067         for file_entry in reject_files:
2068             # Skip any files which don't exist or which we don't have permission to copy.
2069             if not os.access(file_entry, os.R_OK):
2070                 continue
2071
2072             dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
2073
2074             try:
2075                 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
2076             except OSError, e:
2077                 # File exists?  Let's find a new name by adding a number
2078                 if e.errno == errno.EEXIST:
2079                     try:
2080                         dest_file = utils.find_next_free(dest_file, 255)
2081                     except NoFreeFilenameError:
2082                         # Something's either gone badly Pete Tong, or
2083                         # someone is trying to exploit us.
2084                         utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))
2085                         return
2086
2087                     # Make sure we really got it
2088                     try:
2089                         dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2090                     except OSError, e:
2091                         # Likewise
2092                         utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
2093                         return
2094                 else:
2095                     raise
2096             # If we got here, we own the destination file, so we can
2097             # safely overwrite it.
2098             utils.move(file_entry, dest_file, 1, perms=0660)
2099             os.close(dest_fd)
2100
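    # The loop above relies on os.open with O_CREAT|O_EXCL to claim the
    # destination name atomically.  A minimal standalone sketch of the idiom
    # (dest_file is a placeholder):
    #
    #   try:
    #       dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
    #   except OSError, e:
    #       if e.errno != errno.EEXIST:
    #           raise
    #       # somebody already owns that name; find another or give up
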
2101     ###########################################################################
2102     def do_reject (self, manual=0, reject_message="", notes=""):
2103         """
2104         Reject an upload. If C{manual} is true and no reject message was
2105         given, spawn an editor so the user can write one.
2106
2107         @type manual: bool
2108         @param manual: manual or automated rejection
2109
2110         @type reject_message: string
2111         @param reject_message: A reject message
2112
2113         @return: 0 on rejection; 1 if a manual rejection was abandoned
2114
2115         """
2116         # If we weren't given a manual rejection message, spawn an
2117         # editor so the user can add one in...
2118         if manual and not reject_message:
2119             (fd, temp_filename) = utils.temp_filename()
2120             temp_file = os.fdopen(fd, 'w')
2121             if len(notes) > 0:
2122                 for note in notes:
2123                     temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
2124                                     % (note.author, note.version, note.notedate, note.comment))
2125             temp_file.close()
2126             editor = os.environ.get("EDITOR","vi")
2127             answer = 'E'
2128             while answer == 'E':
2129                 os.system("%s %s" % (editor, temp_filename))
2130                 temp_fh = utils.open_file(temp_filename)
2131                 reject_message = "".join(temp_fh.readlines())
2132                 temp_fh.close()
2133                 print "Reject message:"
2134                 print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
2135                 prompt = "[R]eject, Edit, Abandon, Quit ?"
2136                 answer = "XXX"
2137                 while prompt.find(answer) == -1:
2138                     answer = utils.our_raw_input(prompt)
2139                     m = re_default_answer.search(prompt)
2140                     if answer == "":
2141                         answer = m.group(1)
2142                     answer = answer[:1].upper()
2143             os.unlink(temp_filename)
2144             if answer == 'A':
2145                 return 1
2146             elif answer == 'Q':
2147                 sys.exit(0)
2148
2149         print "Rejecting.\n"
2150
2151         cnf = Config()
2152
2153         reason_filename = self.pkg.changes_file[:-8] + ".reason"
2154         reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2155
2156         # Move all the files into the reject directory
2157         reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2158         self.force_reject(reject_files)
2159
2160         # If we fail here someone is probably trying to exploit the race
2161         # so let's just raise an exception ...
2162         if os.path.exists(reason_filename):
2163             os.unlink(reason_filename)
2164         reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2165
2166         rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
2167
2168         self.update_subst()
2169         if not manual:
2170             self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2171             self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2172             self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2173             os.write(reason_fd, reject_message)
2174             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2175         else:
2176             # Build up the rejection email
2177             user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2178             self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2179             self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2180             self.Subst["__REJECT_MESSAGE__"] = ""
2181             self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2182             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2183             # Write the rejection email out as the <foo>.reason file
2184             os.write(reason_fd, reject_mail_message)
2185
2186         del self.Subst["__REJECTOR_ADDRESS__"]
2187         del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2188         del self.Subst["__CC__"]
2189
2190         os.close(reason_fd)
2191
2192         # Send the rejection mail
2193         utils.send_mail(reject_mail_message)
2194
2195         if self.logger:
2196             self.logger.log(["rejected", self.pkg.changes_file])
2197
2198         return 0
2199
2200     ################################################################################
2201     def in_override_p(self, package, component, suite, binary_type, filename, session):
2202         """
2203         Check if a package already has override entries in the DB
2204
2205         @type package: string
2206         @param package: package name
2207
2208         @type component: string
2209         @param component: component name
2210
2211         @type suite: string
2212         @param suite: suite name
2213
2214         @type binary_type: string
2215         @param binary_type: type of the package
2216
2217         @type filename: string
2218         @param filename: filename we check
2219
2220         @return: the database result. But no one cares anyway.
2221
2222         """
2223
2224         cnf = Config()
2225
2226         if binary_type == "": # must be source
2227             file_type = "dsc"
2228         else:
2229             file_type = binary_type
2230
2231         # Override suite name; used for example with proposed-updates
2232         if cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
2233             suite = cnf["Suite::%s::OverrideSuite" % (suite)]
2234
2235         result = get_override(package, suite, component, file_type, session)
2236
2237         # If checking for a source package fall back on the binary override type
2238         if file_type == "dsc" and len(result) < 1:
2239             result = get_override(package, suite, component, ['deb', 'udeb'], session)
2240
2241         # Remember the section and priority so we can check them later if appropriate
2242         if len(result) > 0:
2243             result = result[0]
2244             self.pkg.files[filename]["override section"] = result.section.section
2245             self.pkg.files[filename]["override priority"] = result.priority.priority
2246             return result
2247
2248         return None
2249
2250     ################################################################################
2251     def get_anyversion(self, sv_list, suite):
2252         """
2253         @type sv_list: list
2254         @param sv_list: list of (suite, version) tuples to check
2255
2256         @type suite: string
2257         @param suite: suite name
2258
2259         Returns the highest version found in C{suite} or any suite it enhances.
2260         """
2261         Cnf = Config()
2262         anyversion = None
2263         anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
2264         for (s, v) in sv_list:
2265             if s in [ x.lower() for x in anysuite ]:
2266                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2267                     anyversion = v
2268
2269         return anyversion
2270
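    # apt_pkg.VersionCompare follows dpkg version ordering: the result is
    # negative if the first argument is older, zero if equal and positive if
    # newer.  Illustrative calls (assuming apt_pkg.init() has been run):
    #
    #   apt_pkg.VersionCompare("1.0-1", "1.0-2")   # < 0, first is older
    #   apt_pkg.VersionCompare("2:0.9", "1.5")     # > 0, the epoch dominates
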
2271     ################################################################################
2272
2273     def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2274         """
2275         @type sv_list: list
2276         @param sv_list: list of (suite, version) tuples to check
2277
2278         @type filename: string
2279         @param filename: filename of the package being checked (for messages)
2280
2281         @type new_version: string
2282         @param new_version: version of the package being uploaded
2283
2284         Ensure versions are newer than existing packages in target
2285         suites and that cross-suite version checking rules as
2286         set out in the conf file are satisfied.
2287         """
2288
2289         cnf = Config()
2290
2291         # Check versions for each target suite
2292         for target_suite in self.pkg.changes["distribution"].keys():
2293             must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
2294             must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
2295
2296             # Enforce "must be newer than target suite" even if conffile omits it
2297             if target_suite not in must_be_newer_than:
2298                 must_be_newer_than.append(target_suite)
2299
2300             for (suite, existent_version) in sv_list:
2301                 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
2302
2303                 if suite in must_be_newer_than and sourceful and vercmp < 1:
2304                     self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2305
2306                 if suite in must_be_older_than and vercmp > -1:
2307                     cansave = 0
2308
2309                     if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2310                         # we really use the other suite, ignoring the conflicting one ...
2311                         addsuite = self.pkg.changes["distribution-version"][suite]
2312
2313                         add_version = self.get_anyversion(sv_list, addsuite)
2314                         target_version = self.get_anyversion(sv_list, target_suite)
2315
2316                         if not add_version:
2317                             # not add_version can only happen if we map to a suite
2318                             # that doesn't enhance the suite we're propup'ing from.
2319                             # so "propup-ver x a b c; map a d" is a problem only if
2320                             # d doesn't enhance a.
2321                             #
2322                             # i think we could always propagate in this case, rather
2323                             # than complaining. either way, this isn't a REJECT issue
2324                             #
2325                             # And - we really should complain to the dorks who configured dak
2326                             self.warnings.append("%s is mapped to, but not enhanced by, %s - adding anyway" % (suite, addsuite))
2327                             self.pkg.changes.setdefault("propdistribution", {})
2328                             self.pkg.changes["propdistribution"][addsuite] = 1
2329                             cansave = 1
2330                         elif not target_version:
2331                             # not target_version is true when the package is NEW
2332                             # we could just stick with the "...old version..." REJECT
2333                             # for this, I think.
2334                             self.rejects.append("Won't propagate NEW packages.")
2335                         elif apt_pkg.VersionCompare(new_version, add_version) < 0:
2336                             # propagation would be redundant. no need to reject though.
2337                             self.warnings.append("ignoring version conflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2338                             cansave = 1
2339                         elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2340                              apt_pkg.VersionCompare(add_version, target_version) >= 0:
2341                             # propagate!!
2342                             self.warnings.append("Propagating upload to %s" % (addsuite))
2343                             self.pkg.changes.setdefault("propdistribution", {})
2344                             self.pkg.changes["propdistribution"][addsuite] = 1
2345                             cansave = 1
2346
2347                     if not cansave:
2348                         self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2349
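    # A hedged sketch of the per-suite version-check configuration consulted
    # above, in dak's apt-style config syntax (the suite names and layout are
    # illustrative assumptions):
    #
    #   Suite::unstable::VersionChecks {
    #     MustBeNewerThan { "stable"; "testing"; };
    #     MustBeOlderThan { "experimental"; };
    #     Enhances { "unstable"; };
    #   };
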
2350     ################################################################################
2351     def check_binary_against_db(self, filename, session):
2352         # Ensure version is sane
2353         q = session.query(BinAssociation)
2354         q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[filename]["package"])
2355         q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[filename]["architecture"], 'all']))
2356
2357         self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
2358                                        filename, self.pkg.files[filename]["version"], sourceful=False)
2359
2360         # Check for any existing copies of the file
2361         q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2362         q = q.filter_by(version=self.pkg.files[filename]["version"])
2363         q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2364
2365         if q.count() > 0:
2366             self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
2367
2368     ################################################################################
2369
2370     def check_source_against_db(self, filename, session):
2371         source = self.pkg.dsc.get("source")
2372         version = self.pkg.dsc.get("version")
2373
2374         # Ensure version is sane
2375         q = session.query(SrcAssociation)
2376         q = q.join(DBSource).filter(DBSource.source==source)
2377
2378         self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
2379                                        filename, version, sourceful=True)
2380
2381     ################################################################################
2382     def check_dsc_against_db(self, filename, session):
2383         """
2384
2385         @warning: NB: this function can remove entries from the 'files' index [if
2386          the orig tarball is a duplicate of the one in the archive]; if
2387          you're iterating over 'files' and call this function as part of
2388          the loop, be sure to add a check to the top of the loop to
2389          ensure you haven't just tried to dereference the deleted entry.
2390
2391         """
2392
2393         Cnf = Config()
2394         self.pkg.orig_files = {} # XXX: do we need to clear it?
2395         orig_files = self.pkg.orig_files
2396
2397         # Try and find all files mentioned in the .dsc.  This has
2398         # to work harder to cope with the multiple possible
2399         # locations of an .orig.tar.gz.
2400         # The ordering on the select is needed to pick the newest orig
2401         # when it exists in multiple places.
2402         for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2403             found = None
2404             if self.pkg.files.has_key(dsc_name):
2405                 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2406                 actual_size = int(self.pkg.files[dsc_name]["size"])
2407                 found = "%s in incoming" % (dsc_name)
2408
2409                 # Check the file does not already exist in the archive
2410                 ql = get_poolfile_like_name(dsc_name, session)
2411
2412                 # Keep only filenames ending in the one we're after; rebuild
2413                 # the list rather than removing entries while iterating over
2414                 # it, which would skip items.
2415                 ql = [i for i in ql if i.filename.endswith(dsc_name)]
2416
2417                 # "[dak] has not broken them.  [dak] has fixed a
2418                 # brokenness.  Your crappy hack exploited a bug in
2419                 # the old dinstall."
2420                 #
2421                 # "(Come on!  I thought it was always obvious that
2422                 # one just doesn't release different files with
2423                 # the same name and version.)"
2424                 #                        -- ajk@ on d-devel@l.d.o
2425
2426                 if len(ql) > 0:
2427                     # Ignore exact matches for .orig.tar.gz
2428                     match = 0
2429                     if re_is_orig_source.match(dsc_name):
2430                         for i in ql:
2431                             if self.pkg.files.has_key(dsc_name) and \
2432                                int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2433                                self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2434                                 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2435                                 # TODO: Don't delete the entry, just mark it as not needed
2436                                 # This would fix the stupidity of changing something we often iterate over
2437                                 # whilst we're doing it
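                                     # A sketch of that fix (hypothetical, not
                                     # wired up anywhere): set a flag, e.g.
                                     #     self.pkg.files[dsc_name]["skip"] = True
                                     # and have the iterating callers honour it,
                                     # rather than deleting the key below.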
2438                                 del self.pkg.files[dsc_name]
2439                                 dsc_entry["files id"] = i.file_id
2440                                 if not orig_files.has_key(dsc_name):
2441                                     orig_files[dsc_name] = {}
2442                                 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2443                                 match = 1
2444
2445                                 # Don't bitch that we couldn't find this file later
2446                                 try:
2447                                     self.later_check_files.remove(dsc_name)
2448                                 except ValueError:
2449                                     pass
2450
2451
2452                     if not match:
2453                         self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2454
2455             elif re_is_orig_source.match(dsc_name):
2456                 # Check in the pool
2457                 ql = get_poolfile_like_name(dsc_name, session)
2458
2459                 # Keep only results whose filename ends in the name from
                     # the .dsc, again without mutating ql mid-iteration.
2460                 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2461                 ql = [ i for i in ql if i.filename.endswith(dsc_name) ]
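                     # What that TODO might look like (an untested sketch,
                     # assuming PoolFile.filename is queryable as a column):
                     #     session.query(PoolFile).filter(
                     #         PoolFile.filename.like('%/' + dsc_name))
                     # plus a separate exact match on dsc_name itself.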
2464
2465                 if len(ql) > 0:
2466                     # Unfortunately, we may get more than one match here if,
2467                     # for example, the package was in potato but had an -sa
2468                     # upload in woody.  So we need to choose the right one.
2469
2470                     # default to something sane in case we don't match any or have only one
2471                     x = ql[0]
2472
2473                     if len(ql) > 1:
2474                         for i in ql:
2475                             old_file = os.path.join(i.location.path, i.filename)
2476                             old_file_fh = utils.open_file(old_file)
2477                             actual_md5 = apt_pkg.md5sum(old_file_fh)
2478                             old_file_fh.close()
2479                             actual_size = os.stat(old_file)[stat.ST_SIZE]
2480                             if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2481                                 x = i
2482
                         # Use the chosen match x here, not i, which is just
                         # whatever the last loop iteration left behind.
2483                     old_file = os.path.join(x.location.path, x.filename)
2484                     old_file_fh = utils.open_file(old_file)
2485                     actual_md5 = apt_pkg.md5sum(old_file_fh)
2486                     old_file_fh.close()
2487                     actual_size = os.stat(old_file)[stat.ST_SIZE]
2488                     found = old_file
2489                     suite_type = x.location.archive_type
2490                     # need this for updating dsc_files in install()
2491                     dsc_entry["files id"] = x.file_id
2492                     # See install() in process-accepted...
2493                     if not orig_files.has_key(dsc_name):
2494                         orig_files[dsc_name] = {}
2495                     orig_files[dsc_name]["id"] = x.file_id
2496                     orig_files[dsc_name]["path"] = old_file
2497                     orig_files[dsc_name]["location"] = x.location.location_id
2498                 else:
2499                     # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2500                     # Not there? Check the queue directories...
2501                     for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2502                         if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2503                             continue
2504                         in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2505                         if os.path.exists(in_otherdir):
2506                             in_otherdir_fh = utils.open_file(in_otherdir)
2507                             actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2508                             in_otherdir_fh.close()
2509                             actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2510                             found = in_otherdir
2511                             if not orig_files.has_key(dsc_name):
2512                                 orig_files[dsc_name] = {}
2513                             orig_files[dsc_name]["path"] = in_otherdir
2514
2515                     if not found:
2516                         self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2517                         continue
2518             else:
2519                 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2520                 continue
2521             if actual_md5 != dsc_entry["md5sum"]:
2522                 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2523             if actual_size != int(dsc_entry["size"]):
2524                 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2525
2526     ################################################################################
2527     # This is used by process-new and process-holding to recheck a changes file
2528     # at the time we're running.  It mainly wraps various other internal functions
2529     # and is similar to accepted_checks - these should probably be tidied up
2530     # and combined
2531     def recheck(self, session):
2532         cnf = Config()
2533         for f in self.pkg.files.keys():
2534             # The .orig.tar.gz can disappear out from under us if it's a
2535             # duplicate of one in the archive.
2536             if not self.pkg.files.has_key(f):
2537                 continue
2538
2539             entry = self.pkg.files[f]
2540
2541             # Check that the source still exists
2542             if entry["type"] == "deb":
2543                 source_version = entry["source version"]
2544                 source_package = entry["source package"]
2545                 if not self.pkg.changes["architecture"].has_key("source") \
2546                    and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
2547                     source_epochless_version = re_no_epoch.sub('', source_version)
2548                     dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2549                     found = False
2550                     for q in ["Embargoed", "Unembargoed", "Newstage"]:
2551                         if cnf.has_key("Dir::Queue::%s" % (q)):
2552                             if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
2553                                 found = True
2554                     if not found:
2555                         self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2556
2557             # Version and file overwrite checks
2558             if entry["type"] == "deb":
2559                 self.check_binary_against_db(f, session)
2560             elif entry["type"] == "dsc":
2561                 self.check_source_against_db(f, session)
2562                 self.check_dsc_against_db(f, session)
2563
2564     ################################################################################
2565     def accepted_checks(self, overwrite_checks, session):
2566         # Recheck anything that relies on the database, since that isn't
2567         # frozen between accept and the time this runs when called from p-a.
2568
2569         # overwrite_checks is set to False when installing to stable/oldstable
2570
2571         propagate = {}
2572         nopropagate = {}
2573
2574         # Find the .dsc (again)
2575         dsc_filename = None
2576         for f in self.pkg.files.keys():
2577             if self.pkg.files[f]["type"] == "dsc":
2578                 dsc_filename = f
2579
2580         for checkfile in self.pkg.files.keys():
2581             # The .orig.tar.gz can disappear out from under us if it's a
2582             # duplicate of one in the archive.
2583             if not self.pkg.files.has_key(checkfile):
2584                 continue
2585
2586             entry = self.pkg.files[checkfile]
2587
2588             # Check that the source still exists
2589             if entry["type"] == "deb":
2590                 source_version = entry["source version"]
2591                 source_package = entry["source package"]
2592                 if not self.pkg.changes["architecture"].has_key("source") \
2593                and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
2594                     self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2595
2596             # Version and file overwrite checks
2597             if overwrite_checks:
2598                 if entry["type"] == "deb":
2599                     self.check_binary_against_db(checkfile, session)
2600                 elif entry["type"] == "dsc":
2601                     self.check_source_against_db(checkfile, session)
2602                     self.check_dsc_against_db(dsc_filename, session)
2603
2604             # propagate in the case it is in the override tables:
2605             for suite in self.pkg.changes.get("propdistribution", {}).keys():
2606                 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2607                     propagate[suite] = 1
2608                 else:
2609                     nopropagate[suite] = 1
2610
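             # A suite is propagated only if every file has an override
             # there; a single missing override vetoes the whole suite.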
2611         for suite in propagate.keys():
2612             if suite in nopropagate:
2613                 continue
2614             self.pkg.changes["distribution"][suite] = 1
2615
2616         for checkfile in self.pkg.files.keys():
                 # Look the entry up afresh rather than reusing the stale
                 # 'entry' left over from the loop above.
                 entry = self.pkg.files[checkfile]
2617             # Check the package is still in the override tables
2618             for suite in self.pkg.changes["distribution"].keys():
2619                 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2620                     self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2621
2622     ################################################################################
2623     # This is not really a reject, but an unaccept, but since a) the code for
2624     # that is non-trivial (reopen bugs, unannounce etc.), b) this should be
2625     # extremely rare, for now we'll go with whining at our admin folks...
2626
2627     def do_unaccept(self):
2628         cnf = Config()
2629
2630         self.update_subst()
2631         self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2632         self.Subst["__REJECT_MESSAGE__"] = self.package_info()
2633         self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2634         self.Subst["__BCC__"] = "X-DAK: dak process-accepted"
2635         if cnf.has_key("Dinstall::Bcc"):
2636             self.Subst["__BCC__"] += "\nBcc: %s" % (cnf["Dinstall::Bcc"])
2637
2638         template = os.path.join(cnf["Dir::Templates"], "process-accepted.unaccept")
2639
2640         reject_mail_message = utils.TemplateSubst(self.Subst, template)
2641
2642         # Write the rejection email out as the <foo>.reason file
2643         reason_filename = os.path.basename(self.pkg.changes_file[:-8]) + ".reason"
2644         reject_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2645
2646         # If we fail here someone is probably trying to exploit the race
2647         # so let's just raise an exception ...
2648         if os.path.exists(reject_filename):
2649             os.unlink(reject_filename)
2650
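             # O_CREAT|O_EXCL makes this raise OSError (EEXIST) if the file
             # reappears between the unlink above and now, so the race
             # aborts loudly rather than clobbering anything.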
2651         fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2652         os.write(fd, reject_mail_message)
2653         os.close(fd)
2654
2655         utils.send_mail(reject_mail_message)
2656
2657         del self.Subst["__REJECTOR_ADDRESS__"]
2658         del self.Subst["__REJECT_MESSAGE__"]
2659         del self.Subst["__CC__"]
2660
2661     ################################################################################
2662     # If any file of an upload has a recent mtime then chances are good
2663     # the file is still being uploaded.
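         # (Dinstall::SkipTime is that threshold, in seconds.)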
2664
2665     def upload_too_new(self):
2666         cnf = Config()
2667         too_new = False
2668         # Move back to the original directory to get accurate time stamps
2669         cwd = os.getcwd()
2670         os.chdir(self.pkg.directory)
2671         file_list = self.pkg.files.keys()
2672         file_list.extend(self.pkg.dsc_files.keys())
2673         file_list.append(self.pkg.changes_file)
2674         for f in file_list:
2675             try:
2676                 last_modified = time.time() - os.path.getmtime(f)
2677                 if last_modified < int(cnf["Dinstall::SkipTime"]):
2678                     too_new = True
2679                     break
2680             except (OSError, ValueError):
                     # The file vanished between listing and stat(), or
                     # SkipTime isn't a sane integer; either way, ignore it.
2681                 pass
2682
2683         os.chdir(cwd)
2684         return too_new
2685
2686     def store_changelog(self):
2687
2688         # Skip binary-only upload if it is not a bin-NMU
2689         if not self.pkg.changes['architecture'].has_key('source'):
2690             from daklib.regexes import re_bin_only_nmu
2691             if not re_bin_only_nmu.search(self.pkg.changes['version']):
2692                 return
2693
2694         session = DBConn().session()
2695
2696         # Check if upload already has a changelog entry
2697         query = """SELECT changelog_id FROM changes WHERE source = :source
2698                    AND version = :version AND architecture = :architecture AND changelog_id != 0"""
2699         if session.execute(query, {'source': self.pkg.changes['source'], \
2700                                    'version': self.pkg.changes['version'], \
2701                                    'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
2702             session.commit()
2703             return
2704
2705         # Add current changelog text into changelogs_text table, return created ID
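             # (INSERT ... RETURNING is PostgreSQL syntax, which is fine:
             # dak assumes postgres throughout.)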
2706         query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
2707         ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]
2708
2709         # Link ID to the upload available in changes table
2710         query = """UPDATE changes SET changelog_id = :id WHERE source = :source
2711                    AND version = :version AND architecture = :architecture"""
2712         session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
2713                                 'version': self.pkg.changes['version'], \
2714                                 'architecture': " ".join(self.pkg.changes['architecture'].keys())})
2715
2716         session.commit()