]> git.decadent.org.uk Git - dak.git/blob - daklib/queue.py
misc fixes
[dak.git] / daklib / queue.py
1 #!/usr/bin/env python
2 # vim:set et sw=4:
3
4 """
5 Queue utility functions for dak
6
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009  Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
11 """
12
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
17
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
21 # GNU General Public License for more details.
22
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
26
27 ###############################################################################
28
29 import cPickle
30 import errno
31 import os
32 import pg
33 import stat
34 import sys
35 import time
36 import apt_inst
37 import apt_pkg
38 import utils
39 import commands
40 import shutil
41 from types import *
42
43 from dak_exceptions import *
44 from changes import *
45 from regexes import *
46 from config import Config
47 from holding import Holding
48 from dbconn import *
49 from summarystats import SummaryStats
50 from utils import parse_changes
51 from textutils import fix_maintainer
52 from binary import Binary
53
54 ###############################################################################
55
def get_type(f, session):
    """
    Get the file type of C{f}

    @type f: dict
    @param f: file entry from Changes object

    @type session: SQLA Session
    @param session: SQL Alchemy session object

    @rtype: string
    @return: filetype

    """
    # Determine the type
    if f.has_key("dbtype"):
        # Fixed: read the dict parameter `f', not the builtin `file'
        file_type = f["dbtype"]
    elif f["type"] in [ "orig.tar.gz", "orig.tar.bz2", "tar.gz", "tar.bz2", "diff.gz", "diff.bz2", "dsc" ]:
        file_type = "dsc"
    else:
        # Fixed: file_type is unbound on this branch; report the raw type
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (f["type"]))

    # Validate the override type
    type_id = get_override_type(file_type, session)
    if type_id is None:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))

    return file_type
84
85 ################################################################################
86
87 # Determine what parts in a .changes are NEW
88
def determine_new(changes, files, warn=1):
    """
    Determine what parts in a C{changes} file are NEW.

    @type changes: Upload.Pkg.changes dict
    @param changes: Changes dictionary

    @type files: Upload.Pkg.files dict
    @param files: Files dictionary

    @type warn: bool
    @param warn: Warn if overrides are added for (old)stable

    @rtype: dict
    @return: dictionary of NEW components.

    """
    # Maps package name -> metadata dict (priority, section, type,
    # component, files, optionally othercomponents) for packages that
    # have no override yet.
    new = {}

    session = DBConn().session()

    # Build up a list of potentially new things
    for name, f in files.items():
        # Skip byhand elements
        if f["type"] == "byhand":
            continue
        pkg = f["package"]
        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f, session)
        component = f["component"]

        # Source packages always carry the pseudo-priority "source"
        if file_type == "dsc":
            priority = "source"

        if not new.has_key(pkg):
            # First file seen for this package: record its metadata
            new[pkg] = {}
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        else:
            old_type = new[pkg]["type"]
            if old_type != file_type:
                # source gets trumped by deb or udeb
                if old_type == "dsc":
                    new[pkg]["priority"] = priority
                    new[pkg]["section"] = section
                    new[pkg]["type"] = file_type
                    new[pkg]["component"] = component

        new[pkg]["files"].append(name)

        # Remember when the package is already present in another component
        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]

    # Drop any package that already has an override in one of the target
    # suites, and clear the "new" marker from its files.
    for suite in changes["suite"].keys():
        for pkg in new.keys():
            ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
            if len(ql) > 0:
                for file_entry in new[pkg]["files"]:
                    if files[file_entry].has_key("new"):
                        del files[file_entry]["new"]
                del new[pkg]

    if warn:
        for s in ['stable', 'oldstable']:
            if changes["suite"].has_key(s):
                print "WARNING: overrides will be added for %s!" % s
        for pkg in new.keys():
            if new[pkg].has_key("othercomponents"):
                print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])

    session.close()

    return new
166
167 ################################################################################
168
def check_valid(new):
    """
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages can not be udeb
      - source priority can only be assigned to dsc file types

    Invalid entries are flagged by setting their "section id" or
    "priority id" to -1.

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.

    """
    for pkg in new.keys():
        section_name = new[pkg]["section"]
        priority_name = new[pkg]["priority"]
        file_type = new[pkg]["type"]

        # Map the section name to its database id (-1 = unknown)
        section = get_section(section_name)
        if section is None:
            new[pkg]["section id"] = -1
        else:
            new[pkg]["section id"] = section.section_id

        # Map the priority name to its database id (-1 = unknown)
        priority = get_priority(priority_name)
        if priority is None:
            new[pkg]["priority id"] = -1
        else:
            new[pkg]["priority id"] = priority.priority_id

        # Sanity checks
        di = section_name.find("debian-installer") != -1

        # If d-i, we must be udeb and vice-versa
        if     (di and file_type not in ("udeb", "dsc")) or \
           (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1

        # If dsc we need to be source and vice-versa
        # Fixed: compare the priority *name*; `priority' is the database
        # object (or None), so comparing it to the string "source" always
        # failed and flagged every dsc as invalid.
        if (priority_name == "source" and file_type != "dsc") or \
           (priority_name != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1
210
211 ###############################################################################
212
def lookup_uid_from_fingerprint(fpr, session):
    """Return (uid, uid name, is_dm) for the key with fingerprint C{fpr}.

    If the fingerprint is unknown, uid is None, the name is empty and
    is_dm is False (a deliberately conservative default).
    """
    user = get_uid_from_fingerprint(fpr, session)

    if user is None:
        return (None, "", False)

    uid = user.uid
    uid_name = '' if user.name is None else user.name

    # Look up the matching fingerprint record (which we have to have) to
    # see whether the key lives on a debian-maintainer keyring.
    is_dm = False
    for fingerprint in user.fingerprint:
        if fingerprint.fingerprint == fpr:
            is_dm = fingerprint.keyring.debian_maintainer
            break

    return (uid, uid_name, is_dm)
235
236 ###############################################################################
237
238 # Used by Upload.check_timestamps
class TarTime(object):
    """Tar-walk callback collecting members with out-of-range mtimes.

    Used by Upload.check_timestamps: after a run, future_files maps member
    names to mtimes later than future_cutoff, and ancient_files maps member
    names to mtimes earlier than past_cutoff.
    """

    def __init__(self, future_cutoff, past_cutoff):
        self.reset()
        self.future_cutoff = future_cutoff
        self.past_cutoff = past_cutoff

    def reset(self):
        """Forget any members recorded so far."""
        self.future_files = {}
        self.ancient_files = {}

    def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
        """Record Name if its MTime falls outside the configured window."""
        too_new = MTime > self.future_cutoff
        too_old = MTime < self.past_cutoff
        if too_new:
            self.future_files[Name] = MTime
        if too_old:
            self.ancient_files[Name] = MTime
254
255 ###############################################################################
256
257 class Upload(object):
258     """
259     Everything that has to do with an upload processed.
260
261     """
262     def __init__(self):
263         self.logger = None
264         self.pkg = Changes()
265         self.reset()
266
267     ###########################################################################
268
269     def reset (self):
270         """ Reset a number of internal variables."""
271
272         # Initialize the substitution template map
273         cnf = Config()
274         self.Subst = {}
275         self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
276         self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
277         self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
278         self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
279
280         self.rejects = []
281         self.warnings = []
282         self.notes = []
283
284         self.pkg.reset()
285
286     def package_info(self):
287         msg = ''
288
289         if len(self.rejects) > 0:
290             msg += "Reject Reasons:\n"
291             msg += "\n".join(self.rejects)
292
293         if len(self.warnings) > 0:
294             msg += "Warnings:\n"
295             msg += "\n".join(self.warnings)
296
297         if len(self.notes) > 0:
298             msg += "Notes:\n"
299             msg += "\n".join(self.notes)
300
301         return msg
302
303     ###########################################################################
    def update_subst(self):
        """ Set up the per-package template substitution mappings """

        cnf = Config()

        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not self.pkg.changes.has_key("architecture") or not \
           isinstance(self.pkg.changes["architecture"], DictType):
            self.pkg.changes["architecture"] = { "Unknown" : "" }

        # and maintainer2047 may not exist.
        if not self.pkg.changes.has_key("maintainer2047"):
            self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]

        self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
        self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if self.pkg.changes["architecture"].has_key("source") and \
           self.pkg.changes["changedby822"] != "" and \
           (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):

            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
            # Mail goes to both the uploader (Changed-By) and the maintainer
            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
        else:
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")

        # A sponsored upload also gets a copy sent to the sponsor
        if "sponsoremail" in self.pkg.changes:
            self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]

        # Bcc the package-tracking server, if one is configured
        if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
            self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if cnf.get("Dinstall::OverrideMaintainer"):
            self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
            self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]

        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
        self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
349
350     ###########################################################################
351     def load_changes(self, filename):
352         """
353         @rtype boolean
354         @rvalue: whether the changes file was valid or not.  We may want to
355                  reject even if this is True (see what gets put in self.rejects).
356                  This is simply to prevent us even trying things later which will
357                  fail because we couldn't properly parse the file.
358         """
359         Cnf = Config()
360         self.pkg.changes_file = filename
361
362         # Parse the .changes field into a dictionary
363         try:
364             self.pkg.changes.update(parse_changes(filename))
365         except CantOpenError:
366             self.rejects.append("%s: can't read file." % (filename))
367             return False
368         except ParseChangesError, line:
369             self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
370             return False
371         except ChangesUnicodeError:
372             self.rejects.append("%s: changes file not proper utf-8" % (filename))
373             return False
374
375         # Parse the Files field from the .changes into another dictionary
376         try:
377             self.pkg.files.update(utils.build_file_list(self.pkg.changes))
378         except ParseChangesError, line:
379             self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
380             return False
381         except UnknownFormatError, format:
382             self.rejects.append("%s: unknown format '%s'." % (filename, format))
383             return False
384
385         # Check for mandatory fields
386         for i in ("distribution", "source", "binary", "architecture",
387                   "version", "maintainer", "files", "changes", "description"):
388             if not self.pkg.changes.has_key(i):
389                 # Avoid undefined errors later
390                 self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
391                 return False
392
393         # Strip a source version in brackets from the source field
394         if re_strip_srcver.search(self.pkg.changes["source"]):
395             self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])
396
397         # Ensure the source field is a valid package name.
398         if not re_valid_pkg_name.match(self.pkg.changes["source"]):
399             self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))
400
401         # Split multi-value fields into a lower-level dictionary
402         for i in ("architecture", "distribution", "binary", "closes"):
403             o = self.pkg.changes.get(i, "")
404             if o != "":
405                 del self.pkg.changes[i]
406
407             self.pkg.changes[i] = {}
408
409             for j in o.split():
410                 self.pkg.changes[i][j] = 1
411
412         # Fix the Maintainer: field to be RFC822/2047 compatible
413         try:
414             (self.pkg.changes["maintainer822"],
415              self.pkg.changes["maintainer2047"],
416              self.pkg.changes["maintainername"],
417              self.pkg.changes["maintaineremail"]) = \
418                    fix_maintainer (self.pkg.changes["maintainer"])
419         except ParseMaintError, msg:
420             self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
421                    % (filename, changes["maintainer"], msg))
422
423         # ...likewise for the Changed-By: field if it exists.
424         try:
425             (self.pkg.changes["changedby822"],
426              self.pkg.changes["changedby2047"],
427              self.pkg.changes["changedbyname"],
428              self.pkg.changes["changedbyemail"]) = \
429                    fix_maintainer (self.pkg.changes.get("changed-by", ""))
430         except ParseMaintError, msg:
431             self.pkg.changes["changedby822"] = ""
432             self.pkg.changes["changedby2047"] = ""
433             self.pkg.changes["changedbyname"] = ""
434             self.pkg.changes["changedbyemail"] = ""
435
436             self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
437                    % (filename, changes["changed-by"], msg))
438
439         # Ensure all the values in Closes: are numbers
440         if self.pkg.changes.has_key("closes"):
441             for i in self.pkg.changes["closes"].keys():
442                 if re_isanum.match (i) == None:
443                     self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))
444
445         # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
446         self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
447         self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])
448
449         # Check there isn't already a changes file of the same name in one
450         # of the queue directories.
451         base_filename = os.path.basename(filename)
452         for d in [ "Accepted", "Byhand", "Done", "New", "ProposedUpdates", "OldProposedUpdates" ]:
453             if os.path.exists(os.path.join(Cnf["Dir::Queue::%s" % (d) ], base_filename)):
454                 self.rejects.append("%s: a file with this name already exists in the %s directory." % (base_filename, d))
455
456         # Check the .changes is non-empty
457         if not self.pkg.files:
458             self.rejects.append("%s: nothing to do (Files field is empty)." % (base_filename))
459             return False
460
461         # Changes was syntactically valid even if we'll reject
462         return True
463
464     ###########################################################################
465
466     def check_distributions(self):
467         "Check and map the Distribution field"
468
469         Cnf = Config()
470
471         # Handle suite mappings
472         for m in Cnf.ValueList("SuiteMappings"):
473             args = m.split()
474             mtype = args[0]
475             if mtype == "map" or mtype == "silent-map":
476                 (source, dest) = args[1:3]
477                 if self.pkg.changes["distribution"].has_key(source):
478                     del self.pkg.changes["distribution"][source]
479                     self.pkg.changes["distribution"][dest] = 1
480                     if mtype != "silent-map":
481                         self.notes.append("Mapping %s to %s." % (source, dest))
482                 if self.pkg.changes.has_key("distribution-version"):
483                     if self.pkg.changes["distribution-version"].has_key(source):
484                         self.pkg.changes["distribution-version"][source]=dest
485             elif mtype == "map-unreleased":
486                 (source, dest) = args[1:3]
487                 if self.pkg.changes["distribution"].has_key(source):
488                     for arch in self.pkg.changes["architecture"].keys():
489                         if arch not in [ arch_string for a in get_suite_architectures(source) ]:
490                             self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
491                             del self.pkg.changes["distribution"][source]
492                             self.pkg.changes["distribution"][dest] = 1
493                             break
494             elif mtype == "ignore":
495                 suite = args[1]
496                 if self.pkg.changes["distribution"].has_key(suite):
497                     del self.pkg.changes["distribution"][suite]
498                     self.warnings.append("Ignoring %s as a target suite." % (suite))
499             elif mtype == "reject":
500                 suite = args[1]
501                 if self.pkg.changes["distribution"].has_key(suite):
502                     self.rejects.append("Uploads to %s are not accepted." % (suite))
503             elif mtype == "propup-version":
504                 # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
505                 #
506                 # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
507                 if self.pkg.changes["distribution"].has_key(args[1]):
508                     self.pkg.changes.setdefault("distribution-version", {})
509                     for suite in args[2:]:
510                         self.pkg.changes["distribution-version"][suite] = suite
511
512         # Ensure there is (still) a target distribution
513         if len(self.pkg.changes["distribution"].keys()) < 1:
514             self.rejects.append("No valid distribution remaining.")
515
516         # Ensure target distributions exist
517         for suite in self.pkg.changes["distribution"].keys():
518             if not Cnf.has_key("Suite::%s" % (suite)):
519                 self.rejects.append("Unknown distribution `%s'." % (suite))
520
521     ###########################################################################
522
    def binary_file_checks(self, f, session):
        """
        Run sanity checks on the binary package C{f} (.deb or .udeb).

        Extracts the control file, validates its mandatory fields, checks
        them for consistency against the .changes file and against the
        filename, and verifies that a matching source package exists.
        Problems are appended to self.rejects / self.warnings; extracted
        metadata is stored in self.pkg.files[f].

        @type f: string
        @param f: filename of the binary package to check

        @type session: SQLA Session
        @param session: database session, passed through to the DB lookups
        """
        cnf = Config()
        entry = self.pkg.files[f]

        # Extract package control information
        deb_file = utils.open_file(f)
        try:
            control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
        except:
            self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
            deb_file.close()
            # Can't continue, none of the checks on control would work.
            return

        # Check for mandantory "Description:"
        # (re-extract from offset 0; the KeyError from the subscript is what
        # signals a missing Description)
        deb_file.seek(0)
        try:
            apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
        except:
            # NOTE(review): deb_file is not closed on this early return
            self.rejects.append("%s: Missing Description in binary package" % (f))
            return

        deb_file.close()

        # Check for mandatory fields
        for field in [ "Package", "Architecture", "Version" ]:
            if control.Find(field) == None:
                # Can't continue
                self.rejects.append("%s: No %s field in control." % (f, field))
                return

        # Ensure the package name matches the one give in the .changes
        if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
            self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))

        # Validate the package field
        package = control.Find("Package")
        if not re_valid_pkg_name.match(package):
            self.rejects.append("%s: invalid package name '%s'." % (f, package))

        # Validate the version field
        version = control.Find("Version")
        if not re_valid_version.match(version):
            self.rejects.append("%s: invalid version number '%s'." % (f, version))

        # Ensure the architecture of the .deb is one we know about.
        default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
        architecture = control.Find("Architecture")
        upload_suite = self.pkg.changes["distribution"].keys()[0]

        if      architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session)] \
            and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session)]:
            self.rejects.append("Unknown architecture '%s'." % (architecture))

        # Ensure the architecture of the .deb is one of the ones
        # listed in the .changes.
        if not self.pkg.changes["architecture"].has_key(architecture):
            self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))

        # Sanity-check the Depends field
        # NOTE(review): Find() without a default presumably returns None when
        # the field is absent, so this only catches a present-but-empty
        # Depends field — confirm against the python-apt version in use.
        depends = control.Find("Depends")
        if depends == '':
            self.rejects.append("%s: Depends field is empty." % (f))

        # Sanity-check the Provides field
        provides = control.Find("Provides")
        if provides:
            # Strip all whitespace, then validate each comma-separated name
            provide = re_spacestrip.sub('', provides)
            if provide == '':
                self.rejects.append("%s: Provides field is empty." % (f))
            prov_list = provide.split(",")
            for prov in prov_list:
                if not re_valid_pkg_name.match(prov):
                    self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))

        # Check the section & priority match those given in the .changes (non-fatal)
        if     control.Find("Section") and entry["section"] != "" \
           and entry["section"] != control.Find("Section"):
            self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
                                (f, control.Find("Section", ""), entry["section"]))
        if control.Find("Priority") and entry["priority"] != "" \
           and entry["priority"] != control.Find("Priority"):
            self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
                                (f, control.Find("Priority", ""), entry["priority"]))

        # Record the extracted control data in the files entry
        entry["package"] = package
        entry["architecture"] = architecture
        entry["version"] = version
        entry["maintainer"] = control.Find("Maintainer", "")

        # Classify by extension for the database type
        if f.endswith(".udeb"):
            self.pkg.files[f]["dbtype"] = "udeb"
        elif f.endswith(".deb"):
            self.pkg.files[f]["dbtype"] = "deb"
        else:
            self.rejects.append("%s is neither a .deb or a .udeb." % (f))

        # Source defaults to the package name when there is no Source field
        entry["source"] = control.Find("Source", entry["package"])

        # Get the source version
        source = entry["source"]
        source_version = ""

        # A Source field of the form "name (version)" carries its own version
        if source.find("(") != -1:
            m = re_extract_src_version.match(source)
            source = m.group(1)
            source_version = m.group(2)

        if not source_version:
            source_version = self.pkg.files[f]["version"]

        entry["source package"] = source
        entry["source version"] = source_version

        # Ensure the filename matches the contents of the .deb
        m = re_isadeb.match(f)

        #  package name
        file_package = m.group(1)
        if entry["package"] != file_package:
            self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
                                (f, file_package, entry["dbtype"], entry["package"]))
        epochless_version = re_no_epoch.sub('', control.Find("Version"))

        #  version (filenames never carry the epoch)
        file_version = m.group(2)
        if epochless_version != file_version:
            self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
                                (f, file_version, entry["dbtype"], epochless_version))

        #  architecture
        file_architecture = m.group(3)
        if entry["architecture"] != file_architecture:
            self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
                                (f, file_architecture, entry["dbtype"], entry["architecture"]))

        # Check for existent source
        source_version = entry["source version"]
        source_package = entry["source package"]
        if self.pkg.changes["architecture"].has_key("source"):
            # Source is part of this very upload: versions must agree
            if source_version != self.pkg.changes["version"]:
                self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
                                    (source_version, f, self.pkg.changes["version"]))
        else:
            # Check in the SQL database
            if not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
                # Check in one of the other directories
                source_epochless_version = re_no_epoch.sub('', source_version)
                dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
                if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
                    entry["byhand"] = 1
                elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
                    entry["new"] = 1
                else:
                    # Fall back to the remaining (optional) queue directories
                    dsc_file_exists = False
                    for myq in ["Accepted", "Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
                        if cnf.has_key("Dir::Queue::%s" % (myq)):
                            if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
                                dsc_file_exists = True
                                break

                    if not dsc_file_exists:
                        self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))

        # Check the version and for file overwrites
        self.check_binary_against_db(f, session)

        # Deep-scan the package contents; propagate any rejects it finds
        b = Binary(f)
        b.scan_package()
        if len(b.rejects) > 0:
            for j in b.rejects:
                self.rejects.append(j)
695
696     def source_file_checks(self, f, session):
697         entry = self.pkg.files[f]
698
699         m = re_issource.match(f)
700         if not m:
701             return
702
703         entry["package"] = m.group(1)
704         entry["version"] = m.group(2)
705         entry["type"] = m.group(3)
706
707         # Ensure the source package name matches the Source filed in the .changes
708         if self.pkg.changes["source"] != entry["package"]:
709             self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
710
711         # Ensure the source version matches the version in the .changes file
712         if entry["type"] == "orig.tar.gz":
713             changes_version = self.pkg.changes["chopversion2"]
714         else:
715             changes_version = self.pkg.changes["chopversion"]
716
717         if changes_version != entry["version"]:
718             self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))
719
720         # Ensure the .changes lists source in the Architecture field
721         if not self.pkg.changes["architecture"].has_key("source"):
722             self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))
723
724         # Check the signature of a .dsc file
725         if entry["type"] == "dsc":
726             # check_signature returns either:
727             #  (None, [list, of, rejects]) or (signature, [])
728             (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
729             for j in rejects:
730                 self.rejects.append(j)
731
732         entry["architecture"] = "source"
733
734     def per_suite_file_checks(self, f, suite, session):
735         cnf = Config()
736         entry = self.pkg.files[f]
737         archive = utils.where_am_i()
738
739         # Skip byhand
740         if entry.has_key("byhand"):
741             return
742
743         # Handle component mappings
744         for m in cnf.ValueList("ComponentMappings"):
745             (source, dest) = m.split()
746             if entry["component"] == source:
747                 entry["original component"] = source
748                 entry["component"] = dest
749
750         # Ensure the component is valid for the target suite
751         if cnf.has_key("Suite:%s::Components" % (suite)) and \
752            entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
753             self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
754             return
755
756         # Validate the component
757         component = entry["component"]
758         if not get_component(component, session):
759             self.rejects.append("file '%s' has unknown component '%s'." % (f, component))
760             return
761
762         # See if the package is NEW
763         if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
764             entry["new"] = 1
765
766         # Validate the priority
767         if entry["priority"].find('/') != -1:
768             self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))
769
770         # Determine the location
771         location = cnf["Dir::Pool"]
772         l = get_location(location, entry["component"], archive, session)
773         if l is None:
774             self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s, Archive: %s)" % (component, archive))
775             entry["location id"] = -1
776         else:
777             entry["location id"] = l.location_id
778
779         # Check the md5sum & size against existing files (if any)
780         entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])
781
782         found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
783                                          entry["size"], entry["md5sum"], entry["location id"])
784
785         if found is None:
786             self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
787         elif found is False and poolfile is not None:
788             self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
789         else:
790             if poolfile is None:
791                 entry["files id"] = None
792             else:
793                 entry["files id"] = poolfile.file_id
794
795         # Check for packages that have moved from one component to another
796         entry['suite'] = suite
797         res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
798         if res.rowcount > 0:
799             entry["othercomponents"] = res.fetchone()[0]
800
801     def check_files(self, action=True):
802         archive = utils.where_am_i()
803         file_keys = self.pkg.files.keys()
804         holding = Holding()
805         cnf = Config()
806
807         # XXX: As far as I can tell, this can no longer happen - see
808         #      comments by AJ in old revisions - mhy
809         # if reprocess is 2 we've already done this and we're checking
810         # things again for the new .orig.tar.gz.
811         # [Yes, I'm fully aware of how disgusting this is]
812         if action and self.reprocess < 2:
813             cwd = os.getcwd()
814             os.chdir(self.pkg.directory)
815             for f in file_keys:
816                 ret = holding.copy_to_holding(f)
817                 if ret is not None:
818                     # XXX: Should we bail out here or try and continue?
819                     self.rejects.append(ret)
820
821             os.chdir(cwd)
822
823         # Check there isn't already a .changes or .dak file of the same name in
824         # the proposed-updates "CopyChanges" or "CopyDotDak" storage directories.
825         # [NB: this check must be done post-suite mapping]
826         base_filename = os.path.basename(self.pkg.changes_file)
827         dot_dak_filename = base_filename[:-8] + ".dak"
828
829         for suite in self.pkg.changes["distribution"].keys():
830             copychanges = "Suite::%s::CopyChanges" % (suite)
831             if cnf.has_key(copychanges) and \
832                    os.path.exists(os.path.join(cnf[copychanges], base_filename)):
833                 self.rejects.append("%s: a file with this name already exists in %s" \
834                            % (base_filename, cnf[copychanges]))
835
836             copy_dot_dak = "Suite::%s::CopyDotDak" % (suite)
837             if cnf.has_key(copy_dot_dak) and \
838                    os.path.exists(os.path.join(cnf[copy_dot_dak], dot_dak_filename)):
839                 self.rejects.append("%s: a file with this name already exists in %s" \
840                            % (dot_dak_filename, Cnf[copy_dot_dak]))
841
842         self.reprocess = 0
843         has_binaries = False
844         has_source = False
845
846         session = DBConn().session()
847
848         for f, entry in self.pkg.files.items():
849             # Ensure the file does not already exist in one of the accepted directories
850             for d in [ "Accepted", "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
851                 if not cnf.has_key("Dir::Queue::%s" % (d)): continue
852                 if os.path.exists(cnf["Dir::Queue::%s" % (d) ] + '/' + f):
853                     self.rejects.append("%s file already exists in the %s directory." % (f, d))
854
855             if not re_taint_free.match(f):
856                 self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))
857
858             # Check the file is readable
859             if os.access(f, os.R_OK) == 0:
860                 # When running in -n, copy_to_holding() won't have
861                 # generated the reject_message, so we need to.
862                 if action:
863                     if os.path.exists(f):
864                         self.rejects.append("Can't read `%s'. [permission denied]" % (f))
865                     else:
866                         self.rejects.append("Can't read `%s'. [file not found]" % (f))
867                 entry["type"] = "unreadable"
868                 continue
869
870             # If it's byhand skip remaining checks
871             if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
872                 entry["byhand"] = 1
873                 entry["type"] = "byhand"
874
875             # Checks for a binary package...
876             elif re_isadeb.match(f):
877                 has_binaries = True
878                 entry["type"] = "deb"
879
880                 # This routine appends to self.rejects/warnings as appropriate
881                 self.binary_file_checks(f, session)
882
883             # Checks for a source package...
884             elif re_issource.match(f):
885                 has_source = True
886
887                 # This routine appends to self.rejects/warnings as appropriate
888                 self.source_file_checks(f, session)
889
890             # Not a binary or source package?  Assume byhand...
891             else:
892                 entry["byhand"] = 1
893                 entry["type"] = "byhand"
894
895             # Per-suite file checks
896             entry["oldfiles"] = {}
897             for suite in self.pkg.changes["distribution"].keys():
898                 self.per_suite_file_checks(f, suite, session)
899
900         session.close()
901
902         # If the .changes file says it has source, it must have source.
903         if self.pkg.changes["architecture"].has_key("source"):
904             if not has_source:
905                 self.rejects.append("no source found and Architecture line in changes mention source.")
906
907             if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
908                 self.rejects.append("source only uploads are not supported.")
909
910     ###########################################################################
    def check_dsc(self, action=True):
        """
        Parse and validate the .dsc of a source upload.

        Requires exactly one .dsc among the upload's files, parses it and
        its Files: list, enforces mandatory fields, validates the source
        name, version, Format, Maintainer and Build-Depends* fields,
        cross-checks the version against the .changes entry, requires a
        (orig.)tar.gz, and finally compares against the database.  All
        problems are appended to self.rejects.

        @type action: bool
        @param action: if False (dry run) an unreadable .dsc is reported
                       here, since copy_to_holding() did not run

        @rtype: bool
        @return: False on a fatal parse/field error, True otherwise
                 (rejects may still have been appended)
        """
        # Ensure there is source to check
        if not self.pkg.changes["architecture"].has_key("source"):
            return True

        # Find the .dsc
        dsc_filename = None
        for f, entry in self.pkg.files.items():
            if entry["type"] == "dsc":
                if dsc_filename:
                    self.rejects.append("can not process a .changes file with multiple .dsc's.")
                    return False
                else:
                    dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            self.rejects.append("source uploads must contain a dsc file")
            return False

        # Parse the .dsc file
        try:
            self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1))
        except CantOpenError:
            # if not -n copy_to_holding() will have done this for us...
            if not action:
                self.rejects.append("%s: can't read file." % (dsc_filename))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
        except InvalidDscError, line:
            self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
        except ChangesUnicodeError:
            self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))

        # Build up the file list of files mentioned by the .dsc
        # (fatal on failure - later checks depend on dsc_files)
        try:
            self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
        except NoFilesFieldError:
            self.rejects.append("%s: no Files: field." % (dsc_filename))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
            return False

        # Enforce mandatory fields
        for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
            if not self.pkg.dsc.has_key(i):
                self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
                return False

        # Validate the source and version fields
        if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
        if not re_valid_version.match(self.pkg.dsc["version"]):
            self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))

        # Bumping the version number of the .dsc breaks extraction by stable's
        # dpkg-source.  So let's not do that...
        if self.pkg.dsc["format"] != "1.0":
            self.rejects.append("%s: incompatible 'Format' version produced by a broken version of dpkg-dev 1.9.1{3,4}." % (dsc_filename))

        # Validate the Maintainer field
        try:
            # We ignore the return value
            fix_maintainer(self.pkg.dsc["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                                 % (dsc_filename, self.pkg.dsc["maintainer"], msg))

        # Validate the build-depends field(s)
        for field_name in [ "build-depends", "build-depends-indep" ]:
            field = self.pkg.dsc.get(field_name)
            if field:
                # Check for broken dpkg-dev lossage...
                if field.startswith("ARRAY"):
                    self.rejects.append("%s: invalid %s field produced by a broken version of dpkg-dev (1.10.11)" % \
                                        (dsc_filename, field_name.title()))

                # Have apt try to parse them...
                try:
                    apt_pkg.ParseSrcDepends(field)
                except:
                    self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))

        # Ensure the version number in the .dsc matches the version number in the .changes
        epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        changes_version = self.pkg.files[dsc_filename]["version"]

        if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
            self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))

        # Ensure there is a .tar.gz in the .dsc file
        has_tar = False
        for f in self.pkg.dsc_files.keys():
            m = re_issource.match(f)
            if not m:
                self.rejects.append("%s: %s in Files field not recognised as source." % (dsc_filename, f))
                continue
            ftype = m.group(3)
            if ftype == "orig.tar.gz" or ftype == "tar.gz":
                has_tar = True

        if not has_tar:
            self.rejects.append("%s: no .tar.gz or .orig.tar.gz in 'Files' field." % (dsc_filename))

        # Ensure source is newer than existing source in target suites
        session = DBConn().session()
        self.check_source_against_db(dsc_filename, session)
        self.check_dsc_against_db(dsc_filename, session)
        session.close()

        return True
1027
1028     ###########################################################################
1029
1030     def get_changelog_versions(self, source_dir):
1031         """Extracts a the source package and (optionally) grabs the
1032         version history out of debian/changelog for the BTS."""
1033
1034         cnf = Config()
1035
1036         # Find the .dsc (again)
1037         dsc_filename = None
1038         for f in self.pkg.files.keys():
1039             if self.pkg.files[f]["type"] == "dsc":
1040                 dsc_filename = f
1041
1042         # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1043         if not dsc_filename:
1044             return
1045
1046         # Create a symlink mirror of the source files in our temporary directory
1047         for f in self.pkg.files.keys():
1048             m = re_issource.match(f)
1049             if m:
1050                 src = os.path.join(source_dir, f)
1051                 # If a file is missing for whatever reason, give up.
1052                 if not os.path.exists(src):
1053                     return
1054                 ftype = m.group(3)
1055                 if ftype == "orig.tar.gz" and self.pkg.orig_tar_gz:
1056                     continue
1057                 dest = os.path.join(os.getcwd(), f)
1058                 os.symlink(src, dest)
1059
1060         # If the orig.tar.gz is not a part of the upload, create a symlink to the
1061         # existing copy.
1062         if self.pkg.orig_tar_gz:
1063             dest = os.path.join(os.getcwd(), os.path.basename(self.pkg.orig_tar_gz))
1064             os.symlink(self.pkg.orig_tar_gz, dest)
1065
1066         # Extract the source
1067         cmd = "dpkg-source -sn -x %s" % (dsc_filename)
1068         (result, output) = commands.getstatusoutput(cmd)
1069         if (result != 0):
1070             self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
1071             self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "), "")
1072             return
1073
1074         if not cnf.Find("Dir::Queue::BTSVersionTrack"):
1075             return
1076
1077         # Get the upstream version
1078         upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1079         if re_strip_revision.search(upstr_version):
1080             upstr_version = re_strip_revision.sub('', upstr_version)
1081
1082         # Ensure the changelog file exists
1083         changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
1084         if not os.path.exists(changelog_filename):
1085             self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
1086             return
1087
1088         # Parse the changelog
1089         self.pkg.dsc["bts changelog"] = ""
1090         changelog_file = utils.open_file(changelog_filename)
1091         for line in changelog_file.readlines():
1092             m = re_changelog_versions.match(line)
1093             if m:
1094                 self.pkg.dsc["bts changelog"] += line
1095         changelog_file.close()
1096
1097         # Check we found at least one revision in the changelog
1098         if not self.pkg.dsc["bts changelog"]:
1099             self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1100
1101     def check_source(self):
1102         # XXX: I'm fairly sure reprocess == 2 can never happen
1103         #      AJT disabled the is_incoming check years ago - mhy
1104         #      We should probably scrap or rethink the whole reprocess thing
1105         # Bail out if:
1106         #    a) there's no source
1107         # or b) reprocess is 2 - we will do this check next time when orig.tar.gz is in 'files'
1108         # or c) the orig.tar.gz is MIA
1109         if not self.pkg.changes["architecture"].has_key("source") or self.reprocess == 2 \
1110            or self.pkg.orig_tar_gz == -1:
1111             return
1112
1113         tmpdir = utils.temp_dirname()
1114
1115         # Move into the temporary directory
1116         cwd = os.getcwd()
1117         os.chdir(tmpdir)
1118
1119         # Get the changelog version history
1120         self.get_changelog_versions(cwd)
1121
1122         # Move back and cleanup the temporary tree
1123         os.chdir(cwd)
1124
1125         try:
1126             shutil.rmtree(tmpdir)
1127         except OSError, e:
1128             if e.errno != errno.EACCES:
1129                 print "foobar"
1130                 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1131
1132             self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1133             # We probably have u-r or u-w directories so chmod everything
1134             # and try again.
1135             cmd = "chmod -R u+rwx %s" % (tmpdir)
1136             result = os.system(cmd)
1137             if result != 0:
1138                 utils.fubar("'%s' failed with result %s." % (cmd, result))
1139             shutil.rmtree(tmpdir)
1140         except Exception, e:
1141             print "foobar2 (%s)" % e
1142             utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1143
1144     ###########################################################################
1145     def ensure_hashes(self):
1146         # Make sure we recognise the format of the Files: field in the .changes
1147         format = self.pkg.changes.get("format", "0.0").split(".", 1)
1148         if len(format) == 2:
1149             format = int(format[0]), int(format[1])
1150         else:
1151             format = int(float(format[0])), 0
1152
1153         # We need to deal with the original changes blob, as the fields we need
1154         # might not be in the changes dict serialised into the .dak anymore.
1155         orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1156
1157         # Copy the checksums over to the current changes dict.  This will keep
1158         # the existing modifications to it intact.
1159         for field in orig_changes:
1160             if field.startswith('checksums-'):
1161                 self.pkg.changes[field] = orig_changes[field]
1162
1163         # Check for unsupported hashes
1164         for j in utils.check_hash_fields(".changes", self.pkg.changes):
1165             self.rejects.append(j)
1166
1167         for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1168             self.rejects.append(j)
1169
1170         # We have to calculate the hash if we have an earlier changes version than
1171         # the hash appears in rather than require it exist in the changes file
1172         for hashname, hashfunc, version in utils.known_hashes:
1173             # TODO: Move _ensure_changes_hash into this class
1174             for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1175                 self.rejects.append(j)
1176             if "source" in self.pkg.changes["architecture"]:
1177                 # TODO: Move _ensure_dsc_hash into this class
1178                 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1179                     self.rejects.append(j)
1180
1181     def check_hashes(self):
1182         for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1183             self.rejects.append(m)
1184
1185         for m in utils.check_size(".changes", self.pkg.files):
1186             self.rejects.append(m)
1187
1188         for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1189             self.rejects.append(m)
1190
1191         for m in utils.check_size(".dsc", self.pkg.dsc_files):
1192             self.rejects.append(m)
1193
1194         self.ensure_hashes()
1195
1196     ###########################################################################
1197     def check_urgency(self):
1198         cnf = Config()
1199         if self.pkg.changes["architecture"].has_key("source"):
1200             if not self.pkg.changes.has_key("urgency"):
1201                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1202             self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1203             if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1204                 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1205                                      (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1206                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1207
1208     ###########################################################################
1209
1210     # Sanity check the time stamps of files inside debs.
1211     # [Files in the near future cause ugly warnings and extreme time
1212     #  travel can cause errors on extraction]
1213
    def check_timestamps(self):
        """
        Sanity check the timestamps of the members inside each uploaded
        deb's control and data tarballs: reject debs containing files dated
        beyond the configured future grace period or before the configured
        past cutoff year.  Problems are appended to self.rejects.
        """
        Cnf = Config()

        future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
        past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
        # TarTime.callback records members whose mtime falls outside
        # [past_cutoff, future_cutoff]; one instance is reused per deb.
        tar = TarTime(future_cutoff, past_cutoff)

        for filename, entry in self.pkg.files.items():
            if entry["type"] == "deb":
                tar.reset()
                try:
                    deb_file = utils.open_file(filename)
                    apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
                    # rewind before scanning the second ar member
                    deb_file.seek(0)
                    try:
                        apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
                    except SystemError, e:
                        # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
                        if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
                            raise
                        deb_file.seek(0)
                        apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")

                    deb_file.close()

                    # Report only the first offending file (plus a count)
                    future_files = tar.future_files.keys()
                    if future_files:
                        num_future_files = len(future_files)
                        future_file = future_files[0]
                        future_date = tar.future_files[future_file]
                        self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
                               % (filename, num_future_files, future_file, time.ctime(future_date)))

                    ancient_files = tar.ancient_files.keys()
                    if ancient_files:
                        num_ancient_files = len(ancient_files)
                        ancient_file = ancient_files[0]
                        ancient_date = tar.ancient_files[ancient_file]
                        self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
                               % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
                except:
                    # Deliberately broad: any failure to extract/scan the deb
                    # rejects it rather than crashing the queue run.
                    self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1256
1257     ###########################################################################
    def check_signed_by_key(self):
        """
        Ensure the .changes is signed by an authorized uploader.

        Looks up the signing fingerprint, classifies the upload as
        sponsored or not, and for restricted (DM-style) keys verifies the
        signer appears in the source package's Maintainer/Uploaders, may
        not hijack binaries from other source packages, and may not upload
        BYHAND or NEW files.  Problems are appended to self.rejects.
        """
        session = DBConn().session()

        (uid, uid_name, is_dm) = lookup_uid_from_fingerprint(self.pkg.changes["fingerprint"], session=session)

        # match claimed name with actual name:
        if uid is None:
            # This is fundamentally broken but need us to refactor how we get
            # the UIDs/Fingerprints in order for us to fix it properly
            uid, uid_email = self.pkg.changes["fingerprint"], uid
            may_nmu, may_sponsor = 1, 1
            # XXX by default new dds don't have a fingerprint/uid in the db atm,
            #     and can't get one in there if we don't allow nmu/sponsorship
        elif is_dm is False:
            # If is_dm is False, we allow full upload rights
            uid_email = "%s@debian.org" % (uid)
            may_nmu, may_sponsor = 1, 1
        else:
            # Assume limited upload rights unless we've discovered otherwise
            uid_email = uid
            may_nmu, may_sponsor = 0, 0

        # Sponsored iff the signer is neither the maintainer nor the
        # changed-by person (an empty uid_name also counts as sponsored).
        if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
            sponsored = 0
        elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
            sponsored = 0
            if uid_name == "": sponsored = 1
        else:
            sponsored = 1
            # Record the sponsor address unless an alias of the signer
            # already appears as maintainer/changed-by.
            if ("source" in self.pkg.changes["architecture"] and
                uid_email and utils.is_email_alias(uid_email)):
                sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
                if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
                    self.pkg.changes["changedbyemail"] not in sponsor_addresses):
                    self.pkg.changes["sponsoremail"] = uid_email

        if sponsored and not may_sponsor:
            self.rejects.append("%s is not authorised to sponsor uploads" % (uid))

        if not sponsored and not may_nmu:
            should_reject = True
            highest_sid, highest_version = None, None

            # XXX: This reimplements in SQLA what existed before but it's fundamentally fucked
            #      It ignores higher versions with the dm_upload_allowed flag set to false
            #      I'm keeping the existing behaviour for now until I've gone back and
            #      checked exactly what the GR says - mhy
            for si in get_sources_from_name(source=self.pkg.changes['source'], dm_upload_allowed=True, session=session):
                if highest_version is None or apt_pkg.VersionCompare(si.version, highest_version) == 1:
                     highest_sid = si.source_id
                     highest_version = si.version

            if highest_sid is None:
                self.rejects.append("Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version" % self.pkg.changes["source"])
            else:
                # The signer must match a Maintainer/Uploaders entry by
                # email or by name.
                for sup in session.query(SrcUploader).join(DBSource).filter_by(source_id=highest_sid):
                    (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
                    if email == uid_email or name == uid_name:
                        should_reject = False
                        break

            if should_reject is True:
                self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (uid, self.pkg.changes["source"]))

            # A restricted key may not introduce binaries owned by another
            # source package in any target suite.
            for b in self.pkg.changes["binary"].keys():
                for suite in self.pkg.changes["distribution"].keys():
                    q = session.query(DBSource)
                    q = q.join(DBBinary).filter_by(package=b)
                    q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)

                    for s in q.all():
                        if s.source != self.pkg.changes["source"]:
                            self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (uid, b, s, suite))

            for f in self.pkg.files.keys():
                if self.pkg.files[f].has_key("byhand"):
                    self.rejects.append("%s may not upload BYHAND file %s" % (uid, f))
                if self.pkg.files[f].has_key("new"):
                    self.rejects.append("%s may not upload NEW file %s" % (uid, f))

        session.close()
1340
1341     ###########################################################################
1342     def build_summaries(self):
1343         """ Build a summary of changes the upload introduces. """
1344
1345         (byhand, new, summary, override_summary) = self.pkg.file_summary()
1346
1347         short_summary = summary
1348
1349         # This is for direport's benefit...
1350         f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1351
1352         if byhand or new:
1353             summary += "Changes: " + f
1354
1355         summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1356
1357         summary += self.announce(short_summary, 0)
1358
1359         return (summary, short_summary)
1360
1361     ###########################################################################
1362
1363     def close_bugs(self, summary, action):
1364         """
1365         Send mail to close bugs as instructed by the closes field in the changes file.
1366         Also add a line to summary if any work was done.
1367
1368         @type summary: string
1369         @param summary: summary text, as given by L{build_summaries}
1370
1371         @type action: bool
1372         @param action: Set to false no real action will be done.
1373
1374         @rtype: string
1375         @return: summary. If action was taken, extended by the list of closed bugs.
1376
1377         """
1378
1379         template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1380
1381         bugs = self.pkg.changes["closes"].keys()
1382
1383         if not bugs:
1384             return summary
1385
1386         bugs.sort()
1387         summary += "Closing bugs: "
1388         for bug in bugs:
1389             summary += "%s " % (bug)
1390             if action:
1391                 self.Subst["__BUG_NUMBER__"] = bug
1392                 if self.pkg.changes["distribution"].has_key("stable"):
1393                     self.Subst["__STABLE_WARNING__"] = """
1394 Note that this package is not part of the released stable Debian
1395 distribution.  It may have dependencies on other unreleased software,
1396 or other instabilities.  Please take care if you wish to install it.
1397 The update will eventually make its way into the next released Debian
1398 distribution."""
1399                 else:
1400                     self.Subst["__STABLE_WARNING__"] = ""
1401                     mail_message = utils.TemplateSubst(self.Subst, template)
1402                     utils.send_mail(mail_message)
1403
1404                 # Clear up after ourselves
1405                 del self.Subst["__BUG_NUMBER__"]
1406                 del self.Subst["__STABLE_WARNING__"]
1407
1408         if action and self.logger:
1409             self.logger.log(["closing bugs"] + bugs)
1410
1411         summary += "\n"
1412
1413         return summary
1414
1415     ###########################################################################
1416
1417     def announce(self, short_summary, action):
1418         """
1419         Send an announce mail about a new upload.
1420
1421         @type short_summary: string
1422         @param short_summary: Short summary text to include in the mail
1423
1424         @type action: bool
1425         @param action: Set to false no real action will be done.
1426
1427         @rtype: string
1428         @return: Textstring about action taken.
1429
1430         """
1431
1432         cnf = Config()
1433         announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
1434
1435         # Only do announcements for source uploads with a recent dpkg-dev installed
1436         if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
1437            self.pkg.changes["architecture"].has_key("source"):
1438             return ""
1439
1440         lists_done = {}
1441         summary = ""
1442
1443         self.Subst["__SHORT_SUMMARY__"] = short_summary
1444
1445         for dist in self.pkg.changes["distribution"].keys():
1446             announce_list = cnf.Find("Suite::%s::Announce" % (dist))
1447             if announce_list == "" or lists_done.has_key(announce_list):
1448                 continue
1449
1450             lists_done[announce_list] = 1
1451             summary += "Announcing to %s\n" % (announce_list)
1452
1453             if action:
1454                 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
1455                 if cnf.get("Dinstall::TrackingServer") and \
1456                    self.pkg.changes["architecture"].has_key("source"):
1457                     trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
1458                     self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
1459
1460                 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
1461                 utils.send_mail(mail_message)
1462
1463                 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
1464
1465         if cnf.FindB("Dinstall::CloseBugs"):
1466             summary = self.close_bugs(summary, action)
1467
1468         del self.Subst["__SHORT_SUMMARY__"]
1469
1470         return summary
1471
1472     ###########################################################################
1473
    def accept (self, summary, short_summary, targetdir=None):
        """
        Accept an upload.

        This moves all files referenced from the .changes into the I{accepted}
        queue, sends the accepted mail, announces to lists, closes bugs and
        also checks for override disparities. If enabled it will write out
        the version history for the BTS Version Tracking and will finally call
        L{queue_build}.

        @type summary: string
        @param summary: Summary text

        @type short_summary: string
        @param short_summary: Short summary

        @type targetdir: string
        @param targetdir: directory to move the upload into; defaults to
            Dir::Queue::Accepted from the configuration.

        """

        cnf = Config()
        stats = SummaryStats()

        accepttemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted')

        if targetdir is None:
            targetdir = cnf["Dir::Queue::Accepted"]

        print "Accepting."
        if self.logger:
            self.logger.log(["Accepting changes", self.pkg.changes_file])

        # Persist the parsed upload metadata (.dak file) alongside the files.
        self.pkg.write_dot_dak(targetdir)

        # Move all the files into the accepted directory
        utils.move(self.pkg.changes_file, targetdir)

        for name, entry in sorted(self.pkg.files.items()):
            utils.move(name, targetdir)
            stats.accept_bytes += float(entry["size"])

        stats.accept_count += 1

        # Send accept mail, announce to lists, close bugs and check for
        # override disparities
        if not cnf["Dinstall::Options::No-Mail"]:
            self.Subst["__SUITE__"] = ""
            self.Subst["__SUMMARY__"] = summary
            mail_message = utils.TemplateSubst(self.Subst, accepttemplate)
            utils.send_mail(mail_message)
            self.announce(short_summary, 1)

        ## Helper stuff for DebBugs Version Tracking
        if cnf.Find("Dir::Queue::BTSVersionTrack"):
            # ??? once queue/* is cleared on *.d.o and/or reprocessed
            # the conditionalization on dsc["bts changelog"] should be
            # dropped.

            # Write out the version history from the changelog
            if self.pkg.changes["architecture"].has_key("source") and \
               self.pkg.dsc.has_key("bts changelog"):

                # Write to a temp file first, then rename into place so the
                # .versions file appears atomically.
                (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
                version_history = os.fdopen(fd, 'w')
                version_history.write(self.pkg.dsc["bts changelog"])
                version_history.close()
                filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
                                      self.pkg.changes_file[:-8]+".versions")
                os.rename(temp_filename, filename)
                os.chmod(filename, 0644)

            # Write out the binary -> source mapping.
            (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
            debinfo = os.fdopen(fd, 'w')
            for name, entry in sorted(self.pkg.files.items()):
                if entry["type"] == "deb":
                    # One space-separated line per binary package.
                    line = " ".join([entry["package"], entry["version"],
                                     entry["architecture"], entry["source package"],
                                     entry["source version"]])
                    debinfo.write(line+"\n")
            debinfo.close()
            filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
                                  self.pkg.changes_file[:-8]+".debinfo")
            os.rename(temp_filename, filename)
            os.chmod(filename, 0644)

        # Its is Cnf["Dir::Queue::Accepted"] here, not targetdir!
        # <Ganneff> we do call queue_build too
        # <mhy> well yes, we'd have had to if we were inserting into accepted
        # <Ganneff> now. thats database only.
        # <mhy> urgh, that's going to get messy
        # <Ganneff> so i make the p-n call to it *also* using accepted/
        # <mhy> but then the packages will be in the queue_build table without the files being there
        # <Ganneff> as the buildd queue is only regenerated whenever unchecked runs
        # <mhy> ah, good point
        # <Ganneff> so it will work out, as unchecked move it over
        # <mhy> that's all completely sick
        # <Ganneff> yes

        # This routine returns None on success or an error on failure
        res = get_queue('accepted').autobuild_upload(self.pkg, cnf["Dir::Queue::Accepted"])
        if res:
            utils.fubar(res)
1575
1576
1577     def check_override(self):
1578         """
1579         Checks override entries for validity. Mails "Override disparity" warnings,
1580         if that feature is enabled.
1581
1582         Abandons the check if
1583           - override disparity checks are disabled
1584           - mail sending is disabled
1585         """
1586
1587         cnf = Config()
1588
1589         # Abandon the check if:
1590         #  a) override disparity checks have been disabled
1591         #  b) we're not sending mail
1592         if not cnf.FindB("Dinstall::OverrideDisparityCheck") or \
1593            cnf["Dinstall::Options::No-Mail"]:
1594             return
1595
1596         summary = self.pkg.check_override()
1597
1598         if summary == "":
1599             return
1600
1601         overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
1602
1603         self.Subst["__SUMMARY__"] = summary
1604         mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
1605         utils.send_mail(mail_message)
1606         del self.Subst["__SUMMARY__"]
1607
1608     ###########################################################################
1609
1610     def remove(self, dir=None):
1611         """
1612         Used (for instance) in p-u to remove the package from unchecked
1613         """
1614         if dir is None:
1615             os.chdir(self.pkg.directory)
1616         else:
1617             os.chdir(dir)
1618
1619         for f in self.pkg.files.keys():
1620             os.unlink(f)
1621         os.unlink(self.pkg.changes_file)
1622
1623     ###########################################################################
1624
1625     def move_to_dir (self, dest, perms=0660, changesperms=0664):
1626         """
1627         Move files to dest with certain perms/changesperms
1628         """
1629         utils.move(self.pkg.changes_file, dest, perms=changesperms)
1630         for f in self.pkg.files.keys():
1631             utils.move(f, dest, perms=perms)
1632
1633     ###########################################################################
1634
    def force_reject(self, reject_files):
        """
        Forcefully move files from the current directory to the
        reject directory.  If any file already exists in the reject
        directory it will be moved to the morgue to make way for
        the new file.

        @type reject_files: dict
        @param reject_files: file dictionary

        """

        cnf = Config()

        for file_entry in reject_files:
            # Skip any files which don't exist or which we don't have permission to copy.
            if os.access(file_entry, os.R_OK) == 0:
                continue

            dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)

            # Claim the destination name exclusively (O_CREAT|O_EXCL) so a
            # racing process cannot swap a file in underneath us.
            try:
                dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
            except OSError, e:
                # File exists?  Let's try and move it to the morgue
                if e.errno == errno.EEXIST:
                    morgue_file = os.path.join(cnf["Dir::Morgue"], cnf["Dir::MorgueReject"], file_entry)
                    try:
                        morgue_file = utils.find_next_free(morgue_file)
                    except NoFreeFilenameError:
                        # Something's either gone badly Pete Tong, or
                        # someone is trying to exploit us.
                        utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file_entry))
                        return
                    utils.move(dest_file, morgue_file, perms=0660)
                    # Retry the exclusive claim now that the old file is gone.
                    try:
                        dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
                    except OSError, e:
                        # Likewise
                        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
                        return
                else:
                    raise
            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file_entry, dest_file, 1, perms=0660)
            os.close(dest_fd)
1682
1683     ###########################################################################
1684     def do_reject (self, manual=0, reject_message="", note=""):
1685         """
1686         Reject an upload. If called without a reject message or C{manual} is
1687         true, spawn an editor so the user can write one.
1688
1689         @type manual: bool
1690         @param manual: manual or automated rejection
1691
1692         @type reject_message: string
1693         @param reject_message: A reject message
1694
1695         @return: 0
1696
1697         """
1698         # If we weren't given a manual rejection message, spawn an
1699         # editor so the user can add one in...
1700         if manual and not reject_message:
1701             (fd, temp_filename) = utils.temp_filename()
1702             temp_file = os.fdopen(fd, 'w')
1703             if len(note) > 0:
1704                 for line in note:
1705                     temp_file.write(line)
1706             temp_file.close()
1707             editor = os.environ.get("EDITOR","vi")
1708             answer = 'E'
1709             while answer == 'E':
1710                 os.system("%s %s" % (editor, temp_filename))
1711                 temp_fh = utils.open_file(temp_filename)
1712                 reject_message = "".join(temp_fh.readlines())
1713                 temp_fh.close()
1714                 print "Reject message:"
1715                 print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
1716                 prompt = "[R]eject, Edit, Abandon, Quit ?"
1717                 answer = "XXX"
1718                 while prompt.find(answer) == -1:
1719                     answer = utils.our_raw_input(prompt)
1720                     m = re_default_answer.search(prompt)
1721                     if answer == "":
1722                         answer = m.group(1)
1723                     answer = answer[:1].upper()
1724             os.unlink(temp_filename)
1725             if answer == 'A':
1726                 return 1
1727             elif answer == 'Q':
1728                 sys.exit(0)
1729
1730         print "Rejecting.\n"
1731
1732         cnf = Config()
1733
1734         reason_filename = self.pkg.changes_file[:-8] + ".reason"
1735         reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
1736
1737         # Move all the files into the reject directory
1738         reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
1739         self.force_reject(reject_files)
1740
1741         # If we fail here someone is probably trying to exploit the race
1742         # so let's just raise an exception ...
1743         if os.path.exists(reason_filename):
1744             os.unlink(reason_filename)
1745         reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
1746
1747         rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
1748
1749         if not manual:
1750             self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
1751             self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
1752             self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
1753             os.write(reason_fd, reject_message)
1754             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
1755         else:
1756             # Build up the rejection email
1757             user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
1758             self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
1759             self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
1760             self.Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
1761             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
1762             # Write the rejection email out as the <foo>.reason file
1763             os.write(reason_fd, reject_mail_message)
1764
1765         del self.Subst["__REJECTOR_ADDRESS__"]
1766         del self.Subst["__MANUAL_REJECT_MESSAGE__"]
1767         del self.Subst["__CC__"]
1768
1769         os.close(reason_fd)
1770
1771         # Send the rejection mail if appropriate
1772         if not cnf["Dinstall::Options::No-Mail"]:
1773             utils.send_mail(reject_mail_message)
1774
1775         if self.logger:
1776             self.logger.log(["rejected", self.pkg.changes_file])
1777
1778         return 0
1779
1780     ################################################################################
1781     def in_override_p(self, package, component, suite, binary_type, file, session):
1782         """
1783         Check if a package already has override entries in the DB
1784
1785         @type package: string
1786         @param package: package name
1787
1788         @type component: string
1789         @param component: database id of the component
1790
1791         @type suite: int
1792         @param suite: database id of the suite
1793
1794         @type binary_type: string
1795         @param binary_type: type of the package
1796
1797         @type file: string
1798         @param file: filename we check
1799
1800         @return: the database result. But noone cares anyway.
1801
1802         """
1803
1804         cnf = Config()
1805
1806         if binary_type == "": # must be source
1807             file_type = "dsc"
1808         else:
1809             file_type = binary_type
1810
1811         # Override suite name; used for example with proposed-updates
1812         if cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
1813             suite = cnf["Suite::%s::OverrideSuite" % (suite)]
1814
1815         result = get_override(package, suite, component, file_type, session)
1816
1817         # If checking for a source package fall back on the binary override type
1818         if file_type == "dsc" and len(result) < 1:
1819             result = get_override(package, suite, component, ['deb', 'udeb'], session)
1820
1821         # Remember the section and priority so we can check them later if appropriate
1822         if len(result) > 0:
1823             result = result[0]
1824             self.pkg.files[file]["override section"] = result.section.section
1825             self.pkg.files[file]["override priority"] = result.priority.priority
1826             return result
1827
1828         return None
1829
1830     ################################################################################
1831     def get_anyversion(self, sv_list, suite):
1832         """
1833         @type sv_list: list
1834         @param sv_list: list of (suite, version) tuples to check
1835
1836         @type suite: string
1837         @param suite: suite name
1838
1839         Description: TODO
1840         """
1841         Cnf = Config()
1842         anyversion = None
1843         anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
1844         for (s, v) in sv_list:
1845             if s in [ x.lower() for x in anysuite ]:
1846                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
1847                     anyversion = v
1848
1849         return anyversion
1850
1851     ################################################################################
1852
1853     def cross_suite_version_check(self, sv_list, file, new_version, sourceful=False):
1854         """
1855         @type sv_list: list
1856         @param sv_list: list of (suite, version) tuples to check
1857
1858         @type file: string
1859         @param file: XXX
1860
1861         @type new_version: string
1862         @param new_version: XXX
1863
1864         Ensure versions are newer than existing packages in target
1865         suites and that cross-suite version checking rules as
1866         set out in the conf file are satisfied.
1867         """
1868
1869         cnf = Config()
1870
1871         # Check versions for each target suite
1872         for target_suite in self.pkg.changes["distribution"].keys():
1873             must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
1874             must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
1875
1876             # Enforce "must be newer than target suite" even if conffile omits it
1877             if target_suite not in must_be_newer_than:
1878                 must_be_newer_than.append(target_suite)
1879
1880             for (suite, existent_version) in sv_list:
1881                 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
1882
1883                 if suite in must_be_newer_than and sourceful and vercmp < 1:
1884                     self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
1885
1886                 if suite in must_be_older_than and vercmp > -1:
1887                     cansave = 0
1888
1889                     if self.pkg.changes.get('distribution-version', {}).has_key(suite):
1890                         # we really use the other suite, ignoring the conflicting one ...
1891                         addsuite = self.pkg.changes["distribution-version"][suite]
1892
1893                         add_version = self.get_anyversion(sv_list, addsuite)
1894                         target_version = self.get_anyversion(sv_list, target_suite)
1895
1896                         if not add_version:
1897                             # not add_version can only happen if we map to a suite
1898                             # that doesn't enhance the suite we're propup'ing from.
1899                             # so "propup-ver x a b c; map a d" is a problem only if
1900                             # d doesn't enhance a.
1901                             #
1902                             # i think we could always propagate in this case, rather
1903                             # than complaining. either way, this isn't a REJECT issue
1904                             #
1905                             # And - we really should complain to the dorks who configured dak
1906                             self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite))
1907                             self.pkg.changes.setdefault("propdistribution", {})
1908                             self.pkg.changes["propdistribution"][addsuite] = 1
1909                             cansave = 1
1910                         elif not target_version:
1911                             # not targets_version is true when the package is NEW
1912                             # we could just stick with the "...old version..." REJECT
1913                             # for this, I think.
1914                             self.rejects.append("Won't propogate NEW packages.")
1915                         elif apt_pkg.VersionCompare(new_version, add_version) < 0:
1916                             # propogation would be redundant. no need to reject though.
1917                             self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
1918                             cansave = 1
1919                         elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
1920                              apt_pkg.VersionCompare(add_version, target_version) >= 0:
1921                             # propogate!!
1922                             self.warnings.append("Propogating upload to %s" % (addsuite))
1923                             self.pkg.changes.setdefault("propdistribution", {})
1924                             self.pkg.changes["propdistribution"][addsuite] = 1
1925                             cansave = 1
1926
1927                     if not cansave:
1928                         self.reject.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
1929
1930     ################################################################################
1931     def check_binary_against_db(self, file, session):
1932         # Ensure version is sane
1933         q = session.query(BinAssociation)
1934         q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[file]["package"])
1935         q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[file]["architecture"], 'all']))
1936
1937         self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
1938                                        file, self.pkg.files[file]["version"], sourceful=False)
1939
1940         # Check for any existing copies of the file
1941         q = session.query(DBBinary).filter_by(package=self.pkg.files[file]["package"])
1942         q = q.filter_by(version=self.pkg.files[file]["version"])
1943         q = q.join(Architecture).filter_by(arch_string=self.pkg.files[file]["architecture"])
1944
1945         if q.count() > 0:
1946             self.rejects.append("%s: can not overwrite existing copy already in the archive." % (file))
1947
1948     ################################################################################
1949
1950     def check_source_against_db(self, file, session):
1951         """
1952         """
1953         source = self.pkg.dsc.get("source")
1954         version = self.pkg.dsc.get("version")
1955
1956         # Ensure version is sane
1957         q = session.query(SrcAssociation)
1958         q = q.join(DBSource).filter(DBSource.source==source)
1959
1960         self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
1961                                        file, version, sourceful=True)
1962
1963     ################################################################################
1964     def check_dsc_against_db(self, file, session):
1965         """
1966
1967         @warning: NB: this function can remove entries from the 'files' index [if
1968          the .orig.tar.gz is a duplicate of the one in the archive]; if
1969          you're iterating over 'files' and call this function as part of
1970          the loop, be sure to add a check to the top of the loop to
1971          ensure you haven't just tried to dereference the deleted entry.
1972
1973         """
1974
1975         Cnf = Config()
1976         self.pkg.orig_tar_gz = None
1977
1978         # Try and find all files mentioned in the .dsc.  This has
1979         # to work harder to cope with the multiple possible
1980         # locations of an .orig.tar.gz.
1981         # The ordering on the select is needed to pick the newest orig
1982         # when it exists in multiple places.
1983         for dsc_name, dsc_entry in self.pkg.dsc_files.items():
1984             found = None
1985             if self.pkg.files.has_key(dsc_name):
1986                 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
1987                 actual_size = int(self.pkg.files[dsc_name]["size"])
1988                 found = "%s in incoming" % (dsc_name)
1989
1990                 # Check the file does not already exist in the archive
1991                 ql = get_poolfile_like_name(dsc_name, session)
1992
1993                 # Strip out anything that isn't '%s' or '/%s$'
1994                 for i in ql:
1995                     if not i.filename.endswith(dsc_name):
1996                         ql.remove(i)
1997
1998                 # "[dak] has not broken them.  [dak] has fixed a
1999                 # brokenness.  Your crappy hack exploited a bug in
2000                 # the old dinstall.
2001                 #
2002                 # "(Come on!  I thought it was always obvious that
2003                 # one just doesn't release different files with
2004                 # the same name and version.)"
2005                 #                        -- ajk@ on d-devel@l.d.o
2006
2007                 if len(ql) > 0:
2008                     # Ignore exact matches for .orig.tar.gz
2009                     match = 0
2010                     if dsc_name.endswith(".orig.tar.gz"):
2011                         for i in ql:
2012                             if self.pkg.files.has_key(dsc_name) and \
2013                                int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2014                                self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2015                                 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2016                                 # TODO: Don't delete the entry, just mark it as not needed
2017                                 # This would fix the stupidity of changing something we often iterate over
2018                                 # whilst we're doing it
2019                                 del self.pkg.files[dsc_name]
2020                                 self.pkg.orig_tar_gz = os.path.join(i.location.path, i.filename)
2021                                 match = 1
2022
2023                     if not match:
2024                         self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2025
2026             elif dsc_name.endswith(".orig.tar.gz"):
2027                 # Check in the pool
2028                 ql = get_poolfile_like_name(dsc_name, session)
2029
2030                 # Strip out anything that isn't '%s' or '/%s$'
2031                 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2032                 for i in ql:
2033                     if not i.filename.endswith(dsc_name):
2034                         ql.remove(i)
2035
2036                 if len(ql) > 0:
2037                     # Unfortunately, we may get more than one match here if,
2038                     # for example, the package was in potato but had an -sa
2039                     # upload in woody.  So we need to choose the right one.
2040
2041                     # default to something sane in case we don't match any or have only one
2042                     x = ql[0]
2043
2044                     if len(ql) > 1:
2045                         for i in ql:
2046                             old_file = os.path.join(i.location.path, i.filename)
2047                             old_file_fh = utils.open_file(old_file)
2048                             actual_md5 = apt_pkg.md5sum(old_file_fh)
2049                             old_file_fh.close()
2050                             actual_size = os.stat(old_file)[stat.ST_SIZE]
2051                             if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2052                                 x = i
2053
2054                     old_file = os.path.join(i.location.path, i.filename)
2055                     old_file_fh = utils.open_file(old_file)
2056                     actual_md5 = apt_pkg.md5sum(old_file_fh)
2057                     old_file_fh.close()
2058                     actual_size = os.stat(old_file)[stat.ST_SIZE]
2059                     found = old_file
2060                     suite_type = x.location.archive_type
2061                     # need this for updating dsc_files in install()
2062                     dsc_entry["files id"] = x.file_id
2063                     # See install() in process-accepted...
2064                     self.pkg.orig_tar_id = x.file_id
2065                     self.pkg.orig_tar_gz = old_file
2066                     self.pkg.orig_tar_location = x.location.location_id
2067                 else:
2068                     # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2069                     # Not there? Check the queue directories...
2070                     for directory in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2071                         in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2072                         if os.path.exists(in_otherdir):
2073                             in_otherdir_fh = utils.open_file(in_otherdir)
2074                             actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2075                             in_otherdir_fh.close()
2076                             actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2077                             found = in_otherdir
2078                             self.pkg.orig_tar_gz = in_otherdir
2079
2080                     if not found:
2081                         self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_name))
2082                         self.pkg.orig_tar_gz = -1
2083                         continue
2084             else:
2085                 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (file, dsc_name))
2086                 continue
2087             if actual_md5 != dsc_entry["md5sum"]:
2088                 self.rejects.append("md5sum for %s doesn't match %s." % (found, file))
2089             if actual_size != int(dsc_entry["size"]):
2090                 self.rejects.append("size for %s doesn't match %s." % (found, file))
2091
2092     ################################################################################
2093     def accepted_checks(self, overwrite_checks, session):
2094         # Recheck anything that relies on the database; since that's not
2095         # frozen between accept and our run time when called from p-a.
2096
2097         # overwrite_checks is set to False when installing to stable/oldstable
2098
2099         propogate={}
2100         nopropogate={}
2101
2102         # Find the .dsc (again)
2103         dsc_filename = None
2104         for f in self.pkg.files.keys():
2105             if self.pkg.files[f]["type"] == "dsc":
2106                 dsc_filename = f
2107
2108         for checkfile in self.pkg.files.keys():
2109             # The .orig.tar.gz can disappear out from under us is it's a
2110             # duplicate of one in the archive.
2111             if not self.pkg.files.has_key(checkfile):
2112                 continue
2113
2114             entry = self.pkg.files[checkfile]
2115
2116             # Check that the source still exists
2117             if entry["type"] == "deb":
2118                 source_version = entry["source version"]
2119                 source_package = entry["source package"]
2120                 if not self.pkg.changes["architecture"].has_key("source") \
2121                    and not source_exists(source_package, source_version,  self.pkg.changes["distribution"].keys()):
2122                     self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2123
2124             # Version and file overwrite checks
2125             if overwrite_checks:
2126                 if entry["type"] == "deb":
2127                     self.check_binary_against_db(checkfile, session)
2128                 elif entry["type"] == "dsc":
2129                     self.check_source_against_db(checkfile, session)
2130                     self.check_dsc_against_db(dsc_filename, session)
2131
2132             # propogate in the case it is in the override tables:
2133             for suite in self.pkg.changes.get("propdistribution", {}).keys():
2134                 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2135                     propogate[suite] = 1
2136                 else:
2137                     nopropogate[suite] = 1
2138
2139         for suite in propogate.keys():
2140             if suite in nopropogate:
2141                 continue
2142             self.pkg.changes["distribution"][suite] = 1
2143
2144         for checkfile in self.pkg.files.keys():
2145             # Check the package is still in the override tables
2146             for suite in self.pkg.changes["distribution"].keys():
2147                 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2148                     self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2149
2150     ################################################################################
2151     # This is not really a reject, but an unaccept, but since a) the code for
2152     # that is non-trivial (reopen bugs, unannounce etc.), b) this should be
2153     # extremely rare, for now we'll go with whining at our admin folks...
2154
2155     def do_unaccept(self):
2156         cnf = Config()
2157
2158         self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2159         self.Subst["__REJECT_MESSAGE__"] = self.package_info()
2160         self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2161         self.Subst["__BCC__"] = "X-DAK: dak process-accepted\nX-Katie: $Revision: 1.18 $"
2162         if cnf.has_key("Dinstall::Bcc"):
2163             self.Subst["__BCC__"] += "\nBcc: %s" % (cnf["Dinstall::Bcc"])
2164
2165         template = os.path.join(cnf["Dir::Templates"], "process-accepted.unaccept")
2166
2167         reject_mail_message = utils.TemplateSubst(self.Subst, template)
2168
2169         # Write the rejection email out as the <foo>.reason file
2170         reason_filename = os.path.basename(self.pkg.changes_file[:-8]) + ".reason"
2171         reject_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2172
2173         # If we fail here someone is probably trying to exploit the race
2174         # so let's just raise an exception ...
2175         if os.path.exists(reject_filename):
2176             os.unlink(reject_filename)
2177
2178         fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2179         os.write(fd, reject_mail_message)
2180         os.close(fd)
2181
2182         utils.send_mail(reject_mail_message)
2183
2184         del self.Subst["__REJECTOR_ADDRESS__"]
2185         del self.Subst["__REJECT_MESSAGE__"]
2186         del self.Subst["__CC__"]
2187
2188     ################################################################################
2189     # If any file of an upload has a recent mtime then chances are good
2190     # the file is still being uploaded.
2191
2192     def upload_too_new(self):
2193         cnf = Config()
2194         too_new = False
2195         # Move back to the original directory to get accurate time stamps
2196         cwd = os.getcwd()
2197         os.chdir(self.pkg.directory)
2198         file_list = self.pkg.files.keys()
2199         file_list.extend(self.pkg.dsc_files.keys())
2200         file_list.append(self.pkg.changes_file)
2201         for f in file_list:
2202             try:
2203                 last_modified = time.time()-os.path.getmtime(f)
2204                 if last_modified < int(cnf["Dinstall::SkipTime"]):
2205                     too_new = True
2206                     break
2207             except:
2208                 pass
2209
2210         os.chdir(cwd)
2211         return too_new