]> git.decadent.org.uk Git - dak.git/blob - daklib/queue.py
typo fixes
[dak.git] / daklib / queue.py
1 #!/usr/bin/env python
2 # vim:set et sw=4:
3
4 """
5 Queue utility functions for dak
6
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009  Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
11 """
12
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
17
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
21 # GNU General Public License for more details.
22
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
26
27 ###############################################################################
28
29 import cPickle
30 import errno
31 import os
32 import pg
33 import stat
34 import sys
35 import time
36 import apt_inst
37 import apt_pkg
38 import utils
39 import commands
40 import shutil
41 from types import *
42
43 from dak_exceptions import *
44 from changes import *
45 from regexes import *
46 from config import Config
47 from holding import Holding
48 from dbconn import *
49 from summarystats import SummaryStats
50 from utils import parse_changes
51 from textutils import fix_maintainer
52 from binary import Binary
53
54 ###############################################################################
55
def get_type(f, session):
    """
    Get the file type of C{f}

    @type f: dict
    @param f: file entry from Changes object

    @type session: SQLA Session
    @param session: SQL Alchemy session object

    @rtype: string
    @return: filetype

    """
    # Determine the type
    if f.has_key("dbtype"):
        # Fixed: previously read file["dbtype"], which referenced the
        # builtin 'file' instead of the function argument 'f'.
        file_type = f["dbtype"]
    elif f["type"] in [ "orig.tar.gz", "orig.tar.bz2", "tar.gz", "tar.bz2", "diff.gz", "diff.bz2", "dsc" ]:
        file_type = "dsc"
    else:
        # Fixed: file_type was referenced here before assignment (NameError);
        # report the entry's declared type instead.
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (f["type"]))

    # Validate the override type against the database
    type_id = get_override_type(file_type, session)
    if type_id is None:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))

    return file_type
85 ################################################################################
86
87 # Determine what parts in a .changes are NEW
88
def determine_new(changes, files, warn=1):
    """
    Determine what parts in a C{changes} file are NEW.

    A package is NEW when no override entry exists for it in any of the
    upload's target suites.  Side effect: the per-file "new" flag in
    C{files} is cleared for packages that turn out not to be NEW.

    @type changes: Upload.Pkg.changes dict
    @param changes: Changes dictionary

    @type files: Upload.Pkg.files dict
    @param files: Files dictionary

    @type warn: bool
    @param warn: Warn if overrides are added for (old)stable

    @rtype: dict
    @return: dictionary of NEW components.

    """
    new = {}

    session = DBConn().session()

    # Build up a list of potentially new things
    for name, f in files.items():
        # Skip byhand elements
        if f["type"] == "byhand":
            continue
        pkg = f["package"]
        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f, session)
        component = f["component"]

        # Source packages always carry the pseudo-priority "source".
        if file_type == "dsc":
            priority = "source"

        if not new.has_key(pkg):
            new[pkg] = {}
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        else:
            old_type = new[pkg]["type"]
            if old_type != file_type:
                # source gets trumped by deb or udeb: prefer the binary's
                # metadata when both a dsc and a (u)deb are present.
                if old_type == "dsc":
                    new[pkg]["priority"] = priority
                    new[pkg]["section"] = section
                    new[pkg]["type"] = file_type
                    new[pkg]["component"] = component

        new[pkg]["files"].append(name)

        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]

    # Anything that already has an override in one of the target suites is
    # not NEW after all: drop it and clear the per-file "new" flags.
    # (Deleting from new while iterating new.keys() is safe here because
    # keys() returns a list copy in Python 2.)
    for suite in changes["suite"].keys():
        for pkg in new.keys():
            ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
            if len(ql) > 0:
                for file_entry in new[pkg]["files"]:
                    if files[file_entry].has_key("new"):
                        del files[file_entry]["new"]
                del new[pkg]

    if warn:
        for s in ['stable', 'oldstable']:
            if changes["suite"].has_key(s):
                print "WARNING: overrides will be added for %s!" % s
        for pkg in new.keys():
            if new[pkg].has_key("othercomponents"):
                print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])

    session.close()

    return new
166
167 ################################################################################
168
def check_valid(new):
    """
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages can not be udeb
      - source priority can only be assigned to dsc file types

    A failed check is recorded by setting the package's "section id" /
    "priority id" entry to -1; C{new} is modified in place.

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.

    """
    for pkg in new.keys():
        section_name = new[pkg]["section"]
        priority_name = new[pkg]["priority"]
        file_type = new[pkg]["type"]

        # Look up the section; -1 marks it as unknown.
        section = get_section(section_name)
        if section is None:
            new[pkg]["section id"] = -1
        else:
            new[pkg]["section id"] = section.section_id

        # Likewise for the priority.
        priority = get_priority(priority_name)
        if priority is None:
            new[pkg]["priority id"] = -1
        else:
            new[pkg]["priority id"] = priority.priority_id

        # Sanity checks
        di = section_name.find("debian-installer") != -1

        # If d-i, we must be udeb and vice-versa
        if (di and file_type not in ("udeb", "dsc")) or \
           (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1

        # If dsc we need to be source and vice-versa.
        # Fixed: this previously compared the Priority ORM object against
        # the string "source" (always unequal), so every dsc was flagged
        # invalid; compare the priority *name* instead.
        if (priority_name == "source" and file_type != "dsc") or \
           (priority_name != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1
211 ###############################################################################
212
def lookup_uid_from_fingerprint(fpr, session):
    """Return (uid, uid name, is_dm) for the key with fingerprint C{fpr}.

    is_dm defaults to False whenever the key (or its keyring flag) cannot
    be found — the safe assumption.
    """
    uid = None
    uid_name = ""
    is_dm = False

    user = get_uid_from_fingerprint(fpr, session)
    if user is None:
        return (uid, uid_name, is_dm)

    uid = user.uid
    uid_name = '' if user.name is None else user.name

    # Locate the exact fingerprint record (which we have to have) to see
    # whether its keyring grants Debian Maintainer status.
    for fingerprint_rec in user.fingerprint:
        if fingerprint_rec.fingerprint == fpr:
            is_dm = fingerprint_rec.keyring.debian_maintainer
            break

    return (uid, uid_name, is_dm)
235
236 ###############################################################################
237
238 # Used by Upload.check_timestamps
class TarTime(object):
    """Tar-walk visitor that records members with out-of-range mtimes.

    Entries newer than future_cutoff land in future_files, entries older
    than past_cutoff in ancient_files (both mapping name -> mtime).
    """

    def __init__(self, future_cutoff, past_cutoff):
        self.future_cutoff = future_cutoff
        self.past_cutoff = past_cutoff
        self.reset()

    def reset(self):
        """Forget any offenders recorded so far."""
        self.ancient_files = {}
        self.future_files = {}

    def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
        """Visitor hook: classify one tar member by its MTime."""
        if MTime < self.past_cutoff:
            self.ancient_files[Name] = MTime
        if MTime > self.future_cutoff:
            self.future_files[Name] = MTime
254
255 ###############################################################################
256
257 class Upload(object):
258     """
259     Everything that has to do with an upload processed.
260
261     """
    def __init__(self):
        # Optional logger; stays None unless a caller installs one.
        self.logger = None
        # Per-upload package state (.changes / files / dsc dictionaries).
        self.pkg = Changes()
        # Populate substitution map and clear reject/warning/note lists.
        self.reset()
266
267     ###########################################################################
268
269     def reset (self):
270         """ Reset a number of internal variables."""
271
272         # Initialize the substitution template map
273         cnf = Config()
274         self.Subst = {}
275         self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
276         self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
277         self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
278         self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
279
280         self.rejects = []
281         self.warnings = []
282         self.notes = []
283
284         self.pkg.reset()
285
286     def package_info(self):
287         msg = ''
288
289         if len(self.rejects) > 0:
290             msg += "Reject Reasons:\n"
291             msg += "\n".join(self.rejects)
292
293         if len(self.warnings) > 0:
294             msg += "Warnings:\n"
295             msg += "\n".join(self.warnings)
296
297         if len(self.notes) > 0:
298             msg += "Notes:\n"
299             msg += "\n".join(self.notes)
300
301         return msg
302
303     ###########################################################################
    def update_subst(self):
        """ Set up the per-package template substitution mappings """

        cnf = Config()

        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not self.pkg.changes.has_key("architecture") or not \
           isinstance(self.pkg.changes["architecture"], DictType):
            self.pkg.changes["architecture"] = { "Unknown" : "" }

        # and maintainer2047 may not exist.
        if not self.pkg.changes.has_key("maintainer2047"):
            self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]

        self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
        self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        # (Only when Changed-By is set and differs from Maintainer.)
        if self.pkg.changes["architecture"].has_key("source") and \
           self.pkg.changes["changedby822"] != "" and \
           (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):

            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
        else:
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")

        # Sponsored uploads also get a copy to the sponsor.
        if "sponsoremail" in self.pkg.changes:
            self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]

        # Bcc the package-tracking system when one is configured.
        if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
            self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if cnf.get("Dinstall::OverrideMaintainer"):
            self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
            self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]

        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
        self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
349
350     ###########################################################################
351     def load_changes(self, filename):
352         """
353         @rtype boolean
354         @rvalue: whether the changes file was valid or not.  We may want to
355                  reject even if this is True (see what gets put in self.rejects).
356                  This is simply to prevent us even trying things later which will
357                  fail because we couldn't properly parse the file.
358         """
359         Cnf = Config()
360         self.pkg.changes_file = filename
361
362         # Parse the .changes field into a dictionary
363         try:
364             self.pkg.changes.update(parse_changes(filename))
365         except CantOpenError:
366             self.rejects.append("%s: can't read file." % (filename))
367             return False
368         except ParseChangesError, line:
369             self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
370             return False
371         except ChangesUnicodeError:
372             self.rejects.append("%s: changes file not proper utf-8" % (filename))
373             return False
374
375         # Parse the Files field from the .changes into another dictionary
376         try:
377             self.pkg.files.update(utils.build_file_list(self.pkg.changes))
378         except ParseChangesError, line:
379             self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
380             return False
381         except UnknownFormatError, format:
382             self.rejects.append("%s: unknown format '%s'." % (filename, format))
383             return False
384
385         # Check for mandatory fields
386         for i in ("distribution", "source", "binary", "architecture",
387                   "version", "maintainer", "files", "changes", "description"):
388             if not self.pkg.changes.has_key(i):
389                 # Avoid undefined errors later
390                 self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
391                 return False
392
393         # Strip a source version in brackets from the source field
394         if re_strip_srcver.search(self.pkg.changes["source"]):
395             self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])
396
397         # Ensure the source field is a valid package name.
398         if not re_valid_pkg_name.match(self.pkg.changes["source"]):
399             self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))
400
401         # Split multi-value fields into a lower-level dictionary
402         for i in ("architecture", "distribution", "binary", "closes"):
403             o = self.pkg.changes.get(i, "")
404             if o != "":
405                 del self.pkg.changes[i]
406
407             self.pkg.changes[i] = {}
408
409             for j in o.split():
410                 self.pkg.changes[i][j] = 1
411
412         # Fix the Maintainer: field to be RFC822/2047 compatible
413         try:
414             (self.pkg.changes["maintainer822"],
415              self.pkg.changes["maintainer2047"],
416              self.pkg.changes["maintainername"],
417              self.pkg.changes["maintaineremail"]) = \
418                    fix_maintainer (self.pkg.changes["maintainer"])
419         except ParseMaintError, msg:
420             self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
421                    % (filename, changes["maintainer"], msg))
422
423         # ...likewise for the Changed-By: field if it exists.
424         try:
425             (self.pkg.changes["changedby822"],
426              self.pkg.changes["changedby2047"],
427              self.pkg.changes["changedbyname"],
428              self.pkg.changes["changedbyemail"]) = \
429                    fix_maintainer (self.pkg.changes.get("changed-by", ""))
430         except ParseMaintError, msg:
431             self.pkg.changes["changedby822"] = ""
432             self.pkg.changes["changedby2047"] = ""
433             self.pkg.changes["changedbyname"] = ""
434             self.pkg.changes["changedbyemail"] = ""
435
436             self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
437                    % (filename, changes["changed-by"], msg))
438
439         # Ensure all the values in Closes: are numbers
440         if self.pkg.changes.has_key("closes"):
441             for i in self.pkg.changes["closes"].keys():
442                 if re_isanum.match (i) == None:
443                     self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))
444
445         # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
446         self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
447         self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])
448
449         # Check there isn't already a changes file of the same name in one
450         # of the queue directories.
451         base_filename = os.path.basename(filename)
452         for d in [ "Accepted", "Byhand", "Done", "New", "ProposedUpdates", "OldProposedUpdates" ]:
453             if os.path.exists(os.path.join(Cnf["Dir::Queue::%s" % (d) ], base_filename)):
454                 self.rejects.append("%s: a file with this name already exists in the %s directory." % (base_filename, d))
455
456         # Check the .changes is non-empty
457         if not self.pkg.files:
458             self.rejects.append("%s: nothing to do (Files field is empty)." % (base_filename))
459             return False
460
461         # Changes was syntactically valid even if we'll reject
462         return True
463
464     ###########################################################################
465
    def check_distributions(self):
        "Check and map the Distribution field"

        Cnf = Config()

        # Handle suite mappings.  Each entry in SuiteMappings is a
        # space-separated directive; the first word selects the mapping type.
        for m in Cnf.ValueList("SuiteMappings"):
            args = m.split()
            mtype = args[0]
            if mtype == "map" or mtype == "silent-map":
                # Unconditionally rewrite suite 'source' to 'dest';
                # "silent-map" does the same without leaving a note.
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    del self.pkg.changes["distribution"][source]
                    self.pkg.changes["distribution"][dest] = 1
                    if mtype != "silent-map":
                        self.notes.append("Mapping %s to %s." % (source, dest))
                # Keep any distribution-version entry in step with the rename.
                if self.pkg.changes.has_key("distribution-version"):
                    if self.pkg.changes["distribution-version"].has_key(source):
                        self.pkg.changes["distribution-version"][source]=dest
            elif mtype == "map-unreleased":
                # Map 'source' to 'dest' only when the upload includes an
                # architecture that 'source' does not carry; one such
                # architecture is enough (hence the break).
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    for arch in self.pkg.changes["architecture"].keys():
                        if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
                            self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
                            del self.pkg.changes["distribution"][source]
                            self.pkg.changes["distribution"][dest] = 1
                            break
            elif mtype == "ignore":
                # Drop the suite from the target list with a warning.
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    del self.pkg.changes["distribution"][suite]
                    self.warnings.append("Ignoring %s as a target suite." % (suite))
            elif mtype == "reject":
                # Uploads targeting this suite are refused outright.
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    self.rejects.append("Uploads to %s are not accepted." % (suite))
            elif mtype == "propup-version":
                # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
                #
                # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
                if self.pkg.changes["distribution"].has_key(args[1]):
                    self.pkg.changes.setdefault("distribution-version", {})
                    for suite in args[2:]:
                        self.pkg.changes["distribution-version"][suite] = suite

        # Ensure there is (still) a target distribution
        if len(self.pkg.changes["distribution"].keys()) < 1:
            self.rejects.append("No valid distribution remaining.")

        # Ensure target distributions exist
        for suite in self.pkg.changes["distribution"].keys():
            if not Cnf.has_key("Suite::%s" % (suite)):
                self.rejects.append("Unknown distribution `%s'." % (suite))
520
521     ###########################################################################
522
    def binary_file_checks(self, f, session):
        """
        Sanity-check the binary package file C{f} (.deb/.udeb): extract and
        validate its control fields, check name/version/architecture
        consistency against the filename and the .changes file, and verify
        its source package exists.  Problems are appended to self.rejects
        and self.warnings; the entry in self.pkg.files is enriched with
        package/version/architecture/source metadata as a side effect.
        """
        cnf = Config()
        entry = self.pkg.files[f]

        # Extract package control information
        deb_file = utils.open_file(f)
        try:
            control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
        except:  # NOTE(review): deliberately broad -- any extraction failure is a reject
            self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
            deb_file.close()
            # Can't continue, none of the checks on control would work.
            return

        # Check for mandantory "Description:"
        deb_file.seek(0)
        try:
            # The subscript exists only to raise KeyError when Description
            # is absent; the concatenated value is deliberately discarded.
            apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
        except:
            # NOTE(review): deb_file is left open on this early return --
            # candidate resource-leak fix.
            self.rejects.append("%s: Missing Description in binary package" % (f))
            return

        deb_file.close()

        # Check for mandatory fields
        for field in [ "Package", "Architecture", "Version" ]:
            if control.Find(field) == None:
                # Can't continue
                self.rejects.append("%s: No %s field in control." % (f, field))
                return

        # Ensure the package name matches the one give in the .changes
        if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
            self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))

        # Validate the package field
        package = control.Find("Package")
        if not re_valid_pkg_name.match(package):
            self.rejects.append("%s: invalid package name '%s'." % (f, package))

        # Validate the version field
        version = control.Find("Version")
        if not re_valid_version.match(version):
            self.rejects.append("%s: invalid version number '%s'." % (f, version))

        # Ensure the architecture of the .deb is one we know about.
        # It must be valid either for the default suite or the upload's
        # first target suite.
        default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
        architecture = control.Find("Architecture")
        upload_suite = self.pkg.changes["distribution"].keys()[0]

        if      architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session)] \
            and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session)]:
            self.rejects.append("Unknown architecture '%s'." % (architecture))

        # Ensure the architecture of the .deb is one of the ones
        # listed in the .changes.
        if not self.pkg.changes["architecture"].has_key(architecture):
            self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))

        # Sanity-check the Depends field
        # NOTE(review): Find() returns None for an absent field, so only a
        # present-but-empty Depends triggers this -- presumably intentional.
        depends = control.Find("Depends")
        if depends == '':
            self.rejects.append("%s: Depends field is empty." % (f))

        # Sanity-check the Provides field
        provides = control.Find("Provides")
        if provides:
            provide = re_spacestrip.sub('', provides)
            if provide == '':
                self.rejects.append("%s: Provides field is empty." % (f))
            prov_list = provide.split(",")
            for prov in prov_list:
                if not re_valid_pkg_name.match(prov):
                    self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))

        # Check the section & priority match those given in the .changes (non-fatal)
        if     control.Find("Section") and entry["section"] != "" \
           and entry["section"] != control.Find("Section"):
            self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
                                (f, control.Find("Section", ""), entry["section"]))
        if control.Find("Priority") and entry["priority"] != "" \
           and entry["priority"] != control.Find("Priority"):
            self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
                                (f, control.Find("Priority", ""), entry["priority"]))

        # Record the control metadata on the files entry.
        entry["package"] = package
        entry["architecture"] = architecture
        entry["version"] = version
        entry["maintainer"] = control.Find("Maintainer", "")

        # Classify by extension; anything else is a reject.
        if f.endswith(".udeb"):
            self.pkg.files[f]["dbtype"] = "udeb"
        elif f.endswith(".deb"):
            self.pkg.files[f]["dbtype"] = "deb"
        else:
            self.rejects.append("%s is neither a .deb or a .udeb." % (f))

        # Source defaults to the binary package name when not given.
        entry["source"] = control.Find("Source", entry["package"])

        # Get the source version
        source = entry["source"]
        source_version = ""

        # A Source field of the form "name (version)" carries an explicit
        # source version; split it out.
        if source.find("(") != -1:
            m = re_extract_src_version.match(source)
            source = m.group(1)
            source_version = m.group(2)

        if not source_version:
            source_version = self.pkg.files[f]["version"]

        entry["source package"] = source
        entry["source version"] = source_version

        # Ensure the filename matches the contents of the .deb
        m = re_isadeb.match(f)

        #  package name
        file_package = m.group(1)
        if entry["package"] != file_package:
            self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
                                (f, file_package, entry["dbtype"], entry["package"]))
        epochless_version = re_no_epoch.sub('', control.Find("Version"))

        #  version
        file_version = m.group(2)
        if epochless_version != file_version:
            self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
                                (f, file_version, entry["dbtype"], epochless_version))

        #  architecture
        file_architecture = m.group(3)
        if entry["architecture"] != file_architecture:
            self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
                                (f, file_architecture, entry["dbtype"], entry["architecture"]))

        # Check for existent source
        source_version = entry["source version"]
        source_package = entry["source package"]
        if self.pkg.changes["architecture"].has_key("source"):
            # Source comes with this upload; versions must agree.
            if source_version != self.pkg.changes["version"]:
                self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
                                    (source_version, f, self.pkg.changes["version"]))
        else:
            # Check in the SQL database
            if not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
                # Check in one of the other directories
                source_epochless_version = re_no_epoch.sub('', source_version)
                dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
                if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
                    entry["byhand"] = 1
                elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
                    entry["new"] = 1
                else:
                    dsc_file_exists = False
                    for myq in ["Accepted", "Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
                        if cnf.has_key("Dir::Queue::%s" % (myq)):
                            if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
                                dsc_file_exists = True
                                break

                    if not dsc_file_exists:
                        self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))

        # Check the version and for file overwrites
        self.check_binary_against_db(f, session)

        # Scan the package contents; collect any rejects it produces.
        b = Binary(f)
        b.scan_package()
        if len(b.rejects) > 0:
            for j in b.rejects:
                self.rejects.append(j)
695
    def source_file_checks(self, f, session):
        """
        Run checks on the source file C{f}: record its package, version and
        type, validate them against the .changes, check the signature of a
        .dsc and mark the entry's architecture as "source".

        Appends to self.rejects as appropriate; returns nothing.

        @type f: string
        @param f: filename (key into self.pkg.files)

        @param session: database session (unused here; kept for interface
            symmetry with the other per-file check routines)
        """
        entry = self.pkg.files[f]

        # Not named like a source file at all -> nothing to check here.
        m = re_issource.match(f)
        if not m:
            return

        entry["package"] = m.group(1)
        entry["version"] = m.group(2)
        entry["type"] = m.group(3)

        # Ensure the source package name matches the Source field in the .changes
        if self.pkg.changes["source"] != entry["package"]:
            self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))

        # Ensure the source version matches the version in the .changes file
        # (an orig.tar.gz carries only the upstream part of the version,
        # hence the "chopversion2" variant)
        if entry["type"] == "orig.tar.gz":
            changes_version = self.pkg.changes["chopversion2"]
        else:
            changes_version = self.pkg.changes["chopversion"]

        if changes_version != entry["version"]:
            self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))

        # Ensure the .changes lists source in the Architecture field
        if not self.pkg.changes["architecture"].has_key("source"):
            self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))

        # Check the signature of a .dsc file
        if entry["type"] == "dsc":
            # check_signature returns either:
            #  (None, [list, of, rejects]) or (signature, [])
            (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
            for j in rejects:
                self.rejects.append(j)

        entry["architecture"] = "source"
734     def per_suite_file_checks(self, f, suite, session):
735         cnf = Config()
736         entry = self.pkg.files[f]
737         archive = utils.where_am_i()
738
739         # Skip byhand
740         if entry.has_key("byhand"):
741             return
742
743         # Check we have fields we need to do these checks
744         oktogo = True
745         for m in ['component', 'package', 'priority', 'size', 'md5sum']:
746             if not entry.has_key(m):
747                 self.rejects.append("file '%s' does not have field %s set" % (f, m))
748                 oktogo = False
749
750         if not oktogo:
751             return
752
753         # Handle component mappings
754         for m in cnf.ValueList("ComponentMappings"):
755             (source, dest) = m.split()
756             if entry["component"] == source:
757                 entry["original component"] = source
758                 entry["component"] = dest
759
760         # Ensure the component is valid for the target suite
761         if cnf.has_key("Suite:%s::Components" % (suite)) and \
762            entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
763             self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
764             return
765
766         # Validate the component
767         if not get_component(entry["component"], session):
768             self.rejects.append("file '%s' has unknown component '%s'." % (f, component))
769             return
770
771         # See if the package is NEW
772         if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
773             entry["new"] = 1
774
775         # Validate the priority
776         if entry["priority"].find('/') != -1:
777             self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))
778
779         # Determine the location
780         location = cnf["Dir::Pool"]
781         l = get_location(location, entry["component"], archive, session)
782         if l is None:
783             self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s, Archive: %s)" % (component, archive))
784             entry["location id"] = -1
785         else:
786             entry["location id"] = l.location_id
787
788         # Check the md5sum & size against existing files (if any)
789         entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])
790
791         found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
792                                          entry["size"], entry["md5sum"], entry["location id"])
793
794         if found is None:
795             self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
796         elif found is False and poolfile is not None:
797             self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
798         else:
799             if poolfile is None:
800                 entry["files id"] = None
801             else:
802                 entry["files id"] = poolfile.file_id
803
804         # Check for packages that have moved from one component to another
805         entry['suite'] = suite
806         res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
807         if res.rowcount > 0:
808             entry["othercomponents"] = res.fetchone()[0]
809
810     def check_files(self, action=True):
811         archive = utils.where_am_i()
812         file_keys = self.pkg.files.keys()
813         holding = Holding()
814         cnf = Config()
815
816         # XXX: As far as I can tell, this can no longer happen - see
817         #      comments by AJ in old revisions - mhy
818         # if reprocess is 2 we've already done this and we're checking
819         # things again for the new .orig.tar.gz.
820         # [Yes, I'm fully aware of how disgusting this is]
821         if action and self.reprocess < 2:
822             cwd = os.getcwd()
823             os.chdir(self.pkg.directory)
824             for f in file_keys:
825                 ret = holding.copy_to_holding(f)
826                 if ret is not None:
827                     # XXX: Should we bail out here or try and continue?
828                     self.rejects.append(ret)
829
830             os.chdir(cwd)
831
832         # Check there isn't already a .changes or .dak file of the same name in
833         # the proposed-updates "CopyChanges" or "CopyDotDak" storage directories.
834         # [NB: this check must be done post-suite mapping]
835         base_filename = os.path.basename(self.pkg.changes_file)
836         dot_dak_filename = base_filename[:-8] + ".dak"
837
838         for suite in self.pkg.changes["distribution"].keys():
839             copychanges = "Suite::%s::CopyChanges" % (suite)
840             if cnf.has_key(copychanges) and \
841                    os.path.exists(os.path.join(cnf[copychanges], base_filename)):
842                 self.rejects.append("%s: a file with this name already exists in %s" \
843                            % (base_filename, cnf[copychanges]))
844
845             copy_dot_dak = "Suite::%s::CopyDotDak" % (suite)
846             if cnf.has_key(copy_dot_dak) and \
847                    os.path.exists(os.path.join(cnf[copy_dot_dak], dot_dak_filename)):
848                 self.rejects.append("%s: a file with this name already exists in %s" \
849                            % (dot_dak_filename, Cnf[copy_dot_dak]))
850
851         self.reprocess = 0
852         has_binaries = False
853         has_source = False
854
855         session = DBConn().session()
856
857         for f, entry in self.pkg.files.items():
858             # Ensure the file does not already exist in one of the accepted directories
859             for d in [ "Accepted", "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
860                 if not cnf.has_key("Dir::Queue::%s" % (d)): continue
861                 if os.path.exists(cnf["Dir::Queue::%s" % (d) ] + '/' + f):
862                     self.rejects.append("%s file already exists in the %s directory." % (f, d))
863
864             if not re_taint_free.match(f):
865                 self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))
866
867             # Check the file is readable
868             if os.access(f, os.R_OK) == 0:
869                 # When running in -n, copy_to_holding() won't have
870                 # generated the reject_message, so we need to.
871                 if action:
872                     if os.path.exists(f):
873                         self.rejects.append("Can't read `%s'. [permission denied]" % (f))
874                     else:
875                         self.rejects.append("Can't read `%s'. [file not found]" % (f))
876                 entry["type"] = "unreadable"
877                 continue
878
879             # If it's byhand skip remaining checks
880             if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
881                 entry["byhand"] = 1
882                 entry["type"] = "byhand"
883
884             # Checks for a binary package...
885             elif re_isadeb.match(f):
886                 has_binaries = True
887                 entry["type"] = "deb"
888
889                 # This routine appends to self.rejects/warnings as appropriate
890                 self.binary_file_checks(f, session)
891
892             # Checks for a source package...
893             elif re_issource.match(f):
894                 has_source = True
895
896                 # This routine appends to self.rejects/warnings as appropriate
897                 self.source_file_checks(f, session)
898
899             # Not a binary or source package?  Assume byhand...
900             else:
901                 entry["byhand"] = 1
902                 entry["type"] = "byhand"
903
904             # Per-suite file checks
905             entry["oldfiles"] = {}
906             for suite in self.pkg.changes["distribution"].keys():
907                 self.per_suite_file_checks(f, suite, session)
908
909         session.close()
910
911         # If the .changes file says it has source, it must have source.
912         if self.pkg.changes["architecture"].has_key("source"):
913             if not has_source:
914                 self.rejects.append("no source found and Architecture line in changes mention source.")
915
916             if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
917                 self.rejects.append("source only uploads are not supported.")
918
919     ###########################################################################
    def check_dsc(self, action=True):
        """
        Returns bool indicating whether or not the source changes are valid.

        Parses the .dsc named in the upload, builds self.pkg.dsc_files from
        its Files: field, enforces mandatory fields, validates source name,
        version, Format, Maintainer and Build-Depends*, cross-checks the
        version against the .changes, requires a (orig.)tar.gz, and finally
        runs the database checks.  Appends to self.rejects throughout.

        @type action: bool
        @param action: if False (dry run), an unreadable .dsc is rejected
            here instead of relying on copy_to_holding() having done so
        """
        # Ensure there is source to check
        if not self.pkg.changes["architecture"].has_key("source"):
            return True

        # Find the .dsc
        dsc_filename = None
        for f, entry in self.pkg.files.items():
            if entry["type"] == "dsc":
                if dsc_filename:
                    self.rejects.append("can not process a .changes file with multiple .dsc's.")
                    return False
                else:
                    dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            self.rejects.append("source uploads must contain a dsc file")
            return False

        # Parse the .dsc file
        # NOTE(review): signing_rules=1 presumably enforces an inline GPG
        # signature layout - confirm against utils.parse_changes
        try:
            self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1))
        except CantOpenError:
            # if not -n copy_to_holding() will have done this for us...
            if not action:
                self.rejects.append("%s: can't read file." % (dsc_filename))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
        except InvalidDscError, line:
            self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
        except ChangesUnicodeError:
            self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))

        # Build up the file list of files mentioned by the .dsc
        try:
            self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
        except NoFilesFieldError:
            self.rejects.append("%s: no Files: field." % (dsc_filename))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
            return False

        # Enforce mandatory fields
        for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
            if not self.pkg.dsc.has_key(i):
                self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
                return False

        # Validate the source and version fields
        if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
        if not re_valid_version.match(self.pkg.dsc["version"]):
            self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))

        # Bumping the version number of the .dsc breaks extraction by stable's
        # dpkg-source.  So let's not do that...
        if self.pkg.dsc["format"] != "1.0":
            self.rejects.append("%s: incompatible 'Format' version produced by a broken version of dpkg-dev 1.9.1{3,4}." % (dsc_filename))

        # Validate the Maintainer field
        try:
            # We ignore the return value
            fix_maintainer(self.pkg.dsc["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                                 % (dsc_filename, self.pkg.dsc["maintainer"], msg))

        # Validate the build-depends field(s)
        for field_name in [ "build-depends", "build-depends-indep" ]:
            field = self.pkg.dsc.get(field_name)
            if field:
                # Check for broken dpkg-dev lossage...
                if field.startswith("ARRAY"):
                    self.rejects.append("%s: invalid %s field produced by a broken version of dpkg-dev (1.10.11)" % \
                                        (dsc_filename, field_name.title()))

                # Have apt try to parse them...
                try:
                    apt_pkg.ParseSrcDepends(field)
                except:
                    self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))

        # Ensure the version number in the .dsc matches the version number in the .changes
        epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        changes_version = self.pkg.files[dsc_filename]["version"]

        if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
            self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))

        # Ensure there is a .tar.gz in the .dsc file
        has_tar = False
        for f in self.pkg.dsc_files.keys():
            m = re_issource.match(f)
            if not m:
                self.rejects.append("%s: %s in Files field not recognised as source." % (dsc_filename, f))
                continue
            ftype = m.group(3)
            if ftype == "orig.tar.gz" or ftype == "tar.gz":
                has_tar = True

        if not has_tar:
            self.rejects.append("%s: no .tar.gz or .orig.tar.gz in 'Files' field." % (dsc_filename))

        # Ensure source is newer than existing source in target suites
        session = DBConn().session()
        self.check_source_against_db(dsc_filename, session)
        self.check_dsc_against_db(dsc_filename, session)
        session.close()

        return True
1036
1037     ###########################################################################
1038
1039     def get_changelog_versions(self, source_dir):
1040         """Extracts a the source package and (optionally) grabs the
1041         version history out of debian/changelog for the BTS."""
1042
1043         cnf = Config()
1044
1045         # Find the .dsc (again)
1046         dsc_filename = None
1047         for f in self.pkg.files.keys():
1048             if self.pkg.files[f]["type"] == "dsc":
1049                 dsc_filename = f
1050
1051         # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1052         if not dsc_filename:
1053             return
1054
1055         # Create a symlink mirror of the source files in our temporary directory
1056         for f in self.pkg.files.keys():
1057             m = re_issource.match(f)
1058             if m:
1059                 src = os.path.join(source_dir, f)
1060                 # If a file is missing for whatever reason, give up.
1061                 if not os.path.exists(src):
1062                     return
1063                 ftype = m.group(3)
1064                 if ftype == "orig.tar.gz" and self.pkg.orig_tar_gz:
1065                     continue
1066                 dest = os.path.join(os.getcwd(), f)
1067                 os.symlink(src, dest)
1068
1069         # If the orig.tar.gz is not a part of the upload, create a symlink to the
1070         # existing copy.
1071         if self.pkg.orig_tar_gz:
1072             dest = os.path.join(os.getcwd(), os.path.basename(self.pkg.orig_tar_gz))
1073             os.symlink(self.pkg.orig_tar_gz, dest)
1074
1075         # Extract the source
1076         cmd = "dpkg-source -sn -x %s" % (dsc_filename)
1077         (result, output) = commands.getstatusoutput(cmd)
1078         if (result != 0):
1079             self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
1080             self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "), "")
1081             return
1082
1083         if not cnf.Find("Dir::Queue::BTSVersionTrack"):
1084             return
1085
1086         # Get the upstream version
1087         upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1088         if re_strip_revision.search(upstr_version):
1089             upstr_version = re_strip_revision.sub('', upstr_version)
1090
1091         # Ensure the changelog file exists
1092         changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
1093         if not os.path.exists(changelog_filename):
1094             self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
1095             return
1096
1097         # Parse the changelog
1098         self.pkg.dsc["bts changelog"] = ""
1099         changelog_file = utils.open_file(changelog_filename)
1100         for line in changelog_file.readlines():
1101             m = re_changelog_versions.match(line)
1102             if m:
1103                 self.pkg.dsc["bts changelog"] += line
1104         changelog_file.close()
1105
1106         # Check we found at least one revision in the changelog
1107         if not self.pkg.dsc["bts changelog"]:
1108             self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1109
1110     def check_source(self):
1111         # XXX: I'm fairly sure reprocess == 2 can never happen
1112         #      AJT disabled the is_incoming check years ago - mhy
1113         #      We should probably scrap or rethink the whole reprocess thing
1114         # Bail out if:
1115         #    a) there's no source
1116         # or b) reprocess is 2 - we will do this check next time when orig.tar.gz is in 'files'
1117         # or c) the orig.tar.gz is MIA
1118         if not self.pkg.changes["architecture"].has_key("source") or self.reprocess == 2 \
1119            or self.pkg.orig_tar_gz == -1:
1120             return
1121
1122         tmpdir = utils.temp_dirname()
1123
1124         # Move into the temporary directory
1125         cwd = os.getcwd()
1126         os.chdir(tmpdir)
1127
1128         # Get the changelog version history
1129         self.get_changelog_versions(cwd)
1130
1131         # Move back and cleanup the temporary tree
1132         os.chdir(cwd)
1133
1134         try:
1135             shutil.rmtree(tmpdir)
1136         except OSError, e:
1137             if e.errno != errno.EACCES:
1138                 print "foobar"
1139                 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1140
1141             self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1142             # We probably have u-r or u-w directories so chmod everything
1143             # and try again.
1144             cmd = "chmod -R u+rwx %s" % (tmpdir)
1145             result = os.system(cmd)
1146             if result != 0:
1147                 utils.fubar("'%s' failed with result %s." % (cmd, result))
1148             shutil.rmtree(tmpdir)
1149         except Exception, e:
1150             print "foobar2 (%s)" % e
1151             utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1152
1153     ###########################################################################
1154     def ensure_hashes(self):
1155         # Make sure we recognise the format of the Files: field in the .changes
1156         format = self.pkg.changes.get("format", "0.0").split(".", 1)
1157         if len(format) == 2:
1158             format = int(format[0]), int(format[1])
1159         else:
1160             format = int(float(format[0])), 0
1161
1162         # We need to deal with the original changes blob, as the fields we need
1163         # might not be in the changes dict serialised into the .dak anymore.
1164         orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1165
1166         # Copy the checksums over to the current changes dict.  This will keep
1167         # the existing modifications to it intact.
1168         for field in orig_changes:
1169             if field.startswith('checksums-'):
1170                 self.pkg.changes[field] = orig_changes[field]
1171
1172         # Check for unsupported hashes
1173         for j in utils.check_hash_fields(".changes", self.pkg.changes):
1174             self.rejects.append(j)
1175
1176         for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1177             self.rejects.append(j)
1178
1179         # We have to calculate the hash if we have an earlier changes version than
1180         # the hash appears in rather than require it exist in the changes file
1181         for hashname, hashfunc, version in utils.known_hashes:
1182             # TODO: Move _ensure_changes_hash into this class
1183             for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1184                 self.rejects.append(j)
1185             if "source" in self.pkg.changes["architecture"]:
1186                 # TODO: Move _ensure_dsc_hash into this class
1187                 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1188                     self.rejects.append(j)
1189
1190     def check_hashes(self):
1191         for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1192             self.rejects.append(m)
1193
1194         for m in utils.check_size(".changes", self.pkg.files):
1195             self.rejects.append(m)
1196
1197         for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1198             self.rejects.append(m)
1199
1200         for m in utils.check_size(".dsc", self.pkg.dsc_files):
1201             self.rejects.append(m)
1202
1203         self.ensure_hashes()
1204
1205     ###########################################################################
1206     def check_urgency(self):
1207         cnf = Config()
1208         if self.pkg.changes["architecture"].has_key("source"):
1209             if not self.pkg.changes.has_key("urgency"):
1210                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1211             self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1212             if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1213                 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1214                                      (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1215                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1216
1217     ###########################################################################
1218
    # Sanity check the time stamps of files inside debs.
    # [Files in the near future cause ugly warnings and extreme time
    #  travel can cause errors on extraction]

    def check_timestamps(self):
        """
        Reject any .deb in the upload whose control.tar.gz or data tarball
        contains member time stamps beyond the configured future grace
        period or before the configured past cutoff year.

        Appends to self.rejects as appropriate.
        """
        Cnf = Config()

        future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
        past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
        tar = TarTime(future_cutoff, past_cutoff)

        for filename, entry in self.pkg.files.items():
            if entry["type"] == "deb":
                tar.reset()
                try:
                    deb_file = utils.open_file(filename)
                    apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
                    deb_file.seek(0)
                    try:
                        apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
                    except SystemError, e:
                        # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
                        # NOTE(review): `re' is not imported in this module's
                        # visible import block - presumably it arrives via a
                        # wildcard import (e.g. regexes); confirm.
                        if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
                            raise
                        deb_file.seek(0)
                        apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")

                    deb_file.close()

                    future_files = tar.future_files.keys()
                    if future_files:
                        num_future_files = len(future_files)
                        future_file = future_files[0]
                        future_date = tar.future_files[future_file]
                        self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
                               % (filename, num_future_files, future_file, time.ctime(future_date)))

                    ancient_files = tar.ancient_files.keys()
                    if ancient_files:
                        num_ancient_files = len(ancient_files)
                        ancient_file = ancient_files[0]
                        ancient_date = tar.ancient_files[ancient_file]
                        self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
                               % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
                # HACK: bare except deliberately turns any extraction failure
                # into a reject rather than a crash; sys.exc_type/exc_value
                # are the (deprecated) Python 2 globals for the last error.
                except:
                    self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1265
1266     ###########################################################################
    def check_signed_by_key(self):
        """
        Ensure the .changes is signed by an authorized uploader.

        Maps the signing fingerprint to a uid and its upload rights,
        detects sponsored uploads, and for restricted (DM-style) uploaders
        verifies DM-Upload-Allowed, Maintainer/Uploaders membership, binary
        hijacks and BYHAND/NEW files.  Appends to self.rejects throughout.
        """
        session = DBConn().session()

        (uid, uid_name, is_dm) = lookup_uid_from_fingerprint(self.pkg.changes["fingerprint"], session=session)

        # match claimed name with actual name:
        if uid is None:
            # This is fundamentally broken but need us to refactor how we get
            # the UIDs/Fingerprints in order for us to fix it properly
            uid, uid_email = self.pkg.changes["fingerprint"], uid
            may_nmu, may_sponsor = 1, 1
            # XXX by default new dds don't have a fingerprint/uid in the db atm,
            #     and can't get one in there if we don't allow nmu/sponsorship
        elif is_dm is False:
            # If is_dm is False, we allow full upload rights
            uid_email = "%s@debian.org" % (uid)
            may_nmu, may_sponsor = 1, 1
        else:
            # Assume limited upload rights unless we've discovered otherwise
            uid_email = uid
            may_nmu, may_sponsor = 0, 0

        # An upload is "sponsored" when neither the signer's email nor name
        # matches the Maintainer or Changed-By fields of the .changes.
        if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
            sponsored = 0
        elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
            sponsored = 0
            if uid_name == "": sponsored = 1
        else:
            sponsored = 1
            # Record the sponsor address unless one of their key's alias
            # addresses already appears in Maintainer/Changed-By.
            if ("source" in self.pkg.changes["architecture"] and
                uid_email and utils.is_email_alias(uid_email)):
                sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
                if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
                    self.pkg.changes["changedbyemail"] not in sponsor_addresses):
                    self.pkg.changes["sponsoremail"] = uid_email

        if sponsored and not may_sponsor:
            self.rejects.append("%s is not authorised to sponsor uploads" % (uid))

        # Restricted uploaders: enforce the DM upload rules.
        if not sponsored and not may_nmu:
            should_reject = True
            highest_sid, highest_version = None, None

            # XXX: This reimplements in SQLA what existed before but it's fundamentally fucked
            #      It ignores higher versions with the dm_upload_allowed flag set to false
            #      I'm keeping the existing behaviour for now until I've gone back and
            #      checked exactly what the GR says - mhy
            for si in get_sources_from_name(source=self.pkg.changes['source'], dm_upload_allowed=True, session=session):
                if highest_version is None or apt_pkg.VersionCompare(si.version, highest_version) == 1:
                     highest_sid = si.source_id
                     highest_version = si.version

            if highest_sid is None:
                self.rejects.append("Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version" % self.pkg.changes["source"])
            else:
                # The signer must appear in Maintainer or Uploaders of that
                # highest DM-allowed version.
                for sup in session.query(SrcUploader).join(DBSource).filter_by(source_id=highest_sid):
                    (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
                    if email == uid_email or name == uid_name:
                        should_reject = False
                        break

            if should_reject is True:
                self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (uid, self.pkg.changes["source"]))

            # A DM may not take over ("hijack") binaries currently built
            # from a different source package in any target suite.
            for b in self.pkg.changes["binary"].keys():
                for suite in self.pkg.changes["distribution"].keys():
                    q = session.query(DBSource)
                    q = q.join(DBBinary).filter_by(package=b)
                    q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)

                    for s in q.all():
                        if s.source != self.pkg.changes["source"]:
                            self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (uid, b, s, suite))

            # DMs may not upload BYHAND or NEW files at all.
            for f in self.pkg.files.keys():
                if self.pkg.files[f].has_key("byhand"):
                    self.rejects.append("%s may not upload BYHAND file %s" % (uid, f))
                if self.pkg.files[f].has_key("new"):
                    self.rejects.append("%s may not upload NEW file %s" % (uid, f))

        session.close()
1349
1350     ###########################################################################
1351     def build_summaries(self):
1352         """ Build a summary of changes the upload introduces. """
1353
1354         (byhand, new, summary, override_summary) = self.pkg.file_summary()
1355
1356         short_summary = summary
1357
1358         # This is for direport's benefit...
1359         f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1360
1361         if byhand or new:
1362             summary += "Changes: " + f
1363
1364         summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1365
1366         summary += self.announce(short_summary, 0)
1367
1368         return (summary, short_summary)
1369
1370     ###########################################################################
1371
1372     def close_bugs(self, summary, action):
1373         """
1374         Send mail to close bugs as instructed by the closes field in the changes file.
1375         Also add a line to summary if any work was done.
1376
1377         @type summary: string
1378         @param summary: summary text, as given by L{build_summaries}
1379
1380         @type action: bool
1381         @param action: Set to false no real action will be done.
1382
1383         @rtype: string
1384         @return: summary. If action was taken, extended by the list of closed bugs.
1385
1386         """
1387
1388         template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1389
1390         bugs = self.pkg.changes["closes"].keys()
1391
1392         if not bugs:
1393             return summary
1394
1395         bugs.sort()
1396         summary += "Closing bugs: "
1397         for bug in bugs:
1398             summary += "%s " % (bug)
1399             if action:
1400                 self.Subst["__BUG_NUMBER__"] = bug
1401                 if self.pkg.changes["distribution"].has_key("stable"):
1402                     self.Subst["__STABLE_WARNING__"] = """
1403 Note that this package is not part of the released stable Debian
1404 distribution.  It may have dependencies on other unreleased software,
1405 or other instabilities.  Please take care if you wish to install it.
1406 The update will eventually make its way into the next released Debian
1407 distribution."""
1408                 else:
1409                     self.Subst["__STABLE_WARNING__"] = ""
1410                     mail_message = utils.TemplateSubst(self.Subst, template)
1411                     utils.send_mail(mail_message)
1412
1413                 # Clear up after ourselves
1414                 del self.Subst["__BUG_NUMBER__"]
1415                 del self.Subst["__STABLE_WARNING__"]
1416
1417         if action and self.logger:
1418             self.logger.log(["closing bugs"] + bugs)
1419
1420         summary += "\n"
1421
1422         return summary
1423
1424     ###########################################################################
1425
1426     def announce(self, short_summary, action):
1427         """
1428         Send an announce mail about a new upload.
1429
1430         @type short_summary: string
1431         @param short_summary: Short summary text to include in the mail
1432
1433         @type action: bool
1434         @param action: Set to false no real action will be done.
1435
1436         @rtype: string
1437         @return: Textstring about action taken.
1438
1439         """
1440
1441         cnf = Config()
1442         announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
1443
1444         # Only do announcements for source uploads with a recent dpkg-dev installed
1445         if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
1446            self.pkg.changes["architecture"].has_key("source"):
1447             return ""
1448
1449         lists_done = {}
1450         summary = ""
1451
1452         self.Subst["__SHORT_SUMMARY__"] = short_summary
1453
1454         for dist in self.pkg.changes["distribution"].keys():
1455             announce_list = cnf.Find("Suite::%s::Announce" % (dist))
1456             if announce_list == "" or lists_done.has_key(announce_list):
1457                 continue
1458
1459             lists_done[announce_list] = 1
1460             summary += "Announcing to %s\n" % (announce_list)
1461
1462             if action:
1463                 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
1464                 if cnf.get("Dinstall::TrackingServer") and \
1465                    self.pkg.changes["architecture"].has_key("source"):
1466                     trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
1467                     self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
1468
1469                 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
1470                 utils.send_mail(mail_message)
1471
1472                 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
1473
1474         if cnf.FindB("Dinstall::CloseBugs"):
1475             summary = self.close_bugs(summary, action)
1476
1477         del self.Subst["__SHORT_SUMMARY__"]
1478
1479         return summary
1480
1481     ###########################################################################
1482
    def accept (self, summary, short_summary, targetdir=None):
        """
        Accept an upload.

        This moves all files referenced from the .changes into the I{accepted}
        queue, sends the accepted mail, announces to lists, closes bugs and
        also checks for override disparities. If enabled it will write out
        the version history for the BTS Version Tracking and will finally call
        L{queue_build}.

        @type summary: string
        @param summary: Summary text

        @type short_summary: string
        @param short_summary: Short summary

        @type targetdir: string or None
        @param targetdir: directory to move the upload into; defaults to
            the configured Dir::Queue::Accepted
        """

        cnf = Config()
        stats = SummaryStats()

        accepttemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted')

        if targetdir is None:
            targetdir = cnf["Dir::Queue::Accepted"]

        print "Accepting."
        if self.logger:
            self.logger.log(["Accepting changes", self.pkg.changes_file])

        # Write the .dak metadata for this upload into the target directory.
        self.pkg.write_dot_dak(targetdir)

        # Move all the files into the accepted directory
        utils.move(self.pkg.changes_file, targetdir)

        for name, entry in sorted(self.pkg.files.items()):
            utils.move(name, targetdir)
            stats.accept_bytes += float(entry["size"])

        stats.accept_count += 1

        # Send accept mail, announce to lists, close bugs and check for
        # override disparities
        if not cnf["Dinstall::Options::No-Mail"]:
            self.Subst["__SUITE__"] = ""
            self.Subst["__SUMMARY__"] = summary
            mail_message = utils.TemplateSubst(self.Subst, accepttemplate)
            utils.send_mail(mail_message)
            self.announce(short_summary, 1)

        ## Helper stuff for DebBugs Version Tracking
        if cnf.Find("Dir::Queue::BTSVersionTrack"):
            # ??? once queue/* is cleared on *.d.o and/or reprocessed
            # the conditionalization on dsc["bts changelog"] should be
            # dropped.

            # Write out the version history from the changelog
            if self.pkg.changes["architecture"].has_key("source") and \
               self.pkg.dsc.has_key("bts changelog"):

                # Write to a dot-prefixed temp file and rename into place so
                # readers never see a half-written .versions file.
                (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
                version_history = os.fdopen(fd, 'w')
                version_history.write(self.pkg.dsc["bts changelog"])
                version_history.close()
                filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
                                      self.pkg.changes_file[:-8]+".versions")
                os.rename(temp_filename, filename)
                os.chmod(filename, 0644)

            # Write out the binary -> source mapping.
            # Same temp-file-then-rename dance as above, for the .debinfo file.
            (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
            debinfo = os.fdopen(fd, 'w')
            for name, entry in sorted(self.pkg.files.items()):
                if entry["type"] == "deb":
                    line = " ".join([entry["package"], entry["version"],
                                     entry["architecture"], entry["source package"],
                                     entry["source version"]])
                    debinfo.write(line+"\n")
            debinfo.close()
            filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
                                  self.pkg.changes_file[:-8]+".debinfo")
            os.rename(temp_filename, filename)
            os.chmod(filename, 0644)

        # Its is Cnf["Dir::Queue::Accepted"] here, not targetdir!
        # <Ganneff> we do call queue_build too
        # <mhy> well yes, we'd have had to if we were inserting into accepted
        # <Ganneff> now. thats database only.
        # <mhy> urgh, that's going to get messy
        # <Ganneff> so i make the p-n call to it *also* using accepted/
        # <mhy> but then the packages will be in the queue_build table without the files being there
        # <Ganneff> as the buildd queue is only regenerated whenever unchecked runs
        # <mhy> ah, good point
        # <Ganneff> so it will work out, as unchecked move it over
        # <mhy> that's all completely sick
        # <Ganneff> yes

        # This routine returns None on success or an error on failure
        res = get_queue('accepted').autobuild_upload(self.pkg, cnf["Dir::Queue::Accepted"])
        if res:
            utils.fubar(res)
1584
1585
1586     def check_override(self):
1587         """
1588         Checks override entries for validity. Mails "Override disparity" warnings,
1589         if that feature is enabled.
1590
1591         Abandons the check if
1592           - override disparity checks are disabled
1593           - mail sending is disabled
1594         """
1595
1596         cnf = Config()
1597
1598         # Abandon the check if:
1599         #  a) override disparity checks have been disabled
1600         #  b) we're not sending mail
1601         if not cnf.FindB("Dinstall::OverrideDisparityCheck") or \
1602            cnf["Dinstall::Options::No-Mail"]:
1603             return
1604
1605         summary = self.pkg.check_override()
1606
1607         if summary == "":
1608             return
1609
1610         overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
1611
1612         self.Subst["__SUMMARY__"] = summary
1613         mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
1614         utils.send_mail(mail_message)
1615         del self.Subst["__SUMMARY__"]
1616
1617     ###########################################################################
1618
1619     def remove(self, dir=None):
1620         """
1621         Used (for instance) in p-u to remove the package from unchecked
1622         """
1623         if dir is None:
1624             os.chdir(self.pkg.directory)
1625         else:
1626             os.chdir(dir)
1627
1628         for f in self.pkg.files.keys():
1629             os.unlink(f)
1630         os.unlink(self.pkg.changes_file)
1631
1632     ###########################################################################
1633
1634     def move_to_dir (self, dest, perms=0660, changesperms=0664):
1635         """
1636         Move files to dest with certain perms/changesperms
1637         """
1638         utils.move(self.pkg.changes_file, dest, perms=changesperms)
1639         for f in self.pkg.files.keys():
1640             utils.move(f, dest, perms=perms)
1641
1642     ###########################################################################
1643
    def force_reject(self, reject_files):
        """
        Forcefully move files from the current directory to the
        reject directory.  If any file already exists in the reject
        directory it will be moved to the morgue to make way for
        the new file.

        @type reject_files: dict
        @param reject_files: file dictionary

        """

        cnf = Config()

        for file_entry in reject_files:
            # Skip any files which don't exist or which we don't have permission to copy.
            if os.access(file_entry, os.R_OK) == 0:
                continue

            dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)

            try:
                # O_EXCL: fail instead of silently clobbering an existing
                # reject file; EEXIST is handled below.
                dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
            except OSError, e:
                # File exists?  Let's try and move it to the morgue
                if e.errno == errno.EEXIST:
                    morgue_file = os.path.join(cnf["Dir::Morgue"], cnf["Dir::MorgueReject"], file_entry)
                    try:
                        morgue_file = utils.find_next_free(morgue_file)
                    except NoFreeFilenameError:
                        # Something's either gone badly Pete Tong, or
                        # someone is trying to exploit us.
                        utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file_entry))
                        return
                    utils.move(dest_file, morgue_file, perms=0660)
                    try:
                        # Retry now that the old reject file is out of the way.
                        dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
                    except OSError, e:
                        # Likewise
                        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
                        return
                else:
                    raise
            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file_entry, dest_file, 1, perms=0660)
            os.close(dest_fd)
1691
1692     ###########################################################################
1693     def do_reject (self, manual=0, reject_message="", note=""):
1694         """
1695         Reject an upload. If called without a reject message or C{manual} is
1696         true, spawn an editor so the user can write one.
1697
1698         @type manual: bool
1699         @param manual: manual or automated rejection
1700
1701         @type reject_message: string
1702         @param reject_message: A reject message
1703
1704         @return: 0
1705
1706         """
1707         # If we weren't given a manual rejection message, spawn an
1708         # editor so the user can add one in...
1709         if manual and not reject_message:
1710             (fd, temp_filename) = utils.temp_filename()
1711             temp_file = os.fdopen(fd, 'w')
1712             if len(note) > 0:
1713                 for line in note:
1714                     temp_file.write(line)
1715             temp_file.close()
1716             editor = os.environ.get("EDITOR","vi")
1717             answer = 'E'
1718             while answer == 'E':
1719                 os.system("%s %s" % (editor, temp_filename))
1720                 temp_fh = utils.open_file(temp_filename)
1721                 reject_message = "".join(temp_fh.readlines())
1722                 temp_fh.close()
1723                 print "Reject message:"
1724                 print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
1725                 prompt = "[R]eject, Edit, Abandon, Quit ?"
1726                 answer = "XXX"
1727                 while prompt.find(answer) == -1:
1728                     answer = utils.our_raw_input(prompt)
1729                     m = re_default_answer.search(prompt)
1730                     if answer == "":
1731                         answer = m.group(1)
1732                     answer = answer[:1].upper()
1733             os.unlink(temp_filename)
1734             if answer == 'A':
1735                 return 1
1736             elif answer == 'Q':
1737                 sys.exit(0)
1738
1739         print "Rejecting.\n"
1740
1741         cnf = Config()
1742
1743         reason_filename = self.pkg.changes_file[:-8] + ".reason"
1744         reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
1745
1746         # Move all the files into the reject directory
1747         reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
1748         self.force_reject(reject_files)
1749
1750         # If we fail here someone is probably trying to exploit the race
1751         # so let's just raise an exception ...
1752         if os.path.exists(reason_filename):
1753             os.unlink(reason_filename)
1754         reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
1755
1756         rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
1757
1758         if not manual:
1759             self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
1760             self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
1761             self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
1762             os.write(reason_fd, reject_message)
1763             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
1764         else:
1765             # Build up the rejection email
1766             user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
1767             self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
1768             self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
1769             self.Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
1770             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
1771             # Write the rejection email out as the <foo>.reason file
1772             os.write(reason_fd, reject_mail_message)
1773
1774         del self.Subst["__REJECTOR_ADDRESS__"]
1775         del self.Subst["__MANUAL_REJECT_MESSAGE__"]
1776         del self.Subst["__CC__"]
1777
1778         os.close(reason_fd)
1779
1780         # Send the rejection mail if appropriate
1781         if not cnf["Dinstall::Options::No-Mail"]:
1782             utils.send_mail(reject_mail_message)
1783
1784         if self.logger:
1785             self.logger.log(["rejected", self.pkg.changes_file])
1786
1787         return 0
1788
1789     ################################################################################
    def in_override_p(self, package, component, suite, binary_type, file, session):
        """
        Check if a package already has override entries in the DB

        @type package: string
        @param package: package name

        @type component: string
        @param component: database id of the component

        @type suite: int
        @param suite: database id of the suite

        @type binary_type: string
        @param binary_type: type of the package

        @type file: string
        @param file: filename we check

        @param session: database session used for the override lookup

        @return: the database result. But noone cares anyway.

        """

        cnf = Config()

        if binary_type == "": # must be source
            file_type = "dsc"
        else:
            file_type = binary_type

        # Override suite name; used for example with proposed-updates
        if cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
            suite = cnf["Suite::%s::OverrideSuite" % (suite)]

        result = get_override(package, suite, component, file_type, session)

        # If checking for a source package fall back on the binary override type
        if file_type == "dsc" and len(result) < 1:
            result = get_override(package, suite, component, ['deb', 'udeb'], session)

        # Remember the section and priority so we can check them later if appropriate
        if len(result) > 0:
            result = result[0]
            self.pkg.files[file]["override section"] = result.section.section
            self.pkg.files[file]["override priority"] = result.priority.priority
            return result

        # No override entry found for this package.
        return None
1838
1839     ################################################################################
1840     def get_anyversion(self, sv_list, suite):
1841         """
1842         @type sv_list: list
1843         @param sv_list: list of (suite, version) tuples to check
1844
1845         @type suite: string
1846         @param suite: suite name
1847
1848         Description: TODO
1849         """
1850         Cnf = Config()
1851         anyversion = None
1852         anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
1853         for (s, v) in sv_list:
1854             if s in [ x.lower() for x in anysuite ]:
1855                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
1856                     anyversion = v
1857
1858         return anyversion
1859
1860     ################################################################################
1861
1862     def cross_suite_version_check(self, sv_list, file, new_version, sourceful=False):
1863         """
1864         @type sv_list: list
1865         @param sv_list: list of (suite, version) tuples to check
1866
1867         @type file: string
1868         @param file: XXX
1869
1870         @type new_version: string
1871         @param new_version: XXX
1872
1873         Ensure versions are newer than existing packages in target
1874         suites and that cross-suite version checking rules as
1875         set out in the conf file are satisfied.
1876         """
1877
1878         cnf = Config()
1879
1880         # Check versions for each target suite
1881         for target_suite in self.pkg.changes["distribution"].keys():
1882             must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
1883             must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
1884
1885             # Enforce "must be newer than target suite" even if conffile omits it
1886             if target_suite not in must_be_newer_than:
1887                 must_be_newer_than.append(target_suite)
1888
1889             for (suite, existent_version) in sv_list:
1890                 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
1891
1892                 if suite in must_be_newer_than and sourceful and vercmp < 1:
1893                     self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
1894
1895                 if suite in must_be_older_than and vercmp > -1:
1896                     cansave = 0
1897
1898                     if self.pkg.changes.get('distribution-version', {}).has_key(suite):
1899                         # we really use the other suite, ignoring the conflicting one ...
1900                         addsuite = self.pkg.changes["distribution-version"][suite]
1901
1902                         add_version = self.get_anyversion(sv_list, addsuite)
1903                         target_version = self.get_anyversion(sv_list, target_suite)
1904
1905                         if not add_version:
1906                             # not add_version can only happen if we map to a suite
1907                             # that doesn't enhance the suite we're propup'ing from.
1908                             # so "propup-ver x a b c; map a d" is a problem only if
1909                             # d doesn't enhance a.
1910                             #
1911                             # i think we could always propagate in this case, rather
1912                             # than complaining. either way, this isn't a REJECT issue
1913                             #
1914                             # And - we really should complain to the dorks who configured dak
1915                             self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite))
1916                             self.pkg.changes.setdefault("propdistribution", {})
1917                             self.pkg.changes["propdistribution"][addsuite] = 1
1918                             cansave = 1
1919                         elif not target_version:
1920                             # not targets_version is true when the package is NEW
1921                             # we could just stick with the "...old version..." REJECT
1922                             # for this, I think.
1923                             self.rejects.append("Won't propogate NEW packages.")
1924                         elif apt_pkg.VersionCompare(new_version, add_version) < 0:
1925                             # propogation would be redundant. no need to reject though.
1926                             self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
1927                             cansave = 1
1928                         elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
1929                              apt_pkg.VersionCompare(add_version, target_version) >= 0:
1930                             # propogate!!
1931                             self.warnings.append("Propogating upload to %s" % (addsuite))
1932                             self.pkg.changes.setdefault("propdistribution", {})
1933                             self.pkg.changes["propdistribution"][addsuite] = 1
1934                             cansave = 1
1935
1936                     if not cansave:
1937                         self.reject.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
1938
1939     ################################################################################
    def check_binary_against_db(self, file, session):
        """
        Version-check an uploaded binary against the archive database.

        Runs the cross-suite version rules against every existing
        (suite, version) of this package, then rejects if an identical
        package/version/architecture is already in the archive.  Any
        problems are appended to self.rejects.

        @type file: string
        @param file: key into self.pkg.files for the binary being checked

        @param session: database session used for the queries
        """
        # Ensure version is sane
        q = session.query(BinAssociation)
        q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[file]["package"])
        # 'all' is included because arch:all binaries live alongside every architecture.
        q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[file]["architecture"], 'all']))

        self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
                                       file, self.pkg.files[file]["version"], sourceful=False)

        # Check for any existing copies of the file
        q = session.query(DBBinary).filter_by(package=self.pkg.files[file]["package"])
        q = q.filter_by(version=self.pkg.files[file]["version"])
        q = q.join(Architecture).filter_by(arch_string=self.pkg.files[file]["architecture"])

        if q.count() > 0:
            self.rejects.append("%s: can not overwrite existing copy already in the archive." % (file))
1956
1957     ################################################################################
1958
1959     def check_source_against_db(self, file, session):
1960         """
1961         """
1962         source = self.pkg.dsc.get("source")
1963         version = self.pkg.dsc.get("version")
1964
1965         # Ensure version is sane
1966         q = session.query(SrcAssociation)
1967         q = q.join(DBSource).filter(DBSource.source==source)
1968
1969         self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
1970                                        file, version, sourceful=True)
1971
1972     ################################################################################
1973     def check_dsc_against_db(self, file, session):
1974         """
1975
1976         @warning: NB: this function can remove entries from the 'files' index [if
1977          the .orig.tar.gz is a duplicate of the one in the archive]; if
1978          you're iterating over 'files' and call this function as part of
1979          the loop, be sure to add a check to the top of the loop to
1980          ensure you haven't just tried to dereference the deleted entry.
1981
1982         """
1983
1984         Cnf = Config()
1985         self.pkg.orig_tar_gz = None
1986
1987         # Try and find all files mentioned in the .dsc.  This has
1988         # to work harder to cope with the multiple possible
1989         # locations of an .orig.tar.gz.
1990         # The ordering on the select is needed to pick the newest orig
1991         # when it exists in multiple places.
1992         for dsc_name, dsc_entry in self.pkg.dsc_files.items():
1993             found = None
1994             if self.pkg.files.has_key(dsc_name):
1995                 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
1996                 actual_size = int(self.pkg.files[dsc_name]["size"])
1997                 found = "%s in incoming" % (dsc_name)
1998
1999                 # Check the file does not already exist in the archive
2000                 ql = get_poolfile_like_name(dsc_name, session)
2001
2002                 # Strip out anything that isn't '%s' or '/%s$'
2003                 for i in ql:
2004                     if not i.filename.endswith(dsc_name):
2005                         ql.remove(i)
2006
2007                 # "[dak] has not broken them.  [dak] has fixed a
2008                 # brokenness.  Your crappy hack exploited a bug in
2009                 # the old dinstall.
2010                 #
2011                 # "(Come on!  I thought it was always obvious that
2012                 # one just doesn't release different files with
2013                 # the same name and version.)"
2014                 #                        -- ajk@ on d-devel@l.d.o
2015
2016                 if len(ql) > 0:
2017                     # Ignore exact matches for .orig.tar.gz
2018                     match = 0
2019                     if dsc_name.endswith(".orig.tar.gz"):
2020                         for i in ql:
2021                             if self.pkg.files.has_key(dsc_name) and \
2022                                int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2023                                self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2024                                 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2025                                 # TODO: Don't delete the entry, just mark it as not needed
2026                                 # This would fix the stupidity of changing something we often iterate over
2027                                 # whilst we're doing it
2028                                 del self.pkg.files[dsc_name]
2029                                 self.pkg.orig_tar_gz = os.path.join(i.location.path, i.filename)
2030                                 match = 1
2031
2032                     if not match:
2033                         self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2034
2035             elif dsc_name.endswith(".orig.tar.gz"):
2036                 # Check in the pool
2037                 ql = get_poolfile_like_name(dsc_name, session)
2038
2039                 # Strip out anything that isn't '%s' or '/%s$'
2040                 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2041                 for i in ql:
2042                     if not i.filename.endswith(dsc_name):
2043                         ql.remove(i)
2044
2045                 if len(ql) > 0:
2046                     # Unfortunately, we may get more than one match here if,
2047                     # for example, the package was in potato but had an -sa
2048                     # upload in woody.  So we need to choose the right one.
2049
2050                     # default to something sane in case we don't match any or have only one
2051                     x = ql[0]
2052
2053                     if len(ql) > 1:
2054                         for i in ql:
2055                             old_file = os.path.join(i.location.path, i.filename)
2056                             old_file_fh = utils.open_file(old_file)
2057                             actual_md5 = apt_pkg.md5sum(old_file_fh)
2058                             old_file_fh.close()
2059                             actual_size = os.stat(old_file)[stat.ST_SIZE]
2060                             if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2061                                 x = i
2062
2063                     old_file = os.path.join(i.location.path, i.filename)
2064                     old_file_fh = utils.open_file(old_file)
2065                     actual_md5 = apt_pkg.md5sum(old_file_fh)
2066                     old_file_fh.close()
2067                     actual_size = os.stat(old_file)[stat.ST_SIZE]
2068                     found = old_file
2069                     suite_type = x.location.archive_type
2070                     # need this for updating dsc_files in install()
2071                     dsc_entry["files id"] = x.file_id
2072                     # See install() in process-accepted...
2073                     self.pkg.orig_tar_id = x.file_id
2074                     self.pkg.orig_tar_gz = old_file
2075                     self.pkg.orig_tar_location = x.location.location_id
2076                 else:
2077                     # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2078                     # Not there? Check the queue directories...
2079                     for directory in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2080                         if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2081                             continue
2082                         in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2083                         if os.path.exists(in_otherdir):
2084                             in_otherdir_fh = utils.open_file(in_otherdir)
2085                             actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2086                             in_otherdir_fh.close()
2087                             actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2088                             found = in_otherdir
2089                             self.pkg.orig_tar_gz = in_otherdir
2090
2091                     if not found:
2092                         self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_name))
2093                         self.pkg.orig_tar_gz = -1
2094                         continue
2095             else:
2096                 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (file, dsc_name))
2097                 continue
2098             if actual_md5 != dsc_entry["md5sum"]:
2099                 self.rejects.append("md5sum for %s doesn't match %s." % (found, file))
2100             if actual_size != int(dsc_entry["size"]):
2101                 self.rejects.append("size for %s doesn't match %s." % (found, file))
2102
2103     ################################################################################
2104     def accepted_checks(self, overwrite_checks, session):
2105         # Recheck anything that relies on the database; since that's not
2106         # frozen between accept and our run time when called from p-a.
2107
2108         # overwrite_checks is set to False when installing to stable/oldstable
2109
2110         propogate={}
2111         nopropogate={}
2112
2113         # Find the .dsc (again)
2114         dsc_filename = None
2115         for f in self.pkg.files.keys():
2116             if self.pkg.files[f]["type"] == "dsc":
2117                 dsc_filename = f
2118
2119         for checkfile in self.pkg.files.keys():
2120             # The .orig.tar.gz can disappear out from under us is it's a
2121             # duplicate of one in the archive.
2122             if not self.pkg.files.has_key(checkfile):
2123                 continue
2124
2125             entry = self.pkg.files[checkfile]
2126
2127             # Check that the source still exists
2128             if entry["type"] == "deb":
2129                 source_version = entry["source version"]
2130                 source_package = entry["source package"]
2131                 if not self.pkg.changes["architecture"].has_key("source") \
2132                    and not source_exists(source_package, source_version,  self.pkg.changes["distribution"].keys()):
2133                     self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2134
2135             # Version and file overwrite checks
2136             if overwrite_checks:
2137                 if entry["type"] == "deb":
2138                     self.check_binary_against_db(checkfile, session)
2139                 elif entry["type"] == "dsc":
2140                     self.check_source_against_db(checkfile, session)
2141                     self.check_dsc_against_db(dsc_filename, session)
2142
2143             # propogate in the case it is in the override tables:
2144             for suite in self.pkg.changes.get("propdistribution", {}).keys():
2145                 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2146                     propogate[suite] = 1
2147                 else:
2148                     nopropogate[suite] = 1
2149
2150         for suite in propogate.keys():
2151             if suite in nopropogate:
2152                 continue
2153             self.pkg.changes["distribution"][suite] = 1
2154
2155         for checkfile in self.pkg.files.keys():
2156             # Check the package is still in the override tables
2157             for suite in self.pkg.changes["distribution"].keys():
2158                 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2159                     self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2160
2161     ################################################################################
2162     # This is not really a reject, but an unaccept, but since a) the code for
2163     # that is non-trivial (reopen bugs, unannounce etc.), b) this should be
2164     # extremely rare, for now we'll go with whining at our admin folks...
2165
2166     def do_unaccept(self):
2167         cnf = Config()
2168
2169         self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2170         self.Subst["__REJECT_MESSAGE__"] = self.package_info()
2171         self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2172         self.Subst["__BCC__"] = "X-DAK: dak process-accepted\nX-Katie: $Revision: 1.18 $"
2173         if cnf.has_key("Dinstall::Bcc"):
2174             self.Subst["__BCC__"] += "\nBcc: %s" % (cnf["Dinstall::Bcc"])
2175
2176         template = os.path.join(cnf["Dir::Templates"], "process-accepted.unaccept")
2177
2178         reject_mail_message = utils.TemplateSubst(self.Subst, template)
2179
2180         # Write the rejection email out as the <foo>.reason file
2181         reason_filename = os.path.basename(self.pkg.changes_file[:-8]) + ".reason"
2182         reject_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2183
2184         # If we fail here someone is probably trying to exploit the race
2185         # so let's just raise an exception ...
2186         if os.path.exists(reject_filename):
2187             os.unlink(reject_filename)
2188
2189         fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2190         os.write(fd, reject_mail_message)
2191         os.close(fd)
2192
2193         utils.send_mail(reject_mail_message)
2194
2195         del self.Subst["__REJECTOR_ADDRESS__"]
2196         del self.Subst["__REJECT_MESSAGE__"]
2197         del self.Subst["__CC__"]
2198
2199     ################################################################################
2200     # If any file of an upload has a recent mtime then chances are good
2201     # the file is still being uploaded.
2202
2203     def upload_too_new(self):
2204         cnf = Config()
2205         too_new = False
2206         # Move back to the original directory to get accurate time stamps
2207         cwd = os.getcwd()
2208         os.chdir(self.pkg.directory)
2209         file_list = self.pkg.files.keys()
2210         file_list.extend(self.pkg.dsc_files.keys())
2211         file_list.append(self.pkg.changes_file)
2212         for f in file_list:
2213             try:
2214                 last_modified = time.time()-os.path.getmtime(f)
2215                 if last_modified < int(cnf["Dinstall::SkipTime"]):
2216                     too_new = True
2217                     break
2218             except:
2219                 pass
2220
2221         os.chdir(cwd)
2222         return too_new