]> git.decadent.org.uk Git - dak.git/blob - daklib/queue.py
miscellaneous fixups from testing
[dak.git] / daklib / queue.py
1 #!/usr/bin/env python
2 # vim:set et sw=4:
3
4 """
5 Queue utility functions for dak
6
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009  Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
11 """
12
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
17
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
21 # GNU General Public License for more details.
22
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
26
27 ###############################################################################
28
29 import cPickle
30 import errno
31 import os
32 import pg
33 import stat
34 import sys
35 import time
36 import apt_inst
37 import apt_pkg
38 import utils
39 import commands
40 import shutil
41 from types import *
42
43 from dak_exceptions import *
44 from changes import *
45 from regexes import *
46 from config import Config
47 from holding import Holding
48 from dbconn import *
49 from summarystats import SummaryStats
50 from utils import parse_changes
51 from textutils import fix_maintainer
52 from binary import Binary
53
54 ###############################################################################
55
def get_type(f, session=None):
    """
    Get the file type of C{f}

    @type f: dict
    @param f: file entry from Changes object

    @type session: SQLA Session
    @param session: database session to use; a new one is created if None

    @rtype: string
    @return: filetype

    """
    if session is None:
        session = DBConn().session()

    # Determine the type
    if f.has_key("dbtype"):
        # BUGFIX: was `file["dbtype"]`, which indexed the `file` builtin
        # (TypeError) instead of the parameter `f`.
        file_type = f["dbtype"]
    elif f["type"] in [ "orig.tar.gz", "orig.tar.bz2", "tar.gz", "tar.bz2", "diff.gz", "diff.bz2", "dsc" ]:
        file_type = "dsc"
    else:
        # BUGFIX: file_type is unassigned on this branch; report the raw
        # type from the file entry instead (was a NameError).
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (f["type"]))

    # Validate the override type
    type_id = get_override_type(file_type, session)
    if type_id is None:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))

    return file_type
84
85 ################################################################################
86
87 # Determine what parts in a .changes are NEW
88
def determine_new(changes, files, warn=1):
    """
    Determine what parts in a C{changes} file are NEW.

    @type changes: Upload.Pkg.changes dict
    @param changes: Changes dictionary

    @type files: Upload.Pkg.files dict
    @param files: Files dictionary

    @type warn: bool
    @param warn: Warn if overrides are added for (old)stable

    @rtype: dict
    @return: dictionary of NEW components.

    """
    new = {}

    session = DBConn().session()

    # Build up a list of potentially new things
    for name, f in files.items():
        # Skip byhand elements
        if f["type"] == "byhand":
            continue
        pkg = f["package"]
        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f)
        component = f["component"]

        # Source packages always carry the pseudo-priority "source"
        if file_type == "dsc":
            priority = "source"

        if not new.has_key(pkg):
            # First file seen for this package: record its metadata
            new[pkg] = {}
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        else:
            old_type = new[pkg]["type"]
            if old_type != file_type:
                # source gets trumped by deb or udeb
                if old_type == "dsc":
                    new[pkg]["priority"] = priority
                    new[pkg]["section"] = section
                    new[pkg]["type"] = file_type
                    new[pkg]["component"] = component

        new[pkg]["files"].append(name)

        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]

    # Anything with an existing override in a target suite is not NEW:
    # drop it from `new` and clear the per-file "new" markers
    for suite in changes["suite"].keys():
        for pkg in new.keys():
            ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
            if len(ql) > 0:
                for file_entry in new[pkg]["files"]:
                    if files[file_entry].has_key("new"):
                        del files[file_entry]["new"]
                # NOTE: deleting while looping is safe here only because
                # python2 .keys() returned a list copy above
                del new[pkg]

    if warn:
        for s in ['stable', 'oldstable']:
            if changes["suite"].has_key(s):
                print "WARNING: overrides will be added for %s!" % s
        for pkg in new.keys():
            if new[pkg].has_key("othercomponents"):
                print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])

    return new
164
165 ################################################################################
166
def check_valid(new):
    """
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages can not be udeb
      - source priority can only be assigned to dsc file types

    Invalid entries get their "section id" / "priority id" set to -1
    in place; nothing is returned.

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.

    """
    for pkg in new.keys():
        section_name = new[pkg]["section"]
        priority_name = new[pkg]["priority"]
        file_type = new[pkg]["type"]

        # Resolve the section name to its database id (-1 == unknown)
        section = get_section(section_name)
        if section is None:
            new[pkg]["section id"] = -1
        else:
            new[pkg]["section id"] = section.section_id

        # Same for the priority
        priority = get_priority(priority_name)
        if priority is None:
            new[pkg]["priority id"] = -1
        else:
            new[pkg]["priority id"] = priority.priority_id

        # Sanity checks
        di = section_name.find("debian-installer") != -1

        # If d-i, we must be udeb and vice-versa
        if     (di and file_type not in ("udeb", "dsc")) or \
           (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1

        # If dsc we need to be source and vice-versa
        # BUGFIX: compare the priority *name*; `priority` is a database
        # object (or None) and could never equal the string "source".
        if (priority_name == "source" and file_type != "dsc") or \
           (priority_name != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1
208
209 ###############################################################################
210
def lookup_uid_from_fingerprint(fpr, session):
    """
    Look up the uid belonging to key fingerprint C{fpr}.

    @rtype: tuple
    @return: (uid, uid name, is_dm); (None, "", False) when the
             fingerprint is unknown.  is_dm reflects the
             debian_maintainer flag of the keyring holding the key.
    """
    user = get_uid_from_fingerprint(fpr, session)

    # Unknown fingerprint: stupid defaults, but callers rely on them
    if user is None:
        return (None, "", False)

    uid_name = '' if user.name is None else user.name

    # Check the relevant fingerprint (which we have to have) to find
    # out whether its keyring grants Debian Maintainer status
    is_dm = False
    for candidate in user.fingerprint:
        if candidate.fingerprint == fpr:
            is_dm = candidate.keyring.debian_maintainer
            break

    return (user.uid, uid_name, is_dm)
233
234 ###############################################################################
235
236 # Used by Upload.check_timestamps
class TarTime(object):
    """
    Tar extraction callback that records members whose mtime falls
    outside the window [past_cutoff, future_cutoff].  Offenders are
    collected in self.future_files / self.ancient_files as
    {name: mtime} mappings.
    """
    def __init__(self, future_cutoff, past_cutoff):
        self.reset()
        self.future_cutoff = future_cutoff
        self.past_cutoff = past_cutoff

    def reset(self):
        """Forget any previously recorded offending members."""
        self.future_files = {}
        self.ancient_files = {}

    def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
        """Record Name if its MTime is too far in the future or past."""
        too_new = MTime > self.future_cutoff
        too_old = MTime < self.past_cutoff
        if too_new:
            self.future_files[Name] = MTime
        if too_old:
            self.ancient_files[Name] = MTime
252
253 ###############################################################################
254
255 class Upload(object):
256     """
257     Everything that has to do with an upload processed.
258
259     """
260     def __init__(self):
261         self.logger = None
262         self.pkg = Changes()
263         self.reset()
264
265     ###########################################################################
266
267     def reset (self):
268         """ Reset a number of internal variables."""
269
270         # Initialize the substitution template map
271         cnf = Config()
272         self.Subst = {}
273         self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
274         self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
275         self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
276         self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
277
278         self.rejects = []
279         self.warnings = []
280         self.notes = []
281
282         self.pkg.reset()
283
284     def package_info(self):
285         msg = ''
286
287         if len(self.rejects) > 0:
288             msg += "Reject Reasons:\n"
289             msg += "\n".join(self.rejects)
290
291         if len(self.warnings) > 0:
292             msg += "Warnings:\n"
293             msg += "\n".join(self.warnings)
294
295         if len(self.notes) > 0:
296             msg += "Notes:\n"
297             msg += "\n".join(self.notes)
298
299         return msg
300
301     ###########################################################################
302     def update_subst(self):
303         """ Set up the per-package template substitution mappings """
304
305         cnf = Config()
306
307         # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
308         if not self.pkg.changes.has_key("architecture") or not \
309            isinstance(changes["architecture"], DictType):
310             self.pkg.changes["architecture"] = { "Unknown" : "" }
311
312         # and maintainer2047 may not exist.
313         if not self.pkg.changes.has_key("maintainer2047"):
314             self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]
315
316         self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
317         self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
318         self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")
319
320         # For source uploads the Changed-By field wins; otherwise Maintainer wins.
321         if self.pkg.changes["architecture"].has_key("source") and \
322            self.pkg.changes["changedby822"] != "" and \
323            (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
324
325             self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
326             self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], changes["maintainer2047"])
327             self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
328         else:
329             self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
330             self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
331             self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")
332
333         if "sponsoremail" in self.pkg.changes:
334             self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
335
336         if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
337             self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
338
339         # Apply any global override of the Maintainer field
340         if cnf.get("Dinstall::OverrideMaintainer"):
341             self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
342             self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]
343
344         self.Subst["__REJECT_MESSAGE__"] = self.package_info()
345         self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
346         self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
347
348     ###########################################################################
349     def load_changes(self, filename):
350         """
351         @rtype boolean
352         @rvalue: whether the changes file was valid or not.  We may want to
353                  reject even if this is True (see what gets put in self.rejects).
354                  This is simply to prevent us even trying things later which will
355                  fail because we couldn't properly parse the file.
356         """
357         Cnf = Config()
358         self.pkg.changes_file = filename
359
360         # Parse the .changes field into a dictionary
361         try:
362             self.pkg.changes.update(parse_changes(filename))
363         except CantOpenError:
364             self.rejects.append("%s: can't read file." % (filename))
365             return False
366         except ParseChangesError, line:
367             self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
368             return False
369         except ChangesUnicodeError:
370             self.rejects.append("%s: changes file not proper utf-8" % (filename))
371             return False
372
373         # Parse the Files field from the .changes into another dictionary
374         try:
375             self.pkg.files.update(utils.build_file_list(self.pkg.changes))
376         except ParseChangesError, line:
377             self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
378             return False
379         except UnknownFormatError, format:
380             self.rejects.append("%s: unknown format '%s'." % (filename, format))
381             return False
382
383         # Check for mandatory fields
384         for i in ("distribution", "source", "binary", "architecture",
385                   "version", "maintainer", "files", "changes", "description"):
386             if not self.pkg.changes.has_key(i):
387                 # Avoid undefined errors later
388                 self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
389                 return False
390
391         # Strip a source version in brackets from the source field
392         if re_strip_srcver.search(self.pkg.changes["source"]):
393             self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])
394
395         # Ensure the source field is a valid package name.
396         if not re_valid_pkg_name.match(self.pkg.changes["source"]):
397             self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))
398
399         # Split multi-value fields into a lower-level dictionary
400         for i in ("architecture", "distribution", "binary", "closes"):
401             o = self.pkg.changes.get(i, "")
402             if o != "":
403                 del self.pkg.changes[i]
404
405             self.pkg.changes[i] = {}
406
407             for j in o.split():
408                 self.pkg.changes[i][j] = 1
409
410         # Fix the Maintainer: field to be RFC822/2047 compatible
411         try:
412             (self.pkg.changes["maintainer822"],
413              self.pkg.changes["maintainer2047"],
414              self.pkg.changes["maintainername"],
415              self.pkg.changes["maintaineremail"]) = \
416                    fix_maintainer (self.pkg.changes["maintainer"])
417         except ParseMaintError, msg:
418             self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
419                    % (filename, changes["maintainer"], msg))
420
421         # ...likewise for the Changed-By: field if it exists.
422         try:
423             (self.pkg.changes["changedby822"],
424              self.pkg.changes["changedby2047"],
425              self.pkg.changes["changedbyname"],
426              self.pkg.changes["changedbyemail"]) = \
427                    fix_maintainer (self.pkg.changes.get("changed-by", ""))
428         except ParseMaintError, msg:
429             self.pkg.changes["changedby822"] = ""
430             self.pkg.changes["changedby2047"] = ""
431             self.pkg.changes["changedbyname"] = ""
432             self.pkg.changes["changedbyemail"] = ""
433
434             self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
435                    % (filename, changes["changed-by"], msg))
436
437         # Ensure all the values in Closes: are numbers
438         if self.pkg.changes.has_key("closes"):
439             for i in self.pkg.changes["closes"].keys():
440                 if re_isanum.match (i) == None:
441                     self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))
442
443         # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
444         self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
445         self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])
446
447         # Check there isn't already a changes file of the same name in one
448         # of the queue directories.
449         base_filename = os.path.basename(filename)
450         for d in [ "Accepted", "Byhand", "Done", "New", "ProposedUpdates", "OldProposedUpdates" ]:
451             if os.path.exists(os.path.join(Cnf["Dir::Queue::%s" % (d) ], base_filename)):
452                 self.rejects.append("%s: a file with this name already exists in the %s directory." % (base_filename, d))
453
454         # Check the .changes is non-empty
455         if not self.pkg.files:
456             self.rejects.append("%s: nothing to do (Files field is empty)." % (base_filename))
457             return False
458
459         # Changes was syntactically valid even if we'll reject
460         return True
461
462     ###########################################################################
463
464     def check_distributions(self):
465         "Check and map the Distribution field"
466
467         Cnf = Config()
468
469         # Handle suite mappings
470         for m in Cnf.ValueList("SuiteMappings"):
471             args = m.split()
472             mtype = args[0]
473             if mtype == "map" or mtype == "silent-map":
474                 (source, dest) = args[1:3]
475                 if self.pkg.changes["distribution"].has_key(source):
476                     del self.pkg.changes["distribution"][source]
477                     self.pkg.changes["distribution"][dest] = 1
478                     if mtype != "silent-map":
479                         self.notes.append("Mapping %s to %s." % (source, dest))
480                 if self.pkg.changes.has_key("distribution-version"):
481                     if self.pkg.changes["distribution-version"].has_key(source):
482                         self.pkg.changes["distribution-version"][source]=dest
483             elif mtype == "map-unreleased":
484                 (source, dest) = args[1:3]
485                 if self.pkg.changes["distribution"].has_key(source):
486                     for arch in self.pkg.changes["architecture"].keys():
487                         if arch not in [ arch_string for a in get_suite_architectures(source) ]:
488                             self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
489                             del self.pkg.changes["distribution"][source]
490                             self.pkg.changes["distribution"][dest] = 1
491                             break
492             elif mtype == "ignore":
493                 suite = args[1]
494                 if self.pkg.changes["distribution"].has_key(suite):
495                     del self.pkg.changes["distribution"][suite]
496                     self.warnings.append("Ignoring %s as a target suite." % (suite))
497             elif mtype == "reject":
498                 suite = args[1]
499                 if self.pkg.changes["distribution"].has_key(suite):
500                     self.rejects.append("Uploads to %s are not accepted." % (suite))
501             elif mtype == "propup-version":
502                 # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
503                 #
504                 # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
505                 if self.pkg.changes["distribution"].has_key(args[1]):
506                     self.pkg.changes.setdefault("distribution-version", {})
507                     for suite in args[2:]:
508                         self.pkg.changes["distribution-version"][suite] = suite
509
510         # Ensure there is (still) a target distribution
511         if len(self.pkg.changes["distribution"].keys()) < 1:
512             self.rejects.append("No valid distribution remaining.")
513
514         # Ensure target distributions exist
515         for suite in self.pkg.changes["distribution"].keys():
516             if not Cnf.has_key("Suite::%s" % (suite)):
517                 self.rejects.append("Unknown distribution `%s'." % (suite))
518
519     ###########################################################################
520
    def binary_file_checks(self, f, session):
        """
        Run the full battery of checks on binary package file C{f}
        (a .deb/.udeb listed in the .changes): control-field presence
        and validity, architecture, filename consistency, and that a
        matching source package exists.  Problems are appended to
        self.rejects / self.warnings; extracted control data is stored
        in the self.pkg.files[f] entry.

        @type f: string
        @param f: filename of the binary package to check

        @type session: SQLA Session
        @param session: database session for architecture/source lookups
        """
        cnf = Config()
        entry = self.pkg.files[f]

        # Extract package control information
        deb_file = utils.open_file(f)
        try:
            control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
        except:
            self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
            deb_file.close()
            # Can't continue, none of the checks on control would work.
            return

        # Check for mandantory "Description:"
        deb_file.seek(0)
        try:
            # Indexing raises if the field is absent; the '+ "\n"' just
            # forces evaluation (and fails if the value is not a string)
            apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
        except:
            self.rejects.append("%s: Missing Description in binary package" % (f))
            return

        deb_file.close()

        # Check for mandatory fields
        for field in [ "Package", "Architecture", "Version" ]:
            if control.Find(field) == None:
                # Can't continue
                self.rejects.append("%s: No %s field in control." % (f, field))
                return

        # Ensure the package name matches the one give in the .changes
        if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
            self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))

        # Validate the package field
        package = control.Find("Package")
        if not re_valid_pkg_name.match(package):
            self.rejects.append("%s: invalid package name '%s'." % (f, package))

        # Validate the version field
        version = control.Find("Version")
        if not re_valid_version.match(version):
            self.rejects.append("%s: invalid version number '%s'." % (f, version))

        # Ensure the architecture of the .deb is one we know about.
        default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
        architecture = control.Find("Architecture")
        # NOTE(review): only the first target suite is checked here --
        # presumably multi-suite uploads share architectures; confirm.
        upload_suite = self.pkg.changes["distribution"].keys()[0]

        if      architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session)] \
            and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session)]:
            self.rejects.append("Unknown architecture '%s'." % (architecture))

        # Ensure the architecture of the .deb is one of the ones
        # listed in the .changes.
        if not self.pkg.changes["architecture"].has_key(architecture):
            self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))

        # Sanity-check the Depends field
        # (Find() returns None when the field is missing, which is fine;
        #  only a present-but-empty field is rejected)
        depends = control.Find("Depends")
        if depends == '':
            self.rejects.append("%s: Depends field is empty." % (f))

        # Sanity-check the Provides field
        provides = control.Find("Provides")
        if provides:
            provide = re_spacestrip.sub('', provides)
            if provide == '':
                self.rejects.append("%s: Provides field is empty." % (f))
            prov_list = provide.split(",")
            for prov in prov_list:
                if not re_valid_pkg_name.match(prov):
                    self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))

        # Check the section & priority match those given in the .changes (non-fatal)
        if     control.Find("Section") and entry["section"] != "" \
           and entry["section"] != control.Find("Section"):
            self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
                                (f, control.Find("Section", ""), entry["section"]))
        if control.Find("Priority") and entry["priority"] != "" \
           and entry["priority"] != control.Find("Priority"):
            self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
                                (f, control.Find("Priority", ""), entry["priority"]))

        # Stash the validated control data in the files entry
        entry["package"] = package
        entry["architecture"] = architecture
        entry["version"] = version
        entry["maintainer"] = control.Find("Maintainer", "")

        if f.endswith(".udeb"):
            self.pkg.files[f]["dbtype"] = "udeb"
        elif f.endswith(".deb"):
            self.pkg.files[f]["dbtype"] = "deb"
        else:
            self.rejects.append("%s is neither a .deb or a .udeb." % (f))

        # Source defaults to the binary package name when not stated
        entry["source"] = control.Find("Source", entry["package"])

        # Get the source version
        source = entry["source"]
        source_version = ""

        # A Source field of the form "name (version)" carries an explicit
        # source version; otherwise the binary's own version applies
        if source.find("(") != -1:
            m = re_extract_src_version.match(source)
            source = m.group(1)
            source_version = m.group(2)

        if not source_version:
            source_version = self.pkg.files[f]["version"]

        entry["source package"] = source
        entry["source version"] = source_version

        # Ensure the filename matches the contents of the .deb
        # NOTE(review): m is not checked for None -- assumes f already
        # matched re_isadeb before this method was called; confirm.
        m = re_isadeb.match(f)

        #  package name
        file_package = m.group(1)
        if entry["package"] != file_package:
            self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
                                (f, file_package, entry["dbtype"], entry["package"]))
        epochless_version = re_no_epoch.sub('', control.Find("Version"))

        #  version
        file_version = m.group(2)
        if epochless_version != file_version:
            self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
                                (f, file_version, entry["dbtype"], epochless_version))

        #  architecture
        file_architecture = m.group(3)
        if entry["architecture"] != file_architecture:
            self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
                                (f, file_architecture, entry["dbtype"], entry["architecture"]))

        # Check for existent source
        source_version = entry["source version"]
        source_package = entry["source package"]
        if self.pkg.changes["architecture"].has_key("source"):
            # Source is part of this very upload: versions must agree
            if source_version != self.pkg.changes["version"]:
                self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
                                    (source_version, f, self.pkg.changes["version"]))
        else:
            # Check in the SQL database
            if not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
                # Check in one of the other directories
                source_epochless_version = re_no_epoch.sub('', source_version)
                dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
                if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
                    entry["byhand"] = 1
                elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
                    entry["new"] = 1
                else:
                    dsc_file_exists = False
                    for myq in ["Accepted", "Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
                        if cnf.has_key("Dir::Queue::%s" % (myq)):
                            if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
                                dsc_file_exists = True
                                break

                    if not dsc_file_exists:
                        self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))

        # Check the version and for file overwrites
        self.check_binary_against_db(f, session)

        # Deep-scan the package contents; propagate any rejects it found
        b = Binary(f)
        b.scan_package()
        if len(b.rejects) > 0:
            for j in b.rejects:
                self.rejects.append(j)
693
694     def source_file_checks(self, f, session):
695         entry = self.pkg.files[f]
696
697         m = re_issource.match(f)
698         if not m:
699             return
700
701         entry["package"] = m.group(1)
702         entry["version"] = m.group(2)
703         entry["type"] = m.group(3)
704
705         # Ensure the source package name matches the Source filed in the .changes
706         if self.pkg.changes["source"] != entry["package"]:
707             self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
708
709         # Ensure the source version matches the version in the .changes file
710         if entry["type"] == "orig.tar.gz":
711             changes_version = self.pkg.changes["chopversion2"]
712         else:
713             changes_version = self.pkg.changes["chopversion"]
714
715         if changes_version != entry["version"]:
716             self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))
717
718         # Ensure the .changes lists source in the Architecture field
719         if not self.pkg.changes["architecture"].has_key("source"):
720             self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))
721
722         # Check the signature of a .dsc file
723         if entry["type"] == "dsc":
724             # check_signature returns either:
725             #  (None, [list, of, rejects]) or (signature, [])
726             (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
727             for j in rejects:
728                 self.rejects.append(j)
729
730         entry["architecture"] = "source"
731
732     def per_suite_file_checks(self, f, suite, session):
733         cnf = Config()
734         entry = self.pkg.files[f]
735         archive = utils.where_am_i()
736
737         # Skip byhand
738         if entry.has_key("byhand"):
739             return
740
741         # Handle component mappings
742         for m in cnf.ValueList("ComponentMappings"):
743             (source, dest) = m.split()
744             if entry["component"] == source:
745                 entry["original component"] = source
746                 entry["component"] = dest
747
748         # Ensure the component is valid for the target suite
749         if cnf.has_key("Suite:%s::Components" % (suite)) and \
750            entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
751             self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
752             return
753
754         # Validate the component
755         component = entry["component"]
756         if not get_component(component, session):
757             self.rejects.append("file '%s' has unknown component '%s'." % (f, component))
758             return
759
760         # See if the package is NEW
761         if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
762             entry["new"] = 1
763
764         # Validate the priority
765         if entry["priority"].find('/') != -1:
766             self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))
767
768         # Determine the location
769         location = cnf["Dir::Pool"]
770         l = get_location(location, entry["component"], archive, session)
771         if l is None:
772             self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s, Archive: %s)" % (component, archive))
773             entry["location id"] = -1
774         else:
775             entry["location id"] = l.location_id
776
777         # Check the md5sum & size against existing files (if any)
778         entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])
779
780         found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
781                                          entry["size"], entry["md5sum"], entry["location id"])
782
783         if found is None:
784             self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
785         elif found is False and poolfile is not None:
786             self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
787         else:
788             if poolfile is None:
789                 entry["files id"] = None
790             else:
791                 entry["files id"] = poolfile.file_id
792
793         # Check for packages that have moved from one component to another
794         entry['suite'] = suite
795         res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
796         if res.rowcount > 0:
797             entry["othercomponents"] = res.fetchone()[0]
798
799     def check_files(self, action=True):
800         archive = utils.where_am_i()
801         file_keys = self.pkg.files.keys()
802         holding = Holding()
803         cnf = Config()
804
805         # XXX: As far as I can tell, this can no longer happen - see
806         #      comments by AJ in old revisions - mhy
807         # if reprocess is 2 we've already done this and we're checking
808         # things again for the new .orig.tar.gz.
809         # [Yes, I'm fully aware of how disgusting this is]
810         if action and self.reprocess < 2:
811             cwd = os.getcwd()
812             os.chdir(self.pkg.directory)
813             for f in file_keys:
814                 ret = holding.copy_to_holding(f)
815                 if ret is not None:
816                     # XXX: Should we bail out here or try and continue?
817                     self.rejects.append(ret)
818
819             os.chdir(cwd)
820
821         # Check there isn't already a .changes or .dak file of the same name in
822         # the proposed-updates "CopyChanges" or "CopyDotDak" storage directories.
823         # [NB: this check must be done post-suite mapping]
824         base_filename = os.path.basename(self.pkg.changes_file)
825         dot_dak_filename = base_filename[:-8] + ".dak"
826
827         for suite in self.pkg.changes["distribution"].keys():
828             copychanges = "Suite::%s::CopyChanges" % (suite)
829             if cnf.has_key(copychanges) and \
830                    os.path.exists(os.path.join(cnf[copychanges], base_filename)):
831                 self.rejects.append("%s: a file with this name already exists in %s" \
832                            % (base_filename, cnf[copychanges]))
833
834             copy_dot_dak = "Suite::%s::CopyDotDak" % (suite)
835             if cnf.has_key(copy_dot_dak) and \
836                    os.path.exists(os.path.join(cnf[copy_dot_dak], dot_dak_filename)):
837                 self.rejects.append("%s: a file with this name already exists in %s" \
838                            % (dot_dak_filename, Cnf[copy_dot_dak]))
839
840         self.reprocess = 0
841         has_binaries = False
842         has_source = False
843
844         session = DBConn().session()
845
846         for f, entry in self.pkg.files.items():
847             # Ensure the file does not already exist in one of the accepted directories
848             for d in [ "Accepted", "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
849                 if not cnf.has_key("Dir::Queue::%s" % (d)): continue
850                 if os.path.exists(cnf["Dir::Queue::%s" % (d) ] + '/' + f):
851                     self.rejects.append("%s file already exists in the %s directory." % (f, d))
852
853             if not re_taint_free.match(f):
854                 self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))
855
856             # Check the file is readable
857             if os.access(f, os.R_OK) == 0:
858                 # When running in -n, copy_to_holding() won't have
859                 # generated the reject_message, so we need to.
860                 if action:
861                     if os.path.exists(f):
862                         self.rejects.append("Can't read `%s'. [permission denied]" % (f))
863                     else:
864                         self.rejects.append("Can't read `%s'. [file not found]" % (f))
865                 entry["type"] = "unreadable"
866                 continue
867
868             # If it's byhand skip remaining checks
869             if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
870                 entry["byhand"] = 1
871                 entry["type"] = "byhand"
872
873             # Checks for a binary package...
874             elif re_isadeb.match(f):
875                 has_binaries = True
876                 entry["type"] = "deb"
877
878                 # This routine appends to self.rejects/warnings as appropriate
879                 self.binary_file_checks(f, session)
880
881             # Checks for a source package...
882             elif re_issource.match(f):
883                 has_source = True
884
885                 # This routine appends to self.rejects/warnings as appropriate
886                 self.source_file_checks(f, session)
887
888             # Not a binary or source package?  Assume byhand...
889             else:
890                 entry["byhand"] = 1
891                 entry["type"] = "byhand"
892
893             # Per-suite file checks
894             entry["oldfiles"] = {}
895             for suite in self.pkg.changes["distribution"].keys():
896                 self.per_suite_file_checks(f, suite, session)
897
898         # If the .changes file says it has source, it must have source.
899         if self.pkg.changes["architecture"].has_key("source"):
900             if not has_source:
901                 self.rejects.append("no source found and Architecture line in changes mention source.")
902
903             if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
904                 self.rejects.append("source only uploads are not supported.")
905
906     ###########################################################################
    def check_dsc(self, action=True):
        """
        Check the .dsc file of a source upload: locate it, parse it,
        enforce mandatory fields, build and sanity-check its file list,
        validate versions and build-dependencies, and compare it against
        the database.

        @type action: bool
        @param action: if False (no-action mode) the file may not have been
            copied to holding, so an unreadable .dsc is recorded as a reject
            here instead of having been reported by copy_to_holding()

        @rtype: bool
        @return: whether or not the source portion of the upload is valid;
            rejection reasons are appended to self.rejects either way
        """
        # Ensure there is source to check
        if not self.pkg.changes["architecture"].has_key("source"):
            return True

        # Find the .dsc
        dsc_filename = None
        for f, entry in self.pkg.files.items():
            if entry["type"] == "dsc":
                if dsc_filename:
                    self.rejects.append("can not process a .changes file with multiple .dsc's.")
                    return False
                else:
                    dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            self.rejects.append("source uploads must contain a dsc file")
            return False

        # Parse the .dsc file
        try:
            self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1))
        except CantOpenError:
            # if not -n copy_to_holding() will have done this for us...
            if not action:
                self.rejects.append("%s: can't read file." % (dsc_filename))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
        except InvalidDscError, line:
            self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
        except ChangesUnicodeError:
            self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))

        # Build up the file list of files mentioned by the .dsc
        try:
            self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
        except NoFilesFieldError:
            self.rejects.append("%s: no Files: field." % (dsc_filename))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
            return False

        # Enforce mandatory fields
        for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
            if not self.pkg.dsc.has_key(i):
                self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
                return False

        # Validate the source and version fields
        if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
        if not re_valid_version.match(self.pkg.dsc["version"]):
            self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))

        # Bumping the version number of the .dsc breaks extraction by stable's
        # dpkg-source.  So let's not do that...
        if self.pkg.dsc["format"] != "1.0":
            self.rejects.append("%s: incompatible 'Format' version produced by a broken version of dpkg-dev 1.9.1{3,4}." % (dsc_filename))

        # Validate the Maintainer field
        try:
            # We ignore the return value
            fix_maintainer(self.pkg.dsc["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                                 % (dsc_filename, self.pkg.dsc["maintainer"], msg))

        # Validate the build-depends field(s)
        for field_name in [ "build-depends", "build-depends-indep" ]:
            field = self.pkg.dsc.get(field_name)
            if field:
                # Check for broken dpkg-dev lossage...
                if field.startswith("ARRAY"):
                    self.rejects.append("%s: invalid %s field produced by a broken version of dpkg-dev (1.10.11)" % \
                                        (dsc_filename, field_name.title()))

                # Have apt try to parse them...
                try:
                    apt_pkg.ParseSrcDepends(field)
                except:
                    # NOTE(review): bare except - any failure here, parse
                    # error or otherwise, becomes this reject message
                    self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))

        # Ensure the version number in the .dsc matches the version number in the .changes
        epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        changes_version = self.pkg.files[dsc_filename]["version"]

        if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
            self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))

        # Ensure there is a .tar.gz in the .dsc file
        has_tar = False
        for f in self.pkg.dsc_files.keys():
            m = re_issource.match(f)
            if not m:
                self.rejects.append("%s: %s in Files field not recognised as source." % (dsc_filename, f))
                continue
            ftype = m.group(3)
            if ftype == "orig.tar.gz" or ftype == "tar.gz":
                has_tar = True

        if not has_tar:
            self.rejects.append("%s: no .tar.gz or .orig.tar.gz in 'Files' field." % (dsc_filename))

        # Ensure source is newer than existing source in target suites
        self.check_source_against_db(dsc_filename)

        self.check_dsc_against_db(dsc_filename)

        return True
1022
1023     ###########################################################################
1024
1025     def get_changelog_versions(self, source_dir):
1026         """Extracts a the source package and (optionally) grabs the
1027         version history out of debian/changelog for the BTS."""
1028
1029         cnf = Config()
1030
1031         # Find the .dsc (again)
1032         dsc_filename = None
1033         for f in self.pkg.files.keys():
1034             if self.pkg.files[f]["type"] == "dsc":
1035                 dsc_filename = f
1036
1037         # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1038         if not dsc_filename:
1039             return
1040
1041         # Create a symlink mirror of the source files in our temporary directory
1042         for f in self.pkg.files.keys():
1043             m = re_issource.match(f)
1044             if m:
1045                 src = os.path.join(source_dir, f)
1046                 # If a file is missing for whatever reason, give up.
1047                 if not os.path.exists(src):
1048                     return
1049                 ftype = m.group(3)
1050                 if ftype == "orig.tar.gz" and self.pkg.orig_tar_gz:
1051                     continue
1052                 dest = os.path.join(os.getcwd(), f)
1053                 os.symlink(src, dest)
1054
1055         # If the orig.tar.gz is not a part of the upload, create a symlink to the
1056         # existing copy.
1057         if self.pkg.orig_tar_gz:
1058             dest = os.path.join(os.getcwd(), os.path.basename(self.pkg.orig_tar_gz))
1059             os.symlink(self.pkg.orig_tar_gz, dest)
1060
1061         # Extract the source
1062         cmd = "dpkg-source -sn -x %s" % (dsc_filename)
1063         (result, output) = commands.getstatusoutput(cmd)
1064         if (result != 0):
1065             self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
1066             self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "), "")
1067             return
1068
1069         if not cnf.Find("Dir::Queue::BTSVersionTrack"):
1070             return
1071
1072         # Get the upstream version
1073         upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1074         if re_strip_revision.search(upstr_version):
1075             upstr_version = re_strip_revision.sub('', upstr_version)
1076
1077         # Ensure the changelog file exists
1078         changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
1079         if not os.path.exists(changelog_filename):
1080             self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
1081             return
1082
1083         # Parse the changelog
1084         self.pkg.dsc["bts changelog"] = ""
1085         changelog_file = utils.open_file(changelog_filename)
1086         for line in changelog_file.readlines():
1087             m = re_changelog_versions.match(line)
1088             if m:
1089                 self.pkg.dsc["bts changelog"] += line
1090         changelog_file.close()
1091
1092         # Check we found at least one revision in the changelog
1093         if not self.pkg.dsc["bts changelog"]:
1094             self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1095
1096     def check_source(self):
1097         # XXX: I'm fairly sure reprocess == 2 can never happen
1098         #      AJT disabled the is_incoming check years ago - mhy
1099         #      We should probably scrap or rethink the whole reprocess thing
1100         # Bail out if:
1101         #    a) there's no source
1102         # or b) reprocess is 2 - we will do this check next time when orig.tar.gz is in 'files'
1103         # or c) the orig.tar.gz is MIA
1104         if not self.pkg.changes["architecture"].has_key("source") or self.reprocess == 2 \
1105            or self.pkg.orig_tar_gz == -1:
1106             return
1107
1108         tmpdir = utils.temp_dirname()
1109
1110         # Move into the temporary directory
1111         cwd = os.getcwd()
1112         os.chdir(tmpdir)
1113
1114         # Get the changelog version history
1115         self.get_changelog_versions(cwd)
1116
1117         # Move back and cleanup the temporary tree
1118         os.chdir(cwd)
1119
1120         try:
1121             shutil.rmtree(tmpdir)
1122         except OSError, e:
1123             if e.errno != errno.EACCES:
1124                 print "foobar"
1125                 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1126
1127             self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1128             # We probably have u-r or u-w directories so chmod everything
1129             # and try again.
1130             cmd = "chmod -R u+rwx %s" % (tmpdir)
1131             result = os.system(cmd)
1132             if result != 0:
1133                 utils.fubar("'%s' failed with result %s." % (cmd, result))
1134             shutil.rmtree(tmpdir)
1135         except Exception, e:
1136             print "foobar2 (%s)" % e
1137             utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1138
1139     ###########################################################################
1140     def ensure_hashes(self):
1141         # Make sure we recognise the format of the Files: field in the .changes
1142         format = self.pkg.changes.get("format", "0.0").split(".", 1)
1143         if len(format) == 2:
1144             format = int(format[0]), int(format[1])
1145         else:
1146             format = int(float(format[0])), 0
1147
1148         # We need to deal with the original changes blob, as the fields we need
1149         # might not be in the changes dict serialised into the .dak anymore.
1150         orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1151
1152         # Copy the checksums over to the current changes dict.  This will keep
1153         # the existing modifications to it intact.
1154         for field in orig_changes:
1155             if field.startswith('checksums-'):
1156                 self.pkg.changes[field] = orig_changes[field]
1157
1158         # Check for unsupported hashes
1159         for j in utils.check_hash_fields(".changes", self.pkg.changes):
1160             self.rejects.append(j)
1161
1162         for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1163             self.rejects.append(j)
1164
1165         # We have to calculate the hash if we have an earlier changes version than
1166         # the hash appears in rather than require it exist in the changes file
1167         for hashname, hashfunc, version in utils.known_hashes:
1168             # TODO: Move _ensure_changes_hash into this class
1169             for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1170                 self.rejects.append(j)
1171             if "source" in self.pkg.changes["architecture"]:
1172                 # TODO: Move _ensure_dsc_hash into this class
1173                 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1174                     self.rejects.append(j)
1175
1176     def check_hashes(self):
1177         for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1178             self.rejects.append(m)
1179
1180         for m in utils.check_size(".changes", self.pkg.files):
1181             self.rejects.append(m)
1182
1183         for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1184             self.rejects.append(m)
1185
1186         for m in utils.check_size(".dsc", self.pkg.dsc_files):
1187             self.rejects.append(m)
1188
1189         self.ensure_hashes()
1190
1191     ###########################################################################
1192     def check_urgency(self):
1193         cnf = Config()
1194         if self.pkg.changes["architecture"].has_key("source"):
1195             if not self.pkg.changes.has_key("urgency"):
1196                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1197             self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1198             if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1199                 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1200                                      (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1201                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1202
1203     ###########################################################################
1204
1205     # Sanity check the time stamps of files inside debs.
1206     # [Files in the near future cause ugly warnings and extreme time
1207     #  travel can cause errors on extraction]
1208
    def check_timestamps(self):
        """
        Sanity check the time stamps of the members of every .deb in the
        upload: members dated beyond Dinstall::FutureTimeTravelGrace
        seconds in the future, or before Dinstall::PastCutoffYear, are
        rejected.  Appends to self.rejects.
        """
        Cnf = Config()

        future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
        past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
        # tar.callback records members falling outside [past, future] into
        # tar.future_files / tar.ancient_files
        tar = TarTime(future_cutoff, past_cutoff)

        for filename, entry in self.pkg.files.items():
            if entry["type"] == "deb":
                tar.reset()
                try:
                    deb_file = utils.open_file(filename)
                    apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
                    # rewind before scanning the second ar member
                    deb_file.seek(0)
                    try:
                        apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
                    except SystemError, e:
                        # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
                        if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
                            raise
                        deb_file.seek(0)
                        apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")

                    deb_file.close()

                    # Report the first offending member as an example, plus a count
                    future_files = tar.future_files.keys()
                    if future_files:
                        num_future_files = len(future_files)
                        future_file = future_files[0]
                        future_date = tar.future_files[future_file]
                        self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
                               % (filename, num_future_files, future_file, time.ctime(future_date)))

                    ancient_files = tar.ancient_files.keys()
                    if ancient_files:
                        num_ancient_files = len(ancient_files)
                        ancient_file = ancient_files[0]
                        ancient_date = tar.ancient_files[ancient_file]
                        self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
                               % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
                except:
                    # Deliberate catch-all: any extraction failure becomes a
                    # reject for this deb rather than aborting the whole run
                    self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1251
1252     ###########################################################################
1253     def check_signed_by_key(self):
1254         """Ensure the .changes is signed by an authorized uploader."""
1255         session = DBConn().session()
1256
1257         (uid, uid_name, is_dm) = lookup_uid_from_fingerprint(self.pkg.changes["fingerprint"], session=session)
1258
1259         # match claimed name with actual name:
1260         if uid is None:
1261             # This is fundamentally broken but need us to refactor how we get
1262             # the UIDs/Fingerprints in order for us to fix it properly
1263             uid, uid_email = self.pkg.changes["fingerprint"], uid
1264             may_nmu, may_sponsor = 1, 1
1265             # XXX by default new dds don't have a fingerprint/uid in the db atm,
1266             #     and can't get one in there if we don't allow nmu/sponsorship
1267         elif is_dm is False:
1268             # If is_dm is False, we allow full upload rights
1269             uid_email = "%s@debian.org" % (uid)
1270             may_nmu, may_sponsor = 1, 1
1271         else:
1272             # Assume limited upload rights unless we've discovered otherwise
1273             uid_email = uid
1274             may_nmu, may_sponsor = 0, 0
1275
1276         if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1277             sponsored = 0
1278         elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1279             sponsored = 0
1280             if uid_name == "": sponsored = 1
1281         else:
1282             sponsored = 1
1283             if ("source" in self.pkg.changes["architecture"] and
1284                 uid_email and utils.is_email_alias(uid_email)):
1285                 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1286                 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1287                     self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1288                     self.pkg.changes["sponsoremail"] = uid_email
1289
1290         if sponsored and not may_sponsor:
1291             self.rejects.append("%s is not authorised to sponsor uploads" % (uid))
1292
1293         if not sponsored and not may_nmu:
1294             should_reject = True
1295             highest_sid, highest_version = None, None
1296
1297             # XXX: This reimplements in SQLA what existed before but it's fundamentally fucked
1298             #      It ignores higher versions with the dm_upload_allowed flag set to false
1299             #      I'm keeping the existing behaviour for now until I've gone back and
1300             #      checked exactly what the GR says - mhy
1301             for si in get_sources_from_name(source=self.pkg.changes['source'], dm_upload_allowed=True, session=session):
1302                 if highest_version is None or apt_pkg.VersionCompare(si.version, highest_version) == 1:
1303                      highest_sid = si.source_id
1304                      highest_version = si.version
1305
1306             if highest_sid is None:
1307                 self.rejects.append("Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version" % self.pkg.changes["source"])
1308             else:
1309                 for sup in session.query(SrcUploader).join(DBSource).filter_by(source_id=highest_sid):
1310                     (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
1311                     if email == uid_email or name == uid_name:
1312                         should_reject = False
1313                         break
1314
1315             if should_reject is True:
1316                 self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (uid, self.pkg.changes["source"]))
1317
1318             for b in self.pkg.changes["binary"].keys():
1319                 for suite in self.pkg.changes["distribution"].keys():
1320                     q = session.query(DBSource)
1321                     q = q.join(DBBinary).filter_by(package=b)
1322                     q = q.join(BinAssociation).join(Suite).filter_by(suite)
1323
1324                     for s in q.all():
1325                         if s.source != self.pkg.changes["source"]:
1326                             self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (uid, b, s, suite))
1327
1328             for f in self.pkg.files.keys():
1329                 if self.pkg.files[f].has_key("byhand"):
1330                     self.rejects.append("%s may not upload BYHAND file %s" % (uid, f))
1331                 if self.pkg.files[f].has_key("new"):
1332                     self.rejects.append("%s may not upload NEW file %s" % (uid, f))
1333
1334     ###########################################################################
1335     def build_summaries(self):
1336         """ Build a summary of changes the upload introduces. """
1337
1338         (byhand, new, summary, override_summary) = self.pkg.file_summary()
1339
1340         short_summary = summary
1341
1342         # This is for direport's benefit...
1343         f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1344
1345         if byhand or new:
1346             summary += "Changes: " + f
1347
1348         summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1349
1350         summary += self.announce(short_summary, 0)
1351
1352         return (summary, short_summary)
1353
1354     ###########################################################################
1355
1356     def close_bugs(self, summary, action):
1357         """
1358         Send mail to close bugs as instructed by the closes field in the changes file.
1359         Also add a line to summary if any work was done.
1360
1361         @type summary: string
1362         @param summary: summary text, as given by L{build_summaries}
1363
1364         @type action: bool
1365         @param action: Set to false no real action will be done.
1366
1367         @rtype: string
1368         @return: summary. If action was taken, extended by the list of closed bugs.
1369
1370         """
1371
1372         template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1373
1374         bugs = self.pkg.changes["closes"].keys()
1375
1376         if not bugs:
1377             return summary
1378
1379         bugs.sort()
1380         summary += "Closing bugs: "
1381         for bug in bugs:
1382             summary += "%s " % (bug)
1383             if action:
1384                 self.Subst["__BUG_NUMBER__"] = bug
1385                 if self.pkg.changes["distribution"].has_key("stable"):
1386                     self.Subst["__STABLE_WARNING__"] = """
1387 Note that this package is not part of the released stable Debian
1388 distribution.  It may have dependencies on other unreleased software,
1389 or other instabilities.  Please take care if you wish to install it.
1390 The update will eventually make its way into the next released Debian
1391 distribution."""
1392                 else:
1393                     self.Subst["__STABLE_WARNING__"] = ""
1394                     mail_message = utils.TemplateSubst(self.Subst, template)
1395                     utils.send_mail(mail_message)
1396
1397                 # Clear up after ourselves
1398                 del self.Subst["__BUG_NUMBER__"]
1399                 del self.Subst["__STABLE_WARNING__"]
1400
1401         if action and self.logger:
1402             self.logger.log(["closing bugs"] + bugs)
1403
1404         summary += "\n"
1405
1406         return summary
1407
1408     ###########################################################################
1409
    def announce(self, short_summary, action):
        """
        Send an announce mail about a new upload.

        @type short_summary: string
        @param short_summary: Short summary text to include in the mail

        @type action: bool
        @param action: Set to false no real action will be done.

        @rtype: string
        @return: Textstring about action taken.

        """

        cnf = Config()
        announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')

        # Only do announcements for source uploads with a recent dpkg-dev installed
        if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
           self.pkg.changes["architecture"].has_key("source"):
            return ""

        # Track which announce addresses we have already mailed so a package
        # targeted at several suites sharing one list is announced only once.
        lists_done = {}
        summary = ""

        self.Subst["__SHORT_SUMMARY__"] = short_summary

        for dist in self.pkg.changes["distribution"].keys():
            announce_list = cnf.Find("Suite::%s::Announce" % (dist))
            if announce_list == "" or lists_done.has_key(announce_list):
                continue

            lists_done[announce_list] = 1
            summary += "Announcing to %s\n" % (announce_list)

            if action:
                self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
                # Bcc the package tracking server (if one is configured).
                if cnf.get("Dinstall::TrackingServer") and \
                   self.pkg.changes["architecture"].has_key("source"):
                    trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
                    self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto

                mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
                utils.send_mail(mail_message)

                # Clean up the per-list substitution before the next iteration.
                del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]

        # Optionally also close the bugs listed in the .changes file.
        if cnf.FindB("Dinstall::CloseBugs"):
            summary = self.close_bugs(summary, action)

        del self.Subst["__SHORT_SUMMARY__"]

        return summary
1464
1465     ###########################################################################
1466
    def accept (self, summary, short_summary, targetdir=None):
        """
        Accept an upload.

        This moves all files referenced from the .changes into the I{accepted}
        queue, sends the accepted mail, announces to lists, closes bugs and
        also checks for override disparities. If enabled it will write out
        the version history for the BTS Version Tracking and will finally call
        L{queue_build}.

        @type summary: string
        @param summary: Summary text

        @type short_summary: string
        @param short_summary: Short summary

        @type targetdir: string
        @param targetdir: directory to move the upload into; defaults to
                          Dir::Queue::Accepted from the configuration

        """

        cnf = Config()
        stats = SummaryStats()

        accepttemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted')

        if targetdir is None:
            targetdir = cnf["Dir::Queue::Accepted"]

        print "Accepting."
        if self.logger:
            self.logger.log(["Accepting changes", self.pkg.changes_file])

        # Write the .dak metadata file alongside the upload.
        self.pkg.write_dot_dak(targetdir)

        # Move all the files into the accepted directory
        utils.move(self.pkg.changes_file, targetdir)

        for name, entry in sorted(self.pkg.files.items()):
            utils.move(name, targetdir)
            stats.accept_bytes += float(entry["size"])

        stats.accept_count += 1

        # Send accept mail, announce to lists, close bugs and check for
        # override disparities
        if not cnf["Dinstall::Options::No-Mail"]:
            self.Subst["__SUITE__"] = ""
            self.Subst["__SUMMARY__"] = summary
            mail_message = utils.TemplateSubst(self.Subst, accepttemplate)
            utils.send_mail(mail_message)
            self.announce(short_summary, 1)

        ## Helper stuff for DebBugs Version Tracking
        if cnf.Find("Dir::Queue::BTSVersionTrack"):
            # ??? once queue/* is cleared on *.d.o and/or reprocessed
            # the conditionalization on dsc["bts changelog"] should be
            # dropped.

            # Write out the version history from the changelog
            if self.pkg.changes["architecture"].has_key("source") and \
               self.pkg.dsc.has_key("bts changelog"):

                # Write to a dotfile first, then rename into place so the
                # consumer never sees a partially-written file.
                (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
                version_history = os.fdopen(fd, 'w')
                version_history.write(self.pkg.dsc["bts changelog"])
                version_history.close()
                # changes_file[:-8] strips the trailing ".changes".
                filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
                                      self.pkg.changes_file[:-8]+".versions")
                os.rename(temp_filename, filename)
                os.chmod(filename, 0644)

            # Write out the binary -> source mapping.
            (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
            debinfo = os.fdopen(fd, 'w')
            for name, entry in sorted(self.pkg.files.items()):
                if entry["type"] == "deb":
                    line = " ".join([entry["package"], entry["version"],
                                     entry["architecture"], entry["source package"],
                                     entry["source version"]])
                    debinfo.write(line+"\n")
            debinfo.close()
            filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
                                  self.pkg.changes_file[:-8]+".debinfo")
            os.rename(temp_filename, filename)
            os.chmod(filename, 0644)

        # Its is Cnf["Dir::Queue::Accepted"] here, not targetdir!
        # <Ganneff> we do call queue_build too
        # <mhy> well yes, we'd have had to if we were inserting into accepted
        # <Ganneff> now. thats database only.
        # <mhy> urgh, that's going to get messy
        # <Ganneff> so i make the p-n call to it *also* using accepted/
        # <mhy> but then the packages will be in the queue_build table without the files being there
        # <Ganneff> as the buildd queue is only regenerated whenever unchecked runs
        # <mhy> ah, good point
        # <Ganneff> so it will work out, as unchecked move it over
        # <mhy> that's all completely sick
        # <Ganneff> yes

        # This routine returns None on success or an error on failure
        res = get_queue('accepted').autobuild_upload(self.pkg, cnf["Dir::Queue::Accepted"])
        if res:
            utils.fubar(res)
1568
1569
1570     def check_override(self):
1571         """
1572         Checks override entries for validity. Mails "Override disparity" warnings,
1573         if that feature is enabled.
1574
1575         Abandons the check if
1576           - override disparity checks are disabled
1577           - mail sending is disabled
1578         """
1579
1580         cnf = Config()
1581
1582         # Abandon the check if:
1583         #  a) override disparity checks have been disabled
1584         #  b) we're not sending mail
1585         if not cnf.FindB("Dinstall::OverrideDisparityCheck") or \
1586            cnf["Dinstall::Options::No-Mail"]:
1587             return
1588
1589         summary = self.pkg.check_override()
1590
1591         if summary == "":
1592             return
1593
1594         overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
1595
1596         self.Subst["__SUMMARY__"] = summary
1597         mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
1598         utils.send_mail(mail_message)
1599         del self.Subst["__SUMMARY__"]
1600
1601     ###########################################################################
1602
1603     def remove(self, dir=None):
1604         """
1605         Used (for instance) in p-u to remove the package from unchecked
1606         """
1607         if dir is None:
1608             os.chdir(self.pkg.directory)
1609         else:
1610             os.chdir(dir)
1611
1612         for f in self.pkg.files.keys():
1613             os.unlink(f)
1614         os.unlink(self.pkg.changes_file)
1615
1616     ###########################################################################
1617
1618     def move_to_dir (self, dest, perms=0660, changesperms=0664):
1619         """
1620         Move files to dest with certain perms/changesperms
1621         """
1622         utils.move(self.pkg.changes_file, dest, perms=changesperms)
1623         for f in self.pkg.files.keys():
1624             utils.move(f, dest, perms=perms)
1625
1626     ###########################################################################
1627
    def force_reject(self, reject_files):
        """
        Forcefully move files from the current directory to the
        reject directory.  If any file already exists in the reject
        directory it will be moved to the morgue to make way for
        the new file.

        @type reject_files: list
        @param reject_files: names of the files to move

        """

        cnf = Config()

        for file_entry in reject_files:
            # Skip any files which don't exist or which we don't have permission to copy.
            if os.access(file_entry, os.R_OK) == 0:
                continue

            dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)

            try:
                # O_EXCL means this fails with EEXIST if the file is
                # already present, which is how we detect a collision.
                dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
            except OSError, e:
                # File exists?  Let's try and move it to the morgue
                if e.errno == errno.EEXIST:
                    morgue_file = os.path.join(cnf["Dir::Morgue"], cnf["Dir::MorgueReject"], file_entry)
                    try:
                        morgue_file = utils.find_next_free(morgue_file)
                    except NoFreeFilenameError:
                        # Something's either gone badly Pete Tong, or
                        # someone is trying to exploit us.
                        utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file_entry))
                        return
                    utils.move(dest_file, morgue_file, perms=0660)
                    # Retry claiming the slot now that the old file is gone.
                    try:
                        dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
                    except OSError, e:
                        # Likewise
                        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
                        return
                else:
                    raise
            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file_entry, dest_file, 1, perms=0660)
            os.close(dest_fd)
1675
1676     ###########################################################################
1677     def do_reject (self, manual=0, reject_message="", note=""):
1678         """
1679         Reject an upload. If called without a reject message or C{manual} is
1680         true, spawn an editor so the user can write one.
1681
1682         @type manual: bool
1683         @param manual: manual or automated rejection
1684
1685         @type reject_message: string
1686         @param reject_message: A reject message
1687
1688         @return: 0
1689
1690         """
1691         # If we weren't given a manual rejection message, spawn an
1692         # editor so the user can add one in...
1693         if manual and not reject_message:
1694             (fd, temp_filename) = utils.temp_filename()
1695             temp_file = os.fdopen(fd, 'w')
1696             if len(note) > 0:
1697                 for line in note:
1698                     temp_file.write(line)
1699             temp_file.close()
1700             editor = os.environ.get("EDITOR","vi")
1701             answer = 'E'
1702             while answer == 'E':
1703                 os.system("%s %s" % (editor, temp_filename))
1704                 temp_fh = utils.open_file(temp_filename)
1705                 reject_message = "".join(temp_fh.readlines())
1706                 temp_fh.close()
1707                 print "Reject message:"
1708                 print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
1709                 prompt = "[R]eject, Edit, Abandon, Quit ?"
1710                 answer = "XXX"
1711                 while prompt.find(answer) == -1:
1712                     answer = utils.our_raw_input(prompt)
1713                     m = re_default_answer.search(prompt)
1714                     if answer == "":
1715                         answer = m.group(1)
1716                     answer = answer[:1].upper()
1717             os.unlink(temp_filename)
1718             if answer == 'A':
1719                 return 1
1720             elif answer == 'Q':
1721                 sys.exit(0)
1722
1723         print "Rejecting.\n"
1724
1725         cnf = Config()
1726
1727         reason_filename = self.pkg.changes_file[:-8] + ".reason"
1728         reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
1729
1730         # Move all the files into the reject directory
1731         reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
1732         self.force_reject(reject_files)
1733
1734         # If we fail here someone is probably trying to exploit the race
1735         # so let's just raise an exception ...
1736         if os.path.exists(reason_filename):
1737             os.unlink(reason_filename)
1738         reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
1739
1740         rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
1741
1742         if not manual:
1743             self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
1744             self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
1745             self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
1746             os.write(reason_fd, reject_message)
1747             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
1748         else:
1749             # Build up the rejection email
1750             user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
1751             self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
1752             self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
1753             self.Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
1754             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
1755             # Write the rejection email out as the <foo>.reason file
1756             os.write(reason_fd, reject_mail_message)
1757
1758         del self.Subst["__REJECTOR_ADDRESS__"]
1759         del self.Subst["__MANUAL_REJECT_MESSAGE__"]
1760         del self.Subst["__CC__"]
1761
1762         os.close(reason_fd)
1763
1764         # Send the rejection mail if appropriate
1765         if not cnf["Dinstall::Options::No-Mail"]:
1766             utils.send_mail(reject_mail_message)
1767
1768         if self.logger:
1769             self.logger.log(["rejected", pkg.changes_file])
1770
1771         return 0
1772
1773     ################################################################################
    def in_override_p(self, package, component, suite, binary_type, file, session=None):
        """
        Check if a package already has override entries in the DB

        @type package: string
        @param package: package name

        @type component: string
        @param component: database id of the component

        @type suite: int
        @param suite: database id of the suite
            (NOTE(review): the code below formats C{suite} into
            "Suite::%s::..." config keys, which suggests it is actually
            a suite *name* — confirm against callers)

        @type binary_type: string
        @param binary_type: type of the package

        @type file: string
        @param file: filename we check

        @return: the database result. But noone cares anyway.

        """

        cnf = Config()

        if session is None:
            session = DBConn().session()

        if binary_type == "": # must be source
            file_type = "dsc"
        else:
            file_type = binary_type

        # Override suite name; used for example with proposed-updates
        if cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
            suite = cnf["Suite::%s::OverrideSuite" % (suite)]

        result = get_override(package, suite, component, file_type, session)

        # If checking for a source package fall back on the binary override type
        if file_type == "dsc" and len(result) < 1:
            result = get_override(package, suite, component, ['deb', 'udeb'], session)

        # Remember the section and priority so we can check them later if appropriate
        if len(result) > 0:
            result = result[0]
            self.pkg.files[file]["override section"] = result.section.section
            self.pkg.files[file]["override priority"] = result.priority.priority
            return result

        return None
1825
1826     ################################################################################
1827     def get_anyversion(self, sv_list, suite):
1828         """
1829         @type sv_list: list
1830         @param sv_list: list of (suite, version) tuples to check
1831
1832         @type suite: string
1833         @param suite: suite name
1834
1835         Description: TODO
1836         """
1837         anyversion = None
1838         anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
1839         for (s, v) in sv_list:
1840             if s in [ x.lower() for x in anysuite ]:
1841                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
1842                     anyversion = v
1843
1844         return anyversion
1845
1846     ################################################################################
1847
1848     def cross_suite_version_check(self, sv_list, file, new_version, sourceful=False):
1849         """
1850         @type sv_list: list
1851         @param sv_list: list of (suite, version) tuples to check
1852
1853         @type file: string
1854         @param file: XXX
1855
1856         @type new_version: string
1857         @param new_version: XXX
1858
1859         Ensure versions are newer than existing packages in target
1860         suites and that cross-suite version checking rules as
1861         set out in the conf file are satisfied.
1862         """
1863
1864         cnf = Config()
1865
1866         # Check versions for each target suite
1867         for target_suite in self.pkg.changes["distribution"].keys():
1868             must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
1869             must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
1870
1871             # Enforce "must be newer than target suite" even if conffile omits it
1872             if target_suite not in must_be_newer_than:
1873                 must_be_newer_than.append(target_suite)
1874
1875             for (suite, existent_version) in sv_list:
1876                 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
1877
1878                 if suite in must_be_newer_than and sourceful and vercmp < 1:
1879                     self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
1880
1881                 if suite in must_be_older_than and vercmp > -1:
1882                     cansave = 0
1883
1884                     if self.pkg.changes.get('distribution-version', {}).has_key(suite):
1885                         # we really use the other suite, ignoring the conflicting one ...
1886                         addsuite = self.pkg.changes["distribution-version"][suite]
1887
1888                         add_version = self.get_anyversion(sv_list, addsuite)
1889                         target_version = self.get_anyversion(sv_list, target_suite)
1890
1891                         if not add_version:
1892                             # not add_version can only happen if we map to a suite
1893                             # that doesn't enhance the suite we're propup'ing from.
1894                             # so "propup-ver x a b c; map a d" is a problem only if
1895                             # d doesn't enhance a.
1896                             #
1897                             # i think we could always propagate in this case, rather
1898                             # than complaining. either way, this isn't a REJECT issue
1899                             #
1900                             # And - we really should complain to the dorks who configured dak
1901                             self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite))
1902                             self.pkg.changes.setdefault("propdistribution", {})
1903                             self.pkg.changes["propdistribution"][addsuite] = 1
1904                             cansave = 1
1905                         elif not target_version:
1906                             # not targets_version is true when the package is NEW
1907                             # we could just stick with the "...old version..." REJECT
1908                             # for this, I think.
1909                             self.rejects.append("Won't propogate NEW packages.")
1910                         elif apt_pkg.VersionCompare(new_version, add_version) < 0:
1911                             # propogation would be redundant. no need to reject though.
1912                             self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
1913                             cansave = 1
1914                         elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
1915                              apt_pkg.VersionCompare(add_version, target_version) >= 0:
1916                             # propogate!!
1917                             self.warnings.append("Propogating upload to %s" % (addsuite))
1918                             self.pkg.changes.setdefault("propdistribution", {})
1919                             self.pkg.changes["propdistribution"][addsuite] = 1
1920                             cansave = 1
1921
1922                     if not cansave:
1923                         self.reject.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
1924
1925     ################################################################################
    def check_binary_against_db(self, file, session=None):
        """
        Check the binary C{file} against the archive database: enforce
        cross-suite version rules and reject attempts to overwrite an
        existing copy of the same package/version/architecture.

        @type file: string
        @param file: filename (key into self.pkg.files) to check

        @param session: database session; a new one is opened when None
        """
        if session is None:
            session = DBConn().session()

        # Ensure version is sane
        q = session.query(BinAssociation)
        q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[file]["package"])
        # 'all' packages compete with every architecture-specific build.
        q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[file]["architecture"], 'all']))

        self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
                                       file, self.pkg.files[file]["version"], sourceful=False)

        # Check for any existing copies of the file
        q = session.query(DBBinary).filter_by(package=self.pkg.files[file]["package"])
        q = q.filter_by(version=self.pkg.files[file]["version"])
        q = q.join(Architecture).filter_by(arch_string=self.pkg.files[file]["architecture"])

        if q.count() > 0:
            self.rejects.append("%s: can not overwrite existing copy already in the archive." % (file))
1945
1946     ################################################################################
1947
    def check_source_against_db(self, file, session=None):
        """
        Check the uploaded source package against the archive database,
        enforcing cross-suite version rules for its source/version as
        given in the .dsc.

        @type file: string
        @param file: filename the rejection messages refer to

        @param session: database session; a new one is opened when None
        """
        if session is None:
            session = DBConn().session()

        source = self.pkg.dsc.get("source")
        version = self.pkg.dsc.get("version")

        # Ensure version is sane
        q = session.query(SrcAssociation)
        q = q.join(DBSource).filter(DBSource.source==source)

        self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
                                       file, version, sourceful=True)
1963
1964     ################################################################################
1965     def check_dsc_against_db(self, file, session=None):
1966         """
1967
1968         @warning: NB: this function can remove entries from the 'files' index [if
1969          the .orig.tar.gz is a duplicate of the one in the archive]; if
1970          you're iterating over 'files' and call this function as part of
1971          the loop, be sure to add a check to the top of the loop to
1972          ensure you haven't just tried to dereference the deleted entry.
1973
1974         """
1975
1976         if session is None:
1977             session = DBConn().session()
1978
1979         self.pkg.orig_tar_gz = None
1980
1981         # Try and find all files mentioned in the .dsc.  This has
1982         # to work harder to cope with the multiple possible
1983         # locations of an .orig.tar.gz.
1984         # The ordering on the select is needed to pick the newest orig
1985         # when it exists in multiple places.
1986         for dsc_name, dsc_entry in self.pkg.dsc_files.items():
1987             found = None
1988             if self.pkg.files.has_key(dsc_name):
1989                 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
1990                 actual_size = int(self.pkg.files[dsc_name]["size"])
1991                 found = "%s in incoming" % (dsc_name)
1992
1993                 # Check the file does not already exist in the archive
1994                 ql = get_poolfile_like_name(dsc_name)
1995
1996                 # Strip out anything that isn't '%s' or '/%s$'
1997                 for i in ql:
1998                     if not i.filename.endswith(dsc_name):
1999                         ql.remove(i)
2000
2001                 # "[dak] has not broken them.  [dak] has fixed a
2002                 # brokenness.  Your crappy hack exploited a bug in
2003                 # the old dinstall.
2004                 #
2005                 # "(Come on!  I thought it was always obvious that
2006                 # one just doesn't release different files with
2007                 # the same name and version.)"
2008                 #                        -- ajk@ on d-devel@l.d.o
2009
2010                 if len(ql) > 0:
2011                     # Ignore exact matches for .orig.tar.gz
2012                     match = 0
2013                     if dsc_name.endswith(".orig.tar.gz"):
2014                         for i in ql:
2015                             if self.pkg.files.has_key(dsc_name) and \
2016                                int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2017                                self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2018                                 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2019                                 # TODO: Don't delete the entry, just mark it as not needed
2020                                 # This would fix the stupidity of changing something we often iterate over
2021                                 # whilst we're doing it
2022                                 del self.pkg.files[dsc_name]
2023                                 self.pkg.orig_tar_gz = os.path.join(i.location.path, i.filename)
2024                                 match = 1
2025
2026                     if not match:
2027                         self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2028
2029             elif dsc_name.endswith(".orig.tar.gz"):
2030                 # Check in the pool
2031                 ql = get_poolfile_like_name(dsc_name, session)
2032
2033                 # Strip out anything that isn't '%s' or '/%s$'
2034                 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2035                 for i in ql:
2036                     if not i.filename.endswith(dsc_name):
2037                         ql.remove(i)
2038
2039                 if len(ql) > 0:
2040                     # Unfortunately, we may get more than one match here if,
2041                     # for example, the package was in potato but had an -sa
2042                     # upload in woody.  So we need to choose the right one.
2043
2044                     # default to something sane in case we don't match any or have only one
2045                     x = ql[0]
2046
2047                     if len(ql) > 1:
2048                         for i in ql:
2049                             old_file = os.path.join(i.location.path, i.filename)
2050                             old_file_fh = utils.open_file(old_file)
2051                             actual_md5 = apt_pkg.md5sum(old_file_fh)
2052                             old_file_fh.close()
2053                             actual_size = os.stat(old_file)[stat.ST_SIZE]
2054                             if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2055                                 x = i
2056
2057                     old_file = os.path.join(i.location.path, i.filename)
2058                     old_file_fh = utils.open_file(old_file)
2059                     actual_md5 = apt_pkg.md5sum(old_file_fh)
2060                     old_file_fh.close()
2061                     actual_size = os.stat(old_file)[stat.ST_SIZE]
2062                     found = old_file
2063                     suite_type = f.location.archive_type
2064                     # need this for updating dsc_files in install()
2065                     dsc_entry["files id"] = f.file_id
2066                     # See install() in process-accepted...
2067                     self.pkg.orig_tar_id = f.file_id
2068                     self.pkg.orig_tar_gz = old_file
2069                     self.pkg.orig_tar_location = f.location.location_id
2070                 else:
2071                     # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2072                     # Not there? Check the queue directories...
2073                     for directory in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2074                         in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2075                         if os.path.exists(in_otherdir):
2076                             in_otherdir_fh = utils.open_file(in_otherdir)
2077                             actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2078                             in_otherdir_fh.close()
2079                             actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2080                             found = in_otherdir
2081                             self.pkg.orig_tar_gz = in_otherdir
2082
2083                     if not found:
2084                         self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_name))
2085                         self.pkg.orig_tar_gz = -1
2086                         continue
2087             else:
2088                 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (file, dsc_name))
2089                 continue
2090             if actual_md5 != dsc_entry["md5sum"]:
2091                 self.rejects.append("md5sum for %s doesn't match %s." % (found, file))
2092             if actual_size != int(dsc_entry["size"]):
2093                 self.rejects.append("size for %s doesn't match %s." % (found, file))
2094
2095     ################################################################################
2096     def accepted_checks(self, overwrite_checks=True, session=None):
2097         # Recheck anything that relies on the database; since that's not
2098         # frozen between accept and our run time when called from p-a.
2099
2100         # overwrite_checks is set to False when installing to stable/oldstable
2101
2102         if session is None:
2103             session = DBConn().session()
2104
2105         propogate={}
2106         nopropogate={}
2107
2108         for checkfile in self.pkg.files.keys():
2109             # The .orig.tar.gz can disappear out from under us is it's a
2110             # duplicate of one in the archive.
2111             if not self.pkg.files.has_key(checkfile):
2112                 continue
2113
2114             entry = self.pkg.files[checkfile]
2115
2116             # Check that the source still exists
2117             if entry["type"] == "deb":
2118                 source_version = entry["source version"]
2119                 source_package = entry["source package"]
2120                 if not self.pkg.changes["architecture"].has_key("source") \
2121                    and not source_exists(source_package, source_version,  self.pkg.changes["distribution"].keys()):
2122                     self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2123
2124             # Version and file overwrite checks
2125             if overwrite_checks:
2126                 if entry["type"] == "deb":
2127                     self.check_binary_against_db(checkfile, session)
2128                 elif entry["type"] == "dsc":
2129                     self.check_source_against_db(checkfile, session)
2130                     self.check_dsc_against_db(dsc_filename, session)
2131
2132             # propogate in the case it is in the override tables:
2133             for suite in self.pkg.changes.get("propdistribution", {}).keys():
2134                 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile):
2135                     propogate[suite] = 1
2136                 else:
2137                     nopropogate[suite] = 1
2138
2139         for suite in propogate.keys():
2140             if suite in nopropogate:
2141                 continue
2142             self.pkg.changes["distribution"][suite] = 1
2143
2144         for checkfile in self.pkg.files.keys():
2145             # Check the package is still in the override tables
2146             for suite in self.pkg.changes["distribution"].keys():
2147                 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile):
2148                     self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2149
2150     ################################################################################
2151     # This is not really a reject, but an unaccept, but since a) the code for
2152     # that is non-trivial (reopen bugs, unannounce etc.), b) this should be
2153     # extremely rare, for now we'll go with whining at our admin folks...
2154
2155     def do_unaccept(self):
2156         cnf = Config()
2157
2158         self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2159         self.Subst["__REJECT_MESSAGE__"] = self.package_info()
2160         self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2161         self.Subst["__BCC__"] = "X-DAK: dak process-accepted\nX-Katie: $Revision: 1.18 $"
2162         if cnf.has_key("Dinstall::Bcc"):
2163             self.Subst["__BCC__"] += "\nBcc: %s" % (cnf["Dinstall::Bcc"])
2164
2165         template = os.path.join(cnf["Dir::Templates"], "process-accepted.unaccept")
2166
2167         reject_mail_message = utils.TemplateSubst(self.Subst, template)
2168
2169         # Write the rejection email out as the <foo>.reason file
2170         reason_filename = os.path.basename(self.pkg.changes_file[:-8]) + ".reason"
2171         reject_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2172
2173         # If we fail here someone is probably trying to exploit the race
2174         # so let's just raise an exception ...
2175         if os.path.exists(reject_filename):
2176             os.unlink(reject_filename)
2177
2178         fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2179         os.write(fd, reject_mail_message)
2180         os.close(fd)
2181
2182         utils.send_mail(reject_mail_message)
2183
2184         del self.Subst["__REJECTOR_ADDRESS__"]
2185         del self.Subst["__REJECT_MESSAGE__"]
2186         del self.Subst["__CC__"]
2187
2188     ################################################################################
2189     # If any file of an upload has a recent mtime then chances are good
2190     # the file is still being uploaded.
2191
2192     def upload_too_new(self):
2193         cnf = Config()
2194         too_new = False
2195         # Move back to the original directory to get accurate time stamps
2196         cwd = os.getcwd()
2197         os.chdir(self.pkg.directory)
2198         file_list = self.pkg.files.keys()
2199         file_list.extend(self.pkg.dsc_files.keys())
2200         file_list.append(self.pkg.changes_file)
2201         for f in file_list:
2202             try:
2203                 last_modified = time.time()-os.path.getmtime(f)
2204                 if last_modified < int(cnf["Dinstall::SkipTime"]):
2205                     too_new = True
2206                     break
2207             except:
2208                 pass
2209
2210         os.chdir(cwd)
2211         return too_new