]> git.decadent.org.uk Git - dak.git/blob - daklib/queue.py
Port p-u to SQLA and move logic to daklib/queue.py
[dak.git] / daklib / queue.py
1 #!/usr/bin/env python
2 # vim:set et sw=4:
3
4 """
5 Queue utility functions for dak
6
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009  Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
11 """
12
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
17
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
21 # GNU General Public License for more details.
22
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
26
27 ###############################################################################
28
29 import cPickle
30 import errno
31 import os
32 import pg
33 import stat
34 import sys
35 import time
36 import apt_inst
37 import apt_pkg
38 import utils
39 from types import *
40
41 from dak_exceptions import *
42 from changes import *
43 from regexes import *
44 from config import Config
45 from holding import Holding
46 from dbconn import *
47 from summarystats import SummaryStats
48 from utils import parse_changes
49 from textutils import fix_maintainer
50
51 ###############################################################################
52
def get_type(f, session=None):
    """
    Get the file type of C{f}

    @type f: dict
    @param f: file entry from Changes object

    @type session: SQLAlchemy Session
    @param session: optional SQLAlchemy session; a fresh one is opened
                    from DBConn() when None.

    @rtype: string
    @return: filetype

    """
    if session is None:
        session = DBConn().session()

    # Determine the type
    if f.has_key("dbtype"):
        # Fixed: was 'file["dbtype"]' -- 'file' is the builtin, the
        # entry under inspection is the parameter 'f'.
        file_type = f["dbtype"]
    elif f["type"] in [ "orig.tar.gz", "orig.tar.bz2", "tar.gz", "tar.bz2", "diff.gz", "diff.bz2", "dsc" ]:
        file_type = "dsc"
    else:
        # Fixed: 'file_type' is unbound on this branch; report the raw
        # type from the file entry instead.
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (f["type"]))

    # Validate the override type
    type_id = get_override_type(file_type, session)
    if type_id is None:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))

    return file_type
81
82 ################################################################################
83
84 # Determine what parts in a .changes are NEW
85
def determine_new(changes, files, warn=1):
    """
    Determine what parts in a C{changes} file are NEW.

    A package is NEW when no override entry exists for it in any of the
    target suites; packages that turn out to be overridden everywhere are
    removed from the result again (and their files' "new" markers cleared).

    @type changes: Upload.Pkg.changes dict
    @param changes: Changes dictionary

    @type files: Upload.Pkg.files dict
    @param files: Files dictionary

    @type warn: bool
    @param warn: Warn if overrides are added for (old)stable

    @rtype: dict
    @return: dictionary of NEW components.

    """
    new = {}

    session = DBConn().session()

    # Build up a list of potentially new things
    for name, f in files.items():
        # Skip byhand elements
        if f["type"] == "byhand":
            continue
        pkg = f["package"]
        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f)
        component = f["component"]

        # Source always gets priority "source", whatever the entry claims
        if file_type == "dsc":
            priority = "source"

        if not new.has_key(pkg):
            new[pkg] = {}
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        else:
            old_type = new[pkg]["type"]
            if old_type != file_type:
                # source gets trumped by deb or udeb
                if old_type == "dsc":
                    new[pkg]["priority"] = priority
                    new[pkg]["section"] = section
                    new[pkg]["type"] = file_type
                    new[pkg]["component"] = component

        new[pkg]["files"].append(name)

        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]

    # Drop anything that already has an override in one of the target
    # suites.  NOTE: deleting from 'new' while looping is safe here only
    # because .keys() returns a fresh list in Python 2.
    for suite in changes["suite"].keys():
        for pkg in new.keys():
            ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
            if len(ql) > 0:
                for file_entry in new[pkg]["files"]:
                    if files[file_entry].has_key("new"):
                        del files[file_entry]["new"]
                del new[pkg]

    if warn:
        for s in ['stable', 'oldstable']:
            if changes["suite"].has_key(s):
                print "WARNING: overrides will be added for %s!" % s
        for pkg in new.keys():
            if new[pkg].has_key("othercomponents"):
                print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])

    return new
161
162 ################################################################################
163
def check_valid(new):
    """
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages can not be udeb
      - source priority can only be assigned to dsc file types

    Unknown/invalid sections and priorities are marked by setting the
    corresponding "... id" entry to -1.

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.

    """
    for pkg in new.keys():
        section_name = new[pkg]["section"]
        priority_name = new[pkg]["priority"]
        file_type = new[pkg]["type"]

        section = get_section(section_name)
        if section is None:
            new[pkg]["section id"] = -1
        else:
            new[pkg]["section id"] = section.section_id

        priority = get_priority(priority_name)
        if priority is None:
            new[pkg]["priority id"] = -1
        else:
            new[pkg]["priority id"] = priority.priority_id

        # Sanity checks
        di = section_name.find("debian-installer") != -1

        # If d-i, we must be udeb and vice-versa
        if     (di and file_type not in ("udeb", "dsc")) or \
           (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1

        # If dsc we need to be source and vice-versa.
        # Fixed: compare the priority *name*; 'priority' itself is the
        # object returned by get_priority() (or None), never a string.
        if (priority_name == "source" and file_type != "dsc") or \
           (priority_name != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1
205
206 ###############################################################################
207
def lookup_uid_from_fingerprint(fpr, session):
    """
    Look up the uid belonging to the key fingerprint C{fpr}.

    @type fpr: string
    @param fpr: key fingerprint to look up

    @type session: SQLAlchemy Session
    @param session: database session to use

    @rtype: tuple
    @return: (uid, uid_name, is_dm) -- uid and uid_name are None/"" when
             the fingerprint is unknown; is_dm says whether the matching
             fingerprint lives on a Debian Maintainer keyring.
    """
    uid = None
    uid_name = ""
    # This is a stupid default, but see the comments below
    is_dm = False

    # Fixed: use the fpr parameter (the previous code referenced an
    # undefined 'changes' name).
    user = get_uid_from_fingerprint(fpr, session)

    if user is not None:
        uid = user.uid
        if user.name is None:
            uid_name = ''
        else:
            uid_name = user.name

        # Check the relevant fingerprint (which we have to have).
        # Fixed: the fingerprints hang off the user object, not off the
        # uid string.
        for f in user.fingerprint:
            if f.fingerprint == fpr:
                is_dm = f.keyring.debian_maintainer
                break

    return (uid, uid_name, is_dm)
230
231 ###############################################################################
232
233 # Used by Upload.check_timestamps
# Used by Upload.check_timestamps
class TarTime(object):
    """Tar extraction callback that records members whose mtime falls
    outside the window [past_cutoff, future_cutoff]."""

    def __init__(self, future_cutoff, past_cutoff):
        self.reset()
        self.future_cutoff = future_cutoff
        self.past_cutoff = past_cutoff

    def reset(self):
        """Forget any previously recorded offending members."""
        self.future_files = {}
        self.ancient_files = {}

    def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
        """Record Name's MTime if it is too far in the future or the past."""
        too_new = MTime > self.future_cutoff
        too_old = MTime < self.past_cutoff
        if too_new:
            self.future_files[Name] = MTime
        if too_old:
            self.ancient_files[Name] = MTime
249
250 ###############################################################################
251
252 class Upload(object):
253     """
254     Everything that has to do with an upload processed.
255
256     """
    def __init__(self):
        # self.pkg holds all parsed data of the upload (.changes, files,
        # dsc ...); reset() initialises the substitution map and the
        # rejects/warnings/notes lists.
        self.pkg = Changes()
        self.reset()
260
261     ###########################################################################
262
263     def reset (self):
264         """ Reset a number of internal variables."""
265
266         # Initialize the substitution template map
267         cnf = Config()
268         self.Subst = {}
269         self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
270         self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
271         self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
272         self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
273
274         self.rejects = []
275         self.warnings = []
276         self.notes = []
277
278         self.pkg.reset()
279
280     def package_info(self):
281         msg = ''
282
283         if len(self.rejects) > 0:
284             msg += "Reject Reasons:\n"
285             msg += "\n".join(self.rejects)
286
287         if len(self.warnings) > 0:
288             msg += "Warnings:\n"
289             msg += "\n".join(self.warnings)
290
291         if len(self.notes) > 0:
292             msg += "Notes:\n"
293             msg += "\n".join(self.notes)
294
295         return msg
296
297     ###########################################################################
298     def update_subst(self):
299         """ Set up the per-package template substitution mappings """
300
301         cnf = Config()
302
303         # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
304         if not self.pkg.changes.has_key("architecture") or not \
305            isinstance(changes["architecture"], DictType):
306             self.pkg.changes["architecture"] = { "Unknown" : "" }
307
308         # and maintainer2047 may not exist.
309         if not self.pkg.changes.has_key("maintainer2047"):
310             self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]
311
312         self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
313         self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
314         self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")
315
316         # For source uploads the Changed-By field wins; otherwise Maintainer wins.
317         if self.pkg.changes["architecture"].has_key("source") and \
318            self.pkg.changes["changedby822"] != "" and \
319            (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
320
321             self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
322             self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], changes["maintainer2047"])
323             self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
324         else:
325             self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
326             self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
327             self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")
328
329         if "sponsoremail" in self.pkg.changes:
330             self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
331
332         if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
333             self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
334
335         # Apply any global override of the Maintainer field
336         if cnf.get("Dinstall::OverrideMaintainer"):
337             self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
338             self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]
339
340         self.Subst["__REJECT_MESSAGE__"] = self.package_info()
341         self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
342         self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
343
344     ###########################################################################
345     def load_changes(self, filename):
346         """
347         @rtype boolean
348         @rvalue: whether the changes file was valid or not.  We may want to
349                  reject even if this is True (see what gets put in self.rejects).
350                  This is simply to prevent us even trying things later which will
351                  fail because we couldn't properly parse the file.
352         """
353         self.pkg.changes_file = filename
354
355         # Parse the .changes field into a dictionary
356         try:
357             self.pkg.changes.update(parse_changes(filename))
358         except CantOpenError:
359             self.rejects.append("%s: can't read file." % (filename))
360             return False
361         except ParseChangesError, line:
362             self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
363             return False
364         except ChangesUnicodeError:
365             self.rejects.append("%s: changes file not proper utf-8" % (filename))
366             return False
367
368         # Parse the Files field from the .changes into another dictionary
369         try:
370             self.pkg.files.update(build_file_list(self.pkg.changes))
371         except ParseChangesError, line:
372             self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
373             return False
374         except UnknownFormatError, format:
375             self.rejects.append("%s: unknown format '%s'." % (filename, format))
376             return False
377
378         # Check for mandatory fields
379         for i in ("distribution", "source", "binary", "architecture",
380                   "version", "maintainer", "files", "changes", "description"):
381             if not self.pkg.changes.has_key(i):
382                 # Avoid undefined errors later
383                 self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
384                 return False
385
386         # Strip a source version in brackets from the source field
387         if re_strip_srcver.search(self.pkg.changes["source"]):
388             self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])
389
390         # Ensure the source field is a valid package name.
391         if not re_valid_pkg_name.match(self.pkg.changes["source"]):
392             self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))
393
394         # Split multi-value fields into a lower-level dictionary
395         for i in ("architecture", "distribution", "binary", "closes"):
396             o = self.pkg.changes.get(i, "")
397             if o != "":
398                 del self.pkg.changes[i]
399
400             self.pkg.changes[i] = {}
401
402             for j in o.split():
403                 self.pkg.changes[i][j] = 1
404
405         # Fix the Maintainer: field to be RFC822/2047 compatible
406         try:
407             (self.pkg.changes["maintainer822"],
408              self.pkg.changes["maintainer2047"],
409              self.pkg.changes["maintainername"],
410              self.pkg.changes["maintaineremail"]) = \
411                    fix_maintainer (self.pkg.changes["maintainer"])
412         except ParseMaintError, msg:
413             self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
414                    % (filename, changes["maintainer"], msg))
415
416         # ...likewise for the Changed-By: field if it exists.
417         try:
418             (self.pkg.changes["changedby822"],
419              self.pkg.changes["changedby2047"],
420              self.pkg.changes["changedbyname"],
421              self.pkg.changes["changedbyemail"]) = \
422                    fix_maintainer (self.pkg.changes.get("changed-by", ""))
423         except ParseMaintError, msg:
424             self.pkg.changes["changedby822"] = ""
425             self.pkg.changes["changedby2047"] = ""
426             self.pkg.changes["changedbyname"] = ""
427             self.pkg.changes["changedbyemail"] = ""
428
429             self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
430                    % (filename, changes["changed-by"], msg))
431
432         # Ensure all the values in Closes: are numbers
433         if self.pkg.changes.has_key("closes"):
434             for i in self.pkg.changes["closes"].keys():
435                 if re_isanum.match (i) == None:
436                     self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))
437
438         # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
439         self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
440         self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])
441
442         # Check there isn't already a changes file of the same name in one
443         # of the queue directories.
444         base_filename = os.path.basename(filename)
445         for d in [ "Accepted", "Byhand", "Done", "New", "ProposedUpdates", "OldProposedUpdates" ]:
446             if os.path.exists(os.path.join(Cnf["Dir::Queue::%s" % (d) ], base_filename):
447                 self.rejects.append("%s: a file with this name already exists in the %s directory." % (base_filename, d))
448
449         # Check the .changes is non-empty
450         if not self.pkg.files:
451             self.rejects.append("%s: nothing to do (Files field is empty)." % (base_filename))
452             return False
453
454         # Changes was syntactically valid even if we'll reject
455         return True
456
457     ###########################################################################
458
459     def check_distributions(self):
460         "Check and map the Distribution field"
461
462         Cnf = Config()
463
464         # Handle suite mappings
465         for m in Cnf.ValueList("SuiteMappings"):
466             args = m.split()
467             mtype = args[0]
468             if mtype == "map" or mtype == "silent-map":
469                 (source, dest) = args[1:3]
470                 if self.pkg.changes["distribution"].has_key(source):
471                     del self.pkg.changes["distribution"][source]
472                     self.pkg.changes["distribution"][dest] = 1
473                     if mtype != "silent-map":
474                         self.notes.append("Mapping %s to %s." % (source, dest))
475                 if self.pkg.changes.has_key("distribution-version"):
476                     if self.pkg.changes["distribution-version"].has_key(source):
477                         self.pkg.changes["distribution-version"][source]=dest
478             elif mtype == "map-unreleased":
479                 (source, dest) = args[1:3]
480                 if self.pkg.changes["distribution"].has_key(source):
481                     for arch in self.pkg.changes["architecture"].keys():
482                         if arch not in [ arch_string for a in get_suite_architectures(source) ]:
483                             self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
484                             del self.pkg.changes["distribution"][source]
485                             self.pkg.changes["distribution"][dest] = 1
486                             break
487             elif mtype == "ignore":
488                 suite = args[1]
489                 if self.pkg.changes["distribution"].has_key(suite):
490                     del self.pkg.changes["distribution"][suite]
491                     self.warnings.append("Ignoring %s as a target suite." % (suite))
492             elif mtype == "reject":
493                 suite = args[1]
494                 if self.pkg.changes["distribution"].has_key(suite):
495                     self.rejects.append("Uploads to %s are not accepted." % (suite))
496             elif mtype == "propup-version":
497                 # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
498                 #
499                 # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
500                 if self.pkg.changes["distribution"].has_key(args[1]):
501                     self.pkg.changes.setdefault("distribution-version", {})
502                     for suite in args[2:]:
503                         self.pkg.changes["distribution-version"][suite] = suite
504
505         # Ensure there is (still) a target distribution
506         if len(self.pkg.changes["distribution"].keys()) < 1:
507             self.rejects.append("No valid distribution remaining.")
508
509         # Ensure target distributions exist
510         for suite in self.pkg.changes["distribution"].keys():
511             if not Cnf.has_key("Suite::%s" % (suite)):
512                 self.rejects.append("Unknown distribution `%s'." % (suite))
513
514     ###########################################################################
515
516     def binary_file_checks(self, f, session):
517         cnf = Config()
518         entry = self.pkg.files[f]
519
520         # Extract package control information
521         deb_file = utils.open_file(f)
522         try:
523             control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
524         except:
525             self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
526             deb_file.close()
527             # Can't continue, none of the checks on control would work.
528             return
529
530         # Check for mandantory "Description:"
531         deb_file.seek(0)
532         try:
533             apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
534         except:
535             self.rejects.append("%s: Missing Description in binary package" % (f))
536             return
537
538         deb_file.close()
539
540         # Check for mandatory fields
541         for field in [ "Package", "Architecture", "Version" ]:
542             if control.Find(field) == None:
543                 # Can't continue
544                 self.rejects.append("%s: No %s field in control." % (f, field))
545                 return
546
547         # Ensure the package name matches the one give in the .changes
548         if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
549             self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
550
551         # Validate the package field
552         package = control.Find("Package")
553         if not re_valid_pkg_name.match(package):
554             self.rejects.append("%s: invalid package name '%s'." % (f, package))
555
556         # Validate the version field
557         version = control.Find("Version")
558         if not re_valid_version.match(version):
559             self.rejects.append("%s: invalid version number '%s'." % (f, version))
560
561         # Ensure the architecture of the .deb is one we know about.
562         default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
563         architecture = control.Find("Architecture")
564         upload_suite = self.pkg.changes["distribution"].keys()[0]
565
566         if      architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session)] \
567             and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session)]:
568             self.rejects.append("Unknown architecture '%s'." % (architecture))
569
570         # Ensure the architecture of the .deb is one of the ones
571         # listed in the .changes.
572         if not self.pkg.changes["architecture"].has_key(architecture):
573             self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
574
575         # Sanity-check the Depends field
576         depends = control.Find("Depends")
577         if depends == '':
578             self.rejects.append("%s: Depends field is empty." % (f))
579
580         # Sanity-check the Provides field
581         provides = control.Find("Provides")
582         if provides:
583             provide = re_spacestrip.sub('', provides)
584             if provide == '':
585                 self.rejects.append("%s: Provides field is empty." % (f))
586             prov_list = provide.split(",")
587             for prov in prov_list:
588                 if not re_valid_pkg_name.match(prov):
589                     self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
590
591         # Check the section & priority match those given in the .changes (non-fatal)
592         if     control.Find("Section") and entry["section"] != "" \
593            and entry["section"] != control.Find("Section"):
594             self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
595                                 (f, control.Find("Section", ""), entry["section"]))
596         if control.Find("Priority") and entry["priority"] != "" \
597            and entry["priority"] != control.Find("Priority"):
598             self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
599                                 (f, control.Find("Priority", ""), entry["priority"]))
600
601         entry["package"] = package
602         entry["architecture"] = architecture
603         entry["version"] = version
604         entry["maintainer"] = control.Find("Maintainer", "")
605
606         if f.endswith(".udeb"):
607             files[f]["dbtype"] = "udeb"
608         elif f.endswith(".deb"):
609             files[f]["dbtype"] = "deb"
610         else:
611             self.rejects.append("%s is neither a .deb or a .udeb." % (f))
612
613         entry["source"] = control.Find("Source", entry["package"])
614
615         # Get the source version
616         source = entry["source"]
617         source_version = ""
618
619         if source.find("(") != -1:
620             m = re_extract_src_version.match(source)
621             source = m.group(1)
622             source_version = m.group(2)
623
624         if not source_version:
625             source_version = files[f]["version"]
626
627         entry["source package"] = source
628         entry["source version"] = source_version
629
630         # Ensure the filename matches the contents of the .deb
631         m = re_isadeb.match(f)
632
633         #  package name
634         file_package = m.group(1)
635         if entry["package"] != file_package:
636             self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
637                                 (f, file_package, entry["dbtype"], entry["package"]))
638         epochless_version = re_no_epoch.sub('', control.Find("Version"))
639
640         #  version
641         file_version = m.group(2)
642         if epochless_version != file_version:
643             self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
644                                 (f, file_version, entry["dbtype"], epochless_version))
645
646         #  architecture
647         file_architecture = m.group(3)
648         if entry["architecture"] != file_architecture:
649             self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
650                                 (f, file_architecture, entry["dbtype"], entry["architecture"]))
651
652         # Check for existent source
653         source_version = entry["source version"]
654         source_package = entry["source package"]
655         if self.pkg.changes["architecture"].has_key("source"):
656             if source_version != self.pkg.changes["version"]:
657                 self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
658                                     (source_version, f, self.pkg.changes["version"]))
659         else:
660             # Check in the SQL database
661             if not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
662                 # Check in one of the other directories
663                 source_epochless_version = re_no_epoch.sub('', source_version)
664                 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
665                 if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
666                     entry["byhand"] = 1
667                 elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
668                     entry["new"] = 1
669                 else:
670                     dsc_file_exists = False
671                     for myq in ["Accepted", "Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
672                         if cnf.has_key("Dir::Queue::%s" % (myq)):
673                             if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
674                                 dsc_file_exists = True
675                                 break
676
677                     if not dsc_file_exists:
678                         self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
679
680         # Check the version and for file overwrites
681         self.check_binary_against_db(f, session)
682
683         b = Binary(f).scan_package()
684         if len(b.rejects) > 0:
685             for j in b.rejects:
686                 self.rejects.append(j)
687
688     def source_file_checks(self, f, session):
689         entry = self.pkg.files[f]
690
691         m = re_issource.match(f)
692         if not m:
693             return
694
695         entry["package"] = m.group(1)
696         entry["version"] = m.group(2)
697         entry["type"] = m.group(3)
698
699         # Ensure the source package name matches the Source filed in the .changes
700         if self.pkg.changes["source"] != entry["package"]:
701             self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
702
703         # Ensure the source version matches the version in the .changes file
704         if entry["type"] == "orig.tar.gz":
705             changes_version = self.pkg.changes["chopversion2"]
706         else:
707             changes_version = self.pkg.changes["chopversion"]
708
709         if changes_version != entry["version"]:
710             self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))
711
712         # Ensure the .changes lists source in the Architecture field
713         if not self.pkg.changes["architecture"].has_key("source"):
714             self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))
715
716         # Check the signature of a .dsc file
717         if entry["type"] == "dsc":
718             # check_signature returns either:
719             #  (None, [list, of, rejects]) or (signature, [])
720             (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
721             for j in rejects:
722                 self.rejects.append(j)
723
724         entry["architecture"] = "source"
725
726     def per_suite_file_checks(self, f, suite, session):
727         cnf = Config()
728         entry = self.pkg.files[f]
729
730         # Skip byhand
731         if entry.has_key("byhand"):
732             return
733
734         # Handle component mappings
735         for m in cnf.ValueList("ComponentMappings"):
736             (source, dest) = m.split()
737             if entry["component"] == source:
738                 entry["original component"] = source
739                 entry["component"] = dest
740
741         # Ensure the component is valid for the target suite
742         if cnf.has_key("Suite:%s::Components" % (suite)) and \
743            entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
744             self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
745             return
746
747         # Validate the component
748         component = entry["component"]
749         if not get_component(component, session):
750             self.rejects.append("file '%s' has unknown component '%s'." % (f, component))
751             return
752
753         # See if the package is NEW
754         if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
755             entry["new"] = 1
756
757         # Validate the priority
758         if entry["priority"].find('/') != -1:
759             self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))
760
761         # Determine the location
762         location = cnf["Dir::Pool"]
763         l = get_location(location, component, archive, session)
764         if l is None:
765             self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s, Archive: %s)" % (component, archive))
766             entry["location id"] = -1
767         else:
768             entry["location id"] = l.location_id
769
770         # Check the md5sum & size against existing files (if any)
771         entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])
772
773         found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
774                                          entry["size"], entry["md5sum"], entry["location id"])
775
776         if found is None:
777             self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
778         elif found is False and poolfile is not None:
779             self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
780         else:
781             if poolfile is None:
782                 entry["files id"] = None
783             else:
784                 entry["files id"] = poolfile.file_id
785
786         # Check for packages that have moved from one component to another
787         entry['suite'] = suite
788         res = get_binary_components(files[f]['package'], suite, entry["architecture"], session)
789         if res.rowcount > 0:
790             entry["othercomponents"] = res.fetchone()[0]
791
792     def check_files(self, action=True):
793         archive = utils.where_am_i()
794         file_keys = self.pkg.files.keys()
795         holding = Holding()
796         cnf = Config()
797
798         # XXX: As far as I can tell, this can no longer happen - see
799         #      comments by AJ in old revisions - mhy
800         # if reprocess is 2 we've already done this and we're checking
801         # things again for the new .orig.tar.gz.
802         # [Yes, I'm fully aware of how disgusting this is]
803         if action and self.reprocess < 2:
804             cwd = os.getcwd()
805             os.chdir(self.pkg.directory)
806             for f in file_keys:
807                 ret = holding.copy_to_holding(f)
808                 if ret is not None:
809                     # XXX: Should we bail out here or try and continue?
810                     self.rejects.append(ret)
811
812             os.chdir(cwd)
813
814         # Check there isn't already a .changes or .dak file of the same name in
815         # the proposed-updates "CopyChanges" or "CopyDotDak" storage directories.
816         # [NB: this check must be done post-suite mapping]
817         base_filename = os.path.basename(self.pkg.changes_file)
818         dot_dak_filename = base_filename[:-8] + ".dak"
819
820         for suite in self.pkg.changes["distribution"].keys():
821             copychanges = "Suite::%s::CopyChanges" % (suite)
822             if cnf.has_key(copychanges) and \
823                    os.path.exists(os.path.join(cnf[copychanges], base_filename)):
824                 self.rejects.append("%s: a file with this name already exists in %s" \
825                            % (base_filename, cnf[copychanges]))
826
827             copy_dot_dak = "Suite::%s::CopyDotDak" % (suite)
828             if cnf.has_key(copy_dot_dak) and \
829                    os.path.exists(os.path.join(cnf[copy_dot_dak], dot_dak_filename)):
830                 self.rejects.append("%s: a file with this name already exists in %s" \
831                            % (dot_dak_filename, Cnf[copy_dot_dak]))
832
833         self.reprocess = 0
834         has_binaries = False
835         has_source = False
836
837         s = DBConn().session()
838
839         for f, entry in self.pkg.files.items():
840             # Ensure the file does not already exist in one of the accepted directories
841             for d in [ "Accepted", "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
842                 if not cnf.has_key("Dir::Queue::%s" % (d)): continue
843                 if os.path.exists(cnf["Dir::Queue::%s" % (d) ] + '/' + f):
844                     self.rejects.append("%s file already exists in the %s directory." % (f, d))
845
846             if not re_taint_free.match(f):
847                 self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))
848
849             # Check the file is readable
850             if os.access(f, os.R_OK) == 0:
851                 # When running in -n, copy_to_holding() won't have
852                 # generated the reject_message, so we need to.
853                 if action:
854                     if os.path.exists(f):
855                         self.rejects.append("Can't read `%s'. [permission denied]" % (f))
856                     else:
857                         self.rejects.append("Can't read `%s'. [file not found]" % (f))
858                 entry["type"] = "unreadable"
859                 continue
860
861             # If it's byhand skip remaining checks
862             if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
863                 entry["byhand"] = 1
864                 entry["type"] = "byhand"
865
866             # Checks for a binary package...
867             elif re_isadeb.match(f):
868                 has_binaries = True
869                 entry["type"] = "deb"
870
871                 # This routine appends to self.rejects/warnings as appropriate
872                 self.binary_file_checks(f, session)
873
874             # Checks for a source package...
875             elif re_issource.match(f)
876                 has_source = True
877
878                 # This routine appends to self.rejects/warnings as appropriate
879                 self.source_file_checks(f, session)
880
881             # Not a binary or source package?  Assume byhand...
882             else:
883                 entry["byhand"] = 1
884                 entry["type"] = "byhand"
885
886             # Per-suite file checks
887             entry["oldfiles"] = {}
888             for suite in self.pkg.changes["distribution"].keys():
889                 self.per_suite_file_checks(f, suite, session)
890
891         # If the .changes file says it has source, it must have source.
892         if self.pkg.changes["architecture"].has_key("source"):
893             if not has_source:
894                 self.rejects.append("no source found and Architecture line in changes mention source.")
895
896             if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
897                 self.rejects.append("source only uploads are not supported.")
898
899     ###########################################################################
900     def check_dsc(self, action=True):
901         """Returns bool indicating whether or not the source changes are valid"""
902         # Ensure there is source to check
903         if not self.pkg.changes["architecture"].has_key("source"):
904             return True
905
906         # Find the .dsc
907         dsc_filename = None
908         for f, entry in self.pkg.files.items():
909             if entry["type"] == "dsc":
910                 if dsc_filename:
911                     self.rejects.append("can not process a .changes file with multiple .dsc's.")
912                     return False
913                 else:
914                     dsc_filename = f
915
916         # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
917         if not dsc_filename:
918             self.rejects.append("source uploads must contain a dsc file")
919             return False
920
921         # Parse the .dsc file
922         try:
923             self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1))
924         except CantOpenError:
925             # if not -n copy_to_holding() will have done this for us...
926             if not action:
927                 self.rejects.append("%s: can't read file." % (dsc_filename))
928         except ParseChangesError, line:
929             self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
930         except InvalidDscError, line:
931             self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
932         except ChangesUnicodeError:
933             self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))
934
935         # Build up the file list of files mentioned by the .dsc
936         try:
937             self.pkg.dsc_files.update(utils.build_file_list(dsc, is_a_dsc=1))
938         except NoFilesFieldError:
939             self.rejects.append("%s: no Files: field." % (dsc_filename))
940             return False
941         except UnknownFormatError, format:
942             self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
943             return False
944         except ParseChangesError, line:
945             self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
946             return False
947
948         # Enforce mandatory fields
949         for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
950             if not self.pkg.dsc.has_key(i):
951                 self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
952                 return False
953
954         # Validate the source and version fields
955         if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
956             self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
957         if not re_valid_version.match(dsc["version"]):
958             self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))
959
960         # Bumping the version number of the .dsc breaks extraction by stable's
961         # dpkg-source.  So let's not do that...
962         if self.pkg.dsc["format"] != "1.0":
963             self.rejects.append("%s: incompatible 'Format' version produced by a broken version of dpkg-dev 1.9.1{3,4}." % (dsc_filename))
964
965         # Validate the Maintainer field
966         try:
967             # We ignore the return value
968             fix_maintainer(self.pkg.dsc["maintainer"])
969         except ParseMaintError, msg:
970             self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
971                                  % (dsc_filename, self.pkg.dsc["maintainer"], msg))
972
973         # Validate the build-depends field(s)
974         for field_name in [ "build-depends", "build-depends-indep" ]:
975             field = self.pkg.dsc.get(field_name)
976             if field:
977                 # Check for broken dpkg-dev lossage...
978                 if field.startswith("ARRAY"):
979                     self.rejects.append("%s: invalid %s field produced by a broken version of dpkg-dev (1.10.11)" % \
980                                         (dsc_filename, field_name.title()))
981
982                 # Have apt try to parse them...
983                 try:
984                     apt_pkg.ParseSrcDepends(field)
985                 except:
986                     self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))
987
988         # Ensure the version number in the .dsc matches the version number in the .changes
989         epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
990         changes_version = self.pkg.files[dsc_filename]["version"]
991
992         if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
993             self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
994
995         # Ensure there is a .tar.gz in the .dsc file
996         has_tar = False
997         for f in dsc_files.keys():
998             m = re_issource.match(f)
999             if not m:
1000                 self.rejects.append("%s: %s in Files field not recognised as source." % (dsc_filename, f))
1001                 continue
1002             ftype = m.group(3)
1003             if ftype == "orig.tar.gz" or ftype == "tar.gz":
1004                 has_tar = True
1005
1006         if not has_tar:
1007             self.rejects.append("%s: no .tar.gz or .orig.tar.gz in 'Files' field." % (dsc_filename))
1008
1009         # Ensure source is newer than existing source in target suites
1010         self.check_source_against_db(dsc_filename, session)
1011
1012         self.check_dsc_against_db(dsc_filename)
1013
1014         return True
1015
1016     ###########################################################################
1017
1018     def get_changelog_versions(self, source_dir):
1019         """Extracts a the source package and (optionally) grabs the
1020         version history out of debian/changelog for the BTS."""
1021
1022         cnf = Config()
1023
1024         # Find the .dsc (again)
1025         dsc_filename = None
1026         for f in self.files.keys():
1027             if files[f]["type"] == "dsc":
1028                 dsc_filename = f
1029
1030         # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1031         if not dsc_filename:
1032             return
1033
1034         # Create a symlink mirror of the source files in our temporary directory
1035         for f in self.files.keys():
1036             m = re_issource.match(f)
1037             if m:
1038                 src = os.path.join(source_dir, f)
1039                 # If a file is missing for whatever reason, give up.
1040                 if not os.path.exists(src):
1041                     return
1042                 ftype = m.group(3)
1043                 if ftype == "orig.tar.gz" and self.pkg.orig_tar_gz:
1044                     continue
1045                 dest = os.path.join(os.getcwd(), f)
1046                 os.symlink(src, dest)
1047
1048         # If the orig.tar.gz is not a part of the upload, create a symlink to the
1049         # existing copy.
1050         if self.pkg.orig_tar_gz:
1051             dest = os.path.join(os.getcwd(), os.path.basename(self.pkg.orig_tar_gz))
1052             os.symlink(self.pkg.orig_tar_gz, dest)
1053
1054         # Extract the source
1055         cmd = "dpkg-source -sn -x %s" % (dsc_filename)
1056         (result, output) = commands.getstatusoutput(cmd)
1057         if (result != 0):
1058             self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
1059             self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "), "")
1060             return
1061
1062         if not cnf.Find("Dir::Queue::BTSVersionTrack"):
1063             return
1064
1065         # Get the upstream version
1066         upstr_version = re_no_epoch.sub('', dsc["version"])
1067         if re_strip_revision.search(upstr_version):
1068             upstr_version = re_strip_revision.sub('', upstr_version)
1069
1070         # Ensure the changelog file exists
1071         changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
1072         if not os.path.exists(changelog_filename):
1073             self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
1074             return
1075
1076         # Parse the changelog
1077         self.pkg.dsc["bts changelog"] = ""
1078         changelog_file = utils.open_file(changelog_filename)
1079         for line in changelog_file.readlines():
1080             m = re_changelog_versions.match(line)
1081             if m:
1082                 self.pkg.dsc["bts changelog"] += line
1083         changelog_file.close()
1084
1085         # Check we found at least one revision in the changelog
1086         if not self.pkg.dsc["bts changelog"]:
1087             self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1088
    def check_source(self):
        """
        Extract the source into a temporary directory and collect the
        changelog version history via get_changelog_versions(), then remove
        the temporary tree again.  Problems are appended to self.rejects.
        """
        # XXX: I'm fairly sure reprocess == 2 can never happen
        #      AJT disabled the is_incoming check years ago - mhy
        #      We should probably scrap or rethink the whole reprocess thing
        # Bail out if:
        #    a) there's no source
        # or b) reprocess is 2 - we will do this check next time when orig.tar.gz is in 'files'
        # or c) the orig.tar.gz is MIA
        if not self.pkg.changes["architecture"].has_key("source") or self.reprocess == 2 \
           or self.pkg.orig_tar_gz == -1:
            return

        tmpdir = utils.temp_dirname()

        # Move into the temporary directory
        cwd = os.getcwd()
        os.chdir(tmpdir)

        # Get the changelog version history
        self.get_changelog_versions(cwd)

        # Move back and cleanup the temporary tree
        os.chdir(cwd)

        try:
            shutil.rmtree(tmpdir)
        except OSError, e:
            # NOTE(review): utils.fubar presumably aborts the process - confirm;
            # on EACCES we fall through to the chmod retry below instead.
            if e.errno != errno.EACCES:
                utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))

            self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
            # We probably have u-r or u-w directories so chmod everything
            # and try again.
            cmd = "chmod -R u+rwx %s" % (tmpdir)
            result = os.system(cmd)
            if result != 0:
                utils.fubar("'%s' failed with result %s." % (cmd, result))
            shutil.rmtree(tmpdir)
        except:
            # Any other failure to clean up is fatal.
            utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1129
1130     ###########################################################################
1131     def ensure_hashes(self):
1132         # Make sure we recognise the format of the Files: field in the .changes
1133         format = self.pkg.changes.get("format", "0.0").split(".", 1)
1134         if len(format) == 2:
1135             format = int(format[0]), int(format[1])
1136         else:
1137             format = int(float(format[0])), 0
1138
1139         # We need to deal with the original changes blob, as the fields we need
1140         # might not be in the changes dict serialised into the .dak anymore.
1141         orig_changes = parse_deb822(self.pkg.changes['filecontents'])
1142
1143         # Copy the checksums over to the current changes dict.  This will keep
1144         # the existing modifications to it intact.
1145         for field in orig_changes:
1146             if field.startswith('checksums-'):
1147                 self.pkg.changes[field] = orig_changes[field]
1148
1149         # Check for unsupported hashes
1150         for j in utils.check_hash_fields(".changes", self.pkg.changes):
1151             self.rejects.append(j)
1152
1153         for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1154             self.rejects.append(j)
1155
1156         # We have to calculate the hash if we have an earlier changes version than
1157         # the hash appears in rather than require it exist in the changes file
1158         for hashname, hashfunc, version in utils.known_hashes:
1159             # TODO: Move _ensure_changes_hash into this class
1160             for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1161                 self.rejects.append(j)
1162             if "source" in self.pkg.changes["architecture"]:
1163                 # TODO: Move _ensure_dsc_hash into this class
1164                 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc))
1165                     self.rejects.append(j)
1166
1167     def check_hashes():
1168         for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1169             self.rejects.append(m)
1170
1171         for m in utils.check_size(".changes", self.pkg.files):
1172             self.rejects.append(m)
1173
1174         for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1175             self.rejects.append(m)
1176
1177         for m in utils.check_size(".dsc", self.pkg.dsc_files):
1178             self.rejects.append(m)
1179
1180         for m in utils.ensure_hashes(self.pkg.changes, dsc, files, dsc_files):
1181             self.rejects.append(m)
1182
1183     ###########################################################################
1184     def check_urgency(self):
1185         cnf = Config()
1186         if self.pkg.changes["architecture"].has_key("source"):
1187             if not self.pkg.changes.has_key("urgency"):
1188                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1189             self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1190             if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1191                 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1192                                      (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1193                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1194
1195     ###########################################################################
1196
1197     # Sanity check the time stamps of files inside debs.
1198     # [Files in the near future cause ugly warnings and extreme time
1199     #  travel can cause errors on extraction]
1200
1201     def check_timestamps(self):
1202         future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1203         past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1204         tar = TarTime(future_cutoff, past_cutoff)
1205
1206         for filename, entry in self.pkg.files.keys():
1207             if entry["type"] == "deb":
1208                 tar.reset()
1209                 try:
1210                     deb_file = utils.open_file(filename)
1211                     apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
1212                     deb_file.seek(0)
1213                     try:
1214                         apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
1215                     except SystemError, e:
1216                         # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
1217                         if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
1218                             raise
1219                         deb_file.seek(0)
1220                         apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")
1221
1222                     deb_file.close()
1223
1224                     future_files = tar.future_files.keys()
1225                     if future_files:
1226                         num_future_files = len(future_files)
1227                         future_file = future_files[0]
1228                         future_date = tar.future_files[future_file]
1229                         self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1230                                % (filename, num_future_files, future_file, time.ctime(future_date)))
1231
1232                     ancient_files = tar.ancient_files.keys()
1233                     if ancient_files:
1234                         num_ancient_files = len(ancient_files)
1235                         ancient_file = ancient_files[0]
1236                         ancient_date = tar.ancient_files[ancient_file]
1237                         self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1238                                % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1239                 except:
1240                     self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1241
1242     ###########################################################################
1243     def check_signed_by_key(self):
1244         """Ensure the .changes is signed by an authorized uploader."""
1245         session = DBConn().session()
1246
1247         (uid, uid_name, is_dm) = lookup_uid_from_fingerprint(self.pkg.changes["fingerprint"], session=session)
1248
1249         # match claimed name with actual name:
1250         if uid is None:
1251             # This is fundamentally broken but need us to refactor how we get
1252             # the UIDs/Fingerprints in order for us to fix it properly
1253             uid, uid_email = self.pkg.changes["fingerprint"], uid
1254             may_nmu, may_sponsor = 1, 1
1255             # XXX by default new dds don't have a fingerprint/uid in the db atm,
1256             #     and can't get one in there if we don't allow nmu/sponsorship
1257         elif is_dm is False:
1258             # If is_dm is False, we allow full upload rights
1259             uid_email = "%s@debian.org" % (uid)
1260             may_nmu, may_sponsor = 1, 1
1261         else:
1262             # Assume limited upload rights unless we've discovered otherwise
1263             uid_email = uid
1264             may_nmu, may_sponsor = 0, 0
1265
1266         if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1267             sponsored = 0
1268         elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1269             sponsored = 0
1270             if uid_name == "": sponsored = 1
1271         else:
1272             sponsored = 1
1273             if ("source" in self.pkg.changes["architecture"] and
1274                 uid_email and utils.is_email_alias(uid_email)):
1275                 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1276                 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1277                     self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1278                     self.pkg.changes["sponsoremail"] = uid_email
1279
1280         if sponsored and not may_sponsor:
1281             self.rejects.append("%s is not authorised to sponsor uploads" % (uid))
1282
1283         if not sponsored and not may_nmu:
1284             should_reject = True
1285             highest_sid, highest_version = None, None
1286
1287             # XXX: This reimplements in SQLA what existed before but it's fundamentally fucked
1288             #      It ignores higher versions with the dm_upload_allowed flag set to false
1289             #      I'm keeping the existing behaviour for now until I've gone back and
1290             #      checked exactly what the GR says - mhy
1291             for si in get_sources_from_name(source=self.pkg.changes['source'], dm_upload_allowed=True, session=session):
1292                 if highest_version is None or apt_pkg.VersionCompare(si.version, highest_version) == 1:
1293                      highest_sid = si.source_id
1294                      highest_version = si.version
1295
1296             if highest_sid is None:
1297                 self.rejects.append("Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version" % self.pkg.changes["source"])
1298             else:
1299                 for sup in session.query(SrcUploader).join(DBSource).filter_by(source_id=highest_sid):
1300                     (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
1301                     if email == uid_email or name == uid_name:
1302                         should_reject = False
1303                         break
1304
1305             if should_reject is True:
1306                 self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (uid, self.pkg.changes["source"]))
1307
1308             for b in self.pkg.changes["binary"].keys():
1309                 for suite in self.pkg.changes["distribution"].keys():
1310                     q = session.query(DBSource)
1311                     q = q.join(DBBinary).filter_by(package=b)
1312                     q = q.join(BinAssociation).join(Suite).filter_by(suite)
1313
1314                     for s in q.all():
1315                         if s.source != self.pkg.changes["source"]:
1316                             self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (uid, b, s, suite))
1317
1318             for f in self.pkg.files.keys():
1319                 if self.pkg.files[f].has_key("byhand"):
1320                     self.rejects.append("%s may not upload BYHAND file %s" % (uid, f))
1321                 if self.pkg.files[f].has_key("new"):
1322                     self.rejects.append("%s may not upload NEW file %s" % (uid, f))
1323
1324     ###########################################################################
1325     def build_summaries(self):
1326         """ Build a summary of changes the upload introduces. """
1327
1328         (byhand, new, summary, override_summary) = self.pkg.file_summary()
1329
1330         short_summary = summary
1331
1332         # This is for direport's benefit...
1333         f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1334
1335         if byhand or new:
1336             summary += "Changes: " + f
1337
1338         summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1339
1340         summary += self.announce(short_summary, 0)
1341
1342         return (summary, short_summary)
1343
1344     ###########################################################################
1345
1346     def close_bugs(self, summary, action):
1347         """
1348         Send mail to close bugs as instructed by the closes field in the changes file.
1349         Also add a line to summary if any work was done.
1350
1351         @type summary: string
1352         @param summary: summary text, as given by L{build_summaries}
1353
1354         @type action: bool
1355         @param action: Set to false no real action will be done.
1356
1357         @rtype: string
1358         @return: summary. If action was taken, extended by the list of closed bugs.
1359
1360         """
1361
1362         template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1363
1364         bugs = self.pkg.changes["closes"].keys()
1365
1366         if not bugs:
1367             return summary
1368
1369         bugs.sort()
1370         summary += "Closing bugs: "
1371         for bug in bugs:
1372             summary += "%s " % (bug)
1373             if action:
1374                 self.Subst["__BUG_NUMBER__"] = bug
1375                 if self.pkg.changes["distribution"].has_key("stable"):
1376                     self.Subst["__STABLE_WARNING__"] = """
1377 Note that this package is not part of the released stable Debian
1378 distribution.  It may have dependencies on other unreleased software,
1379 or other instabilities.  Please take care if you wish to install it.
1380 The update will eventually make its way into the next released Debian
1381 distribution."""
1382                 else:
1383                     self.Subst["__STABLE_WARNING__"] = ""
1384                     mail_message = utils.TemplateSubst(self.Subst, template)
1385                     utils.send_mail(mail_message)
1386
1387                 # Clear up after ourselves
1388                 del self.Subst["__BUG_NUMBER__"]
1389                 del self.Subst["__STABLE_WARNING__"]
1390
1391         if action:
1392             self.Logger.log(["closing bugs"] + bugs)
1393
1394         summary += "\n"
1395
1396         return summary
1397
1398     ###########################################################################
1399
1400     def announce(self, short_summary, action):
1401         """
1402         Send an announce mail about a new upload.
1403
1404         @type short_summary: string
1405         @param short_summary: Short summary text to include in the mail
1406
1407         @type action: bool
1408         @param action: Set to false no real action will be done.
1409
1410         @rtype: string
1411         @return: Textstring about action taken.
1412
1413         """
1414
1415         cnf = Config()
1416         announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
1417
1418         # Only do announcements for source uploads with a recent dpkg-dev installed
1419         if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
1420            self.pkg.changes["architecture"].has_key("source"):
1421             return ""
1422
1423         lists_done = {}
1424         summary = ""
1425
1426         self.Subst["__SHORT_SUMMARY__"] = short_summary
1427
1428         for dist in self.pkg.changes["distribution"].keys():
1429             announce_list = Cnf.Find("Suite::%s::Announce" % (dist))
1430             if announce_list == "" or lists_done.has_key(announce_list):
1431                 continue
1432
1433             lists_done[announce_list] = 1
1434             summary += "Announcing to %s\n" % (announce_list)
1435
1436             if action:
1437                 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
1438                 if cnf.get("Dinstall::TrackingServer") and \
1439                    self.pkg.changes["architecture"].has_key("source"):
1440                     trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
1441                     self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
1442
1443                 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
1444                 utils.send_mail(mail_message)
1445
1446                 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
1447
1448         if cnf.FindB("Dinstall::CloseBugs"):
1449             summary = self.close_bugs(summary, action)
1450
1451         del self.Subst["__SHORT_SUMMARY__"]
1452
1453         return summary
1454
1455     ###########################################################################
1456
    def accept (self, summary, short_summary, targetdir=None):
        """
        Accept an upload.

        This moves all files referenced from the .changes into the I{accepted}
        queue, sends the accepted mail, announces to lists, closes bugs and
        also checks for override disparities. If enabled it will write out
        the version history for the BTS Version Tracking and will finally call
        L{queue_build}.

        @type summary: string
        @param summary: Summary text

        @type short_summary: string
        @param short_summary: Short summary

        @type targetdir: string
        @param targetdir: directory to move the files into; defaults to
            Dir::Queue::Accepted from the configuration

        """

        cnf = Config()
        stats = SummaryStats()

        accepttemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted')

        if targetdir is None:
            targetdir = cnf["Dir::Queue::Accepted"]

        print "Accepting."
        self.Logger.log(["Accepting changes", self.pkg.changes_file])

        # Dump the metadata (.dak file) alongside the files
        self.write_dot_dak(targetdir)

        # Move all the files into the accepted directory
        utils.move(self.pkg.changes_file, targetdir)

        # Move each referenced file and account for its size in the stats
        for name, entry in sorted(self.pkg.files.items()):
            utils.move(name, targetdir)
            stats.accept_bytes += float(entry["size"])

        stats.accept_count += 1

        # Send accept mail, announce to lists, close bugs and check for
        # override disparities
        if not cnf["Dinstall::Options::No-Mail"]:
            self.Subst["__SUITE__"] = ""
            self.Subst["__SUMMARY__"] = summary
            mail_message = utils.TemplateSubst(self.Subst, accepttemplate)
            utils.send_mail(mail_message)
            self.announce(short_summary, 1)

        ## Helper stuff for DebBugs Version Tracking
        if cnf.Find("Dir::Queue::BTSVersionTrack"):
            # ??? once queue/* is cleared on *.d.o and/or reprocessed
            # the conditionalization on dsc["bts changelog"] should be
            # dropped.

            # Write out the version history from the changelog
            if self.pkg.changes["architecture"].has_key("source") and \
               self.pkg.dsc.has_key("bts changelog"):

                # Write to a temp file first, then rename into place so the
                # consumer never sees a half-written file
                (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
                version_history = os.fdopen(fd, 'w')
                version_history.write(self.pkg.dsc["bts changelog"])
                version_history.close()
                # <changesfile-basename>.versions, e.g. foo_1.0_i386.versions
                filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
                                      self.pkg.changes_file[:-8]+".versions")
                os.rename(temp_filename, filename)
                os.chmod(filename, 0644)

            # Write out the binary -> source mapping.
            (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
            debinfo = os.fdopen(fd, 'w')
            for name, entry in sorted(self.pkg.files.items()):
                # One "package version arch srcpkg srcver" line per .deb
                if entry["type"] == "deb":
                    line = " ".join([entry["package"], entry["version"],
                                     entry["architecture"], entry["source package"],
                                     entry["source version"]])
                    debinfo.write(line+"\n")
            debinfo.close()
            filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
                                  self.pkg.changes_file[:-8]+".debinfo")
            os.rename(temp_filename, filename)
            os.chmod(filename, 0644)

        # It is Cnf["Dir::Queue::Accepted"] here, not targetdir!
        # <Ganneff> we do call queue_build too
        # <mhy> well yes, we'd have had to if we were inserting into accepted
        # <Ganneff> now. thats database only.
        # <mhy> urgh, that's going to get messy
        # <Ganneff> so i make the p-n call to it *also* using accepted/
        # <mhy> but then the packages will be in the queue_build table without the files being there
        # <Ganneff> as the buildd queue is only regenerated whenever unchecked runs
        # <mhy> ah, good point
        # <Ganneff> so it will work out, as unchecked move it over
        # <mhy> that's all completely sick
        # <Ganneff> yes

        # This routine returns None on success or an error on failure
        res = get_queue('accepted').autobuild_upload(self.pkg, cnf["Dir::Queue::Accepted"])
        if res:
            utils.fubar(res)
1557
1558
1559     def check_override(self):
1560         """
1561         Checks override entries for validity. Mails "Override disparity" warnings,
1562         if that feature is enabled.
1563
1564         Abandons the check if
1565           - override disparity checks are disabled
1566           - mail sending is disabled
1567         """
1568
1569         cnf = Config()
1570
1571         # Abandon the check if:
1572         #  a) override disparity checks have been disabled
1573         #  b) we're not sending mail
1574         if not cnf.FindB("Dinstall::OverrideDisparityCheck") or \
1575            cnf["Dinstall::Options::No-Mail"]:
1576             return
1577
1578         summary = self.pkg.check_override()
1579
1580         if summary == "":
1581             return
1582
1583         overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
1584
1585         self.Subst["__SUMMARY__"] = summary
1586         mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
1587         utils.send_mail(mail_message)
1588         del self.Subst["__SUMMARY__"]
1589
1590     ###########################################################################
1591
1592     def remove(self, dir=None):
1593         """
1594         Used (for instance) in p-u to remove the package from unchecked
1595         """
1596         if dir is None:
1597             os.chdir(self.pkg.directory)
1598         else:
1599             os.chdir(dir)
1600
1601         for f in self.pkg.files.keys():
1602             os.unlink(f)
1603         os.unlink(self.pkg.changes_file)
1604
1605     ###########################################################################
1606
1607     def move_to_dir (self, dest, perms=0660, changesperms=0664):
1608         """
1609         Move files to dest with certain perms/changesperms
1610         """
1611         utils.move(self.pkg.changes_file, dest, perms=changesperms)
1612         for f in self.pkg.files.keys():
1613             utils.move(f, dest, perms=perms)
1614
1615     ###########################################################################
1616
    def force_reject(self, reject_files):
        """
        Forcefully move files from the current directory to the
        reject directory.  If any file already exists in the reject
        directory it will be moved to the morgue to make way for
        the new file.

        @type reject_files: list
        @param reject_files: names of the files to move into the reject
            directory

        """

        cnf = Config()

        for file_entry in reject_files:
            # Skip any files which don't exist or which we don't have permission to copy.
            if os.access(file_entry, os.R_OK) == 0:
                continue

            dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)

            # O_EXCL claims the destination atomically, so a concurrent
            # process cannot race us into overwriting its file.
            try:
                dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
            except OSError, e:
                # File exists?  Let's try and move it to the morgue
                if e.errno == errno.EEXIST:
                    morgue_file = os.path.join(cnf["Dir::Morgue"], cnf["Dir::MorgueReject"], file_entry)
                    try:
                        morgue_file = utils.find_next_free(morgue_file)
                    except NoFreeFilenameError:
                        # Something's either gone badly Pete Tong, or
                        # someone is trying to exploit us.
                        utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file_entry))
                        return
                    utils.move(dest_file, morgue_file, perms=0660)
                    # Retry the atomic claim now that the old file is gone.
                    try:
                        dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
                    except OSError, e:
                        # Likewise
                        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
                        return
                else:
                    raise
            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file_entry, dest_file, 1, perms=0660)
            os.close(dest_fd)
1664
1665     ###########################################################################
1666     def do_reject (self, manual=0, reject_message="", note=""):
1667         """
1668         Reject an upload. If called without a reject message or C{manual} is
1669         true, spawn an editor so the user can write one.
1670
1671         @type manual: bool
1672         @param manual: manual or automated rejection
1673
1674         @type reject_message: string
1675         @param reject_message: A reject message
1676
1677         @return: 0
1678
1679         """
1680         # If we weren't given a manual rejection message, spawn an
1681         # editor so the user can add one in...
1682         if manual and not reject_message:
1683             (fd, temp_filename) = utils.temp_filename()
1684             temp_file = os.fdopen(fd, 'w')
1685             if len(note) > 0:
1686                 for line in note:
1687                     temp_file.write(line)
1688             temp_file.close()
1689             editor = os.environ.get("EDITOR","vi")
1690             answer = 'E'
1691             while answer == 'E':
1692                 os.system("%s %s" % (editor, temp_filename))
1693                 temp_fh = utils.open_file(temp_filename)
1694                 reject_message = "".join(temp_fh.readlines())
1695                 temp_fh.close()
1696                 print "Reject message:"
1697                 print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
1698                 prompt = "[R]eject, Edit, Abandon, Quit ?"
1699                 answer = "XXX"
1700                 while prompt.find(answer) == -1:
1701                     answer = utils.our_raw_input(prompt)
1702                     m = re_default_answer.search(prompt)
1703                     if answer == "":
1704                         answer = m.group(1)
1705                     answer = answer[:1].upper()
1706             os.unlink(temp_filename)
1707             if answer == 'A':
1708                 return 1
1709             elif answer == 'Q':
1710                 sys.exit(0)
1711
1712         print "Rejecting.\n"
1713
1714         cnf = Config()
1715
1716         reason_filename = self.pkg.changes_file[:-8] + ".reason"
1717         reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
1718
1719         # Move all the files into the reject directory
1720         reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
1721         self.force_reject(reject_files)
1722
1723         # If we fail here someone is probably trying to exploit the race
1724         # so let's just raise an exception ...
1725         if os.path.exists(reason_filename):
1726             os.unlink(reason_filename)
1727         reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
1728
1729         rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
1730
1731         if not manual:
1732             self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
1733             self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
1734             self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
1735             os.write(reason_fd, reject_message)
1736             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
1737         else:
1738             # Build up the rejection email
1739             user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
1740             self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
1741             self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
1742             self.Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
1743             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
1744             # Write the rejection email out as the <foo>.reason file
1745             os.write(reason_fd, reject_mail_message)
1746
1747         del self.Subst["__REJECTOR_ADDRESS__"]
1748         del self.Subst["__MANUAL_REJECT_MESSAGE__"]
1749         del self.Subst["__CC__"]
1750
1751         os.close(reason_fd)
1752
1753         # Send the rejection mail if appropriate
1754         if not cnf["Dinstall::Options::No-Mail"]:
1755             utils.send_mail(reject_mail_message)
1756
1757         self.Logger.log(["rejected", pkg.changes_file])
1758
1759         return 0
1760
1761     ################################################################################
1762     def in_override_p(self, package, component, suite, binary_type, file, session=None):
1763         """
1764         Check if a package already has override entries in the DB
1765
1766         @type package: string
1767         @param package: package name
1768
1769         @type component: string
1770         @param component: database id of the component
1771
1772         @type suite: int
1773         @param suite: database id of the suite
1774
1775         @type binary_type: string
1776         @param binary_type: type of the package
1777
1778         @type file: string
1779         @param file: filename we check
1780
1781         @return: the database result. But noone cares anyway.
1782
1783         """
1784
1785         cnf = Config()
1786
1787         if session is None:
1788             session = DBConn().session()
1789
1790         if binary_type == "": # must be source
1791             file_type = "dsc"
1792         else:
1793             file_type = binary_type
1794
1795         # Override suite name; used for example with proposed-updates
1796         if cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
1797             suite = cnf["Suite::%s::OverrideSuite" % (suite)]
1798
1799         result = get_override(package, suite, component, file_type, session)
1800
1801         # If checking for a source package fall back on the binary override type
1802         if file_type == "dsc" and len(result) < 1:
1803             result = get_override(package, suite, component, ['deb', 'udeb'], session)
1804
1805         # Remember the section and priority so we can check them later if appropriate
1806         if len(result) > 0:
1807             result = result[0]
1808             self.pkg.files[file]["override section"] = result.section.section
1809             self.pkg.files[file]["override priority"] = result.priority.priority
1810             return result
1811
1812         return None
1813
1814     ################################################################################
1815     def get_anyversion(self, sv_list, suite):
1816         """
1817         @type sv_list: list
1818         @param sv_list: list of (suite, version) tuples to check
1819
1820         @type suite: string
1821         @param suite: suite name
1822
1823         Description: TODO
1824         """
1825         anyversion = None
1826         anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
1827         for (s, v) in sv_list:
1828             if s in [ x.lower() for x in anysuite ]:
1829                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
1830                     anyversion = v
1831
1832         return anyversion
1833
1834     ################################################################################
1835
1836     def cross_suite_version_check(self, sv_list, file, new_version, sourceful=False):
1837         """
1838         @type sv_list: list
1839         @param sv_list: list of (suite, version) tuples to check
1840
1841         @type file: string
1842         @param file: XXX
1843
1844         @type new_version: string
1845         @param new_version: XXX
1846
1847         Ensure versions are newer than existing packages in target
1848         suites and that cross-suite version checking rules as
1849         set out in the conf file are satisfied.
1850         """
1851
1852         cnf = Config()
1853
1854         # Check versions for each target suite
1855         for target_suite in self.pkg.changes["distribution"].keys():
1856             must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
1857             must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
1858
1859             # Enforce "must be newer than target suite" even if conffile omits it
1860             if target_suite not in must_be_newer_than:
1861                 must_be_newer_than.append(target_suite)
1862
1863             for (suite, existent_version) in sv_list:
1864                 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
1865
1866                 if suite in must_be_newer_than and sourceful and vercmp < 1:
1867                     self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
1868
1869                 if suite in must_be_older_than and vercmp > -1:
1870                     cansave = 0
1871
1872                     if self.pkg.changes.get('distribution-version', {}).has_key(suite):
1873                         # we really use the other suite, ignoring the conflicting one ...
1874                         addsuite = self.pkg.changes["distribution-version"][suite]
1875
1876                         add_version = self.get_anyversion(sv_list, addsuite)
1877                         target_version = self.get_anyversion(sv_list, target_suite)
1878
1879                         if not add_version:
1880                             # not add_version can only happen if we map to a suite
1881                             # that doesn't enhance the suite we're propup'ing from.
1882                             # so "propup-ver x a b c; map a d" is a problem only if
1883                             # d doesn't enhance a.
1884                             #
1885                             # i think we could always propagate in this case, rather
1886                             # than complaining. either way, this isn't a REJECT issue
1887                             #
1888                             # And - we really should complain to the dorks who configured dak
1889                             self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite))
1890                             self.pkg.changes.setdefault("propdistribution", {})
1891                             self.pkg.changes["propdistribution"][addsuite] = 1
1892                             cansave = 1
1893                         elif not target_version:
1894                             # not targets_version is true when the package is NEW
1895                             # we could just stick with the "...old version..." REJECT
1896                             # for this, I think.
1897                             self.rejects.append("Won't propogate NEW packages.")
1898                         elif apt_pkg.VersionCompare(new_version, add_version) < 0:
1899                             # propogation would be redundant. no need to reject though.
1900                             self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
1901                             cansave = 1
1902                         elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
1903                              apt_pkg.VersionCompare(add_version, target_version) >= 0:
1904                             # propogate!!
1905                             self.warnings.append("Propogating upload to %s" % (addsuite))
1906                             self.pkg.changes.setdefault("propdistribution", {})
1907                             self.pkg.changes["propdistribution"][addsuite] = 1
1908                             cansave = 1
1909
1910                     if not cansave:
1911                         self.reject.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
1912
1913     ################################################################################
1914     def check_binary_against_db(self, file, session=None):
1915         if session is None:
1916             session = DBConn().session()
1917
1918         # Ensure version is sane
1919         q = session.query(BinAssociation)
1920         q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[file]["package"])
1921         q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[file]["architecture"], 'all']))
1922
1923         self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
1924                                        file, files[file]["version"], sourceful=False)
1925
1926         # Check for any existing copies of the file
1927         q = session.query(DBBinary).filter_by(files[file]["package"])
1928         q = q.filter_by(version=files[file]["version"])
1929         q = q.join(Architecture).filter_by(arch_string=files[file]["architecture"])
1930
1931         if q.count() > 0:
1932             self.rejects.append("%s: can not overwrite existing copy already in the archive." % (file))
1933
1934     ################################################################################
1935
1936     def check_source_against_db(self, file, session=None):
1937         """
1938         """
1939         if session is None:
1940             session = DBConn().session()
1941
1942         source = self.pkg.dsc.get("source")
1943         version = self.pkg.dsc.get("version")
1944
1945         # Ensure version is sane
1946         q = session.query(SrcAssociation)
1947         q = q.join(DBSource).filter(DBSource.source==source)
1948
1949         self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
1950                                        file, version, sourceful=True)
1951
1952     ################################################################################
1953     def check_dsc_against_db(self, file):
1954         """
1955
1956         @warning: NB: this function can remove entries from the 'files' index [if
1957          the .orig.tar.gz is a duplicate of the one in the archive]; if
1958          you're iterating over 'files' and call this function as part of
1959          the loop, be sure to add a check to the top of the loop to
1960          ensure you haven't just tried to dereference the deleted entry.
1961
1962         """
1963         self.pkg.orig_tar_gz = None
1964
1965         # Try and find all files mentioned in the .dsc.  This has
1966         # to work harder to cope with the multiple possible
1967         # locations of an .orig.tar.gz.
1968         # The ordering on the select is needed to pick the newest orig
1969         # when it exists in multiple places.
1970         for dsc_name, dsc_entry in self.pkg.dsc_files.items():
1971             found = None
1972             if self.pkg.files.has_key(dsc_name):
1973                 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
1974                 actual_size = int(self.pkg.files[dsc_name]["size"])
1975                 found = "%s in incoming" % (dsc_name)
1976
1977                 # Check the file does not already exist in the archive
1978                 ql = get_poolfile_like_name(dsc_name)
1979
1980                 # Strip out anything that isn't '%s' or '/%s$'
1981                 for i in ql:
1982                     if not i.filename.endswith(dsc_name):
1983                         ql.remove(i)
1984
1985                 # "[dak] has not broken them.  [dak] has fixed a
1986                 # brokenness.  Your crappy hack exploited a bug in
1987                 # the old dinstall.
1988                 #
1989                 # "(Come on!  I thought it was always obvious that
1990                 # one just doesn't release different files with
1991                 # the same name and version.)"
1992                 #                        -- ajk@ on d-devel@l.d.o
1993
1994                 if len(ql) > 0:
1995                     # Ignore exact matches for .orig.tar.gz
1996                     match = 0
1997                     if dsc_name.endswith(".orig.tar.gz"):
1998                         for i in ql:
1999                             if self.pkg.files.has_key(dsc_name) and \
2000                                int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2001                                self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2002                                 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2003                                 # TODO: Don't delete the entry, just mark it as not needed
2004                                 # This would fix the stupidity of changing something we often iterate over
2005                                 # whilst we're doing it
2006                                 del files[dsc_name]
2007                                 self.pkg.orig_tar_gz = os.path.join(i.location.path, i.filename)
2008                                 match = 1
2009
2010                     if not match:
2011                         self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2012
2013             elif dsc_name.endswith(".orig.tar.gz"):
2014                 # Check in the pool
2015                 ql = get_poolfile_like_name(dsc_name)
2016
2017                 # Strip out anything that isn't '%s' or '/%s$'
2018                 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2019                 for i in ql:
2020                     if not i.filename.endswith(dsc_name):
2021                         ql.remove(i)
2022
2023                 if len(ql) > 0:
2024                     # Unfortunately, we may get more than one match here if,
2025                     # for example, the package was in potato but had an -sa
2026                     # upload in woody.  So we need to choose the right one.
2027
2028                     # default to something sane in case we don't match any or have only one
2029                     x = ql[0]
2030
2031                     if len(ql) > 1:
2032                         for i in ql:
2033                             old_file = os.path.join(i.location.path, i.filename)
2034                             old_file_fh = utils.open_file(old_file)
2035                             actual_md5 = apt_pkg.md5sum(old_file_fh)
2036                             old_file_fh.close()
2037                             actual_size = os.stat(old_file)[stat.ST_SIZE]
2038                             if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2039                                 x = i
2040
2041                     old_file = os.path.join(i.location.path, i.filename)
2042                     old_file_fh = utils.open_file(old_file)
2043                     actual_md5 = apt_pkg.md5sum(old_file_fh)
2044                     old_file_fh.close()
2045                     actual_size = os.stat(old_file)[stat.ST_SIZE]
2046                     found = old_file
2047                     suite_type = f.location.archive_type
2048                     # need this for updating dsc_files in install()
2049                     dsc_entry["files id"] = f.file_id
2050                     # See install() in process-accepted...
2051                     self.pkg.orig_tar_id = f.file_id
2052                     self.pkg.orig_tar_gz = old_file
2053                     self.pkg.orig_tar_location = f.location.location_id
2054                 else:
2055                     # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2056                     # Not there? Check the queue directories...
2057                     for directory in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2058                         in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2059                         if os.path.exists(in_otherdir):
2060                             in_otherdir_fh = utils.open_file(in_otherdir)
2061                             actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2062                             in_otherdir_fh.close()
2063                             actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2064                             found = in_otherdir
2065                             self.pkg.orig_tar_gz = in_otherdir
2066
2067                     if not found:
2068                         self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_name))
2069                         self.pkg.orig_tar_gz = -1
2070                         continue
2071             else:
2072                 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (file, dsc_name))
2073                 continue
2074             if actual_md5 != dsc_entry["md5sum"]:
2075                 self.rejects.append("md5sum for %s doesn't match %s." % (found, file))
2076             if actual_size != int(dsc_entry["size"]):
2077                 self.rejects.append("size for %s doesn't match %s." % (found, file))
2078
2079     ################################################################################
2080     # If any file of an upload has a recent mtime then chances are good
2081     # the file is still being uploaded.
2082
2083     def upload_too_new(self):
2084         cnf = Config()
2085         too_new = False
2086         # Move back to the original directory to get accurate time stamps
2087         cwd = os.getcwd()
2088         os.chdir(self.pkg.directory)
2089         file_list = self.pkg.files.keys()
2090         file_list.extend(self.pkg.dsc_files.keys())
2091         file_list.append(self.pkg.changes_file)
2092         for f in file_list:
2093             try:
2094                 last_modified = time.time()-os.path.getmtime(f)
2095                 if last_modified < int(cnf["Dinstall::SkipTime"]):
2096                     too_new = True
2097                     break
2098             except:
2099                 pass
2100
2101         os.chdir(cwd)
2102         return too_new