#!/usr/bin/env python
# vim:set et sw=4:

"""
Queue utility functions for dak

@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2001 - 2006 James Troup <james@nocrew.org>
@copyright: 2009, 2010  Joerg Jaspert <joerg@debian.org>
@license: GNU General Public License version 2 or later
"""

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

###############################################################################

import errno
import os
import stat
import sys
import time
import apt_inst
import apt_pkg
import utils
import commands
import shutil
import textwrap
from types import *
from sqlalchemy.sql.expression import desc
from sqlalchemy.orm.exc import NoResultFound

import yaml

from dak_exceptions import *
from changes import *
from regexes import *
from config import Config
from holding import Holding
from urgencylog import UrgencyLog
from dbconn import *
from summarystats import SummaryStats
from utils import parse_changes, check_dsc_files
from textutils import fix_maintainer
from binary import Binary
from lintian import parse_lintian_output, generate_reject_messages

###############################################################################

def get_type(f, session):
    """
    Get the file type of C{f}

    @type f: dict
    @param f: file entry from Changes object

    @type session: SQLA Session
    @param session: SQL Alchemy session object

    @rtype: string
    @return: filetype

    """
    # Determine the type
    if f.has_key("dbtype"):
        file_type = f["dbtype"]
    elif re_source_ext.match(f["type"]):
        file_type = "dsc"
    else:
        file_type = f["type"]
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (file_type))

    # Validate the override type
    type_id = get_override_type(file_type, session)
    if type_id is None:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))

    return file_type

################################################################################

# Determine what parts in a .changes are NEW

def determine_new(changes, files, warn=1, session=None):
    """
    Determine what parts in a C{changes} file are NEW.

    @type changes: Upload.Pkg.changes dict
    @param changes: Changes dictionary

    @type files: Upload.Pkg.files dict
    @param files: Files dictionary

    @type warn: bool
    @param warn: Warn if overrides are added for (old)stable

    @rtype: dict
    @return: dictionary of NEW components.

    """
    new = {}

    # Build up a list of potentially new things
    for name, f in files.items():
        # Skip byhand elements
#        if f["type"] == "byhand":
#            continue
        pkg = f["package"]
        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f, session)
        component = f["component"]

        if file_type == "dsc":
            priority = "source"

        if not new.has_key(pkg):
            new[pkg] = {}
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        else:
            old_type = new[pkg]["type"]
            if old_type != file_type:
                # source gets trumped by deb or udeb
                if old_type == "dsc":
                    new[pkg]["priority"] = priority
                    new[pkg]["section"] = section
                    new[pkg]["type"] = file_type
                    new[pkg]["component"] = component

        new[pkg]["files"].append(name)

        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]

    # Fix up the list of target suites
    cnf = Config()
    for suite in changes["suite"].keys():
        override = cnf.Find("Suite::%s::OverrideSuite" % (suite))
        if override:
            (olderr, newerr) = (get_suite(suite, session) is None,
                                get_suite(override, session) is None)
            if olderr or newerr:
                (oinv, ninv) = ("", "")
                if olderr: oinv = "invalid "
                if newerr: ninv = "invalid "
                print "warning: overriding %ssuite %s to %ssuite %s" % (
                        oinv, suite, ninv, override)
            del changes["suite"][suite]
            changes["suite"][override] = 1

    for suite in changes["suite"].keys():
        for pkg in new.keys():
            ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
            if len(ql) > 0:
                for file_entry in new[pkg]["files"]:
                    if files[file_entry].has_key("new"):
                        del files[file_entry]["new"]
                del new[pkg]

    if warn:
        for s in ['stable', 'oldstable']:
            if changes["suite"].has_key(s):
                print "WARNING: overrides will be added for %s!" % s
        for pkg in new.keys():
            if new[pkg].has_key("othercomponents"):
                print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])

    return new

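# A sketch of the structure determine_new() builds up (the values here are
# illustrative, not taken from a real upload):
#
#     new = {
#         "dak": {
#             "priority": "optional",
#             "section": "admin",
#             "type": "deb",
#             "component": "main",
#             "files": ["dak_1.0-1_all.deb"],
#         },
#     }
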
################################################################################

def check_valid(new, session=None):
    """
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages can not be udeb
      - source priority can only be assigned to dsc file types

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.

    """
    for pkg in new.keys():
        section_name = new[pkg]["section"]
        priority_name = new[pkg]["priority"]
        file_type = new[pkg]["type"]

        section = get_section(section_name, session)
        if section is None:
            new[pkg]["section id"] = -1
        else:
            new[pkg]["section id"] = section.section_id

        priority = get_priority(priority_name, session)
        if priority is None:
            new[pkg]["priority id"] = -1
        else:
            new[pkg]["priority id"] = priority.priority_id

        # Sanity checks
        di = section_name.find("debian-installer") != -1

        # If d-i, we must be udeb and vice-versa
        if     (di and file_type not in ("udeb", "dsc")) or \
           (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1

        # If dsc we need to be source and vice-versa.  Compare against the
        # priority *name*; ``priority`` itself is a database object here.
        if (priority_name == "source" and file_type != "dsc") or \
           (priority_name != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1

###############################################################################

# Used by Upload.check_timestamps
class TarTime(object):
    def __init__(self, future_cutoff, past_cutoff):
        self.reset()
        self.future_cutoff = future_cutoff
        self.past_cutoff = past_cutoff

    def reset(self):
        self.future_files = {}
        self.ancient_files = {}

    def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
        if MTime > self.future_cutoff:
            self.future_files[Name] = MTime
        if MTime < self.past_cutoff:
            self.ancient_files[Name] = MTime

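# A minimal usage sketch, assuming the old python-apt extraction API that this
# callback signature was written for (the cutoff values are illustrative):
#
#     tar = TarTime(future_cutoff=time.time() + 24 * 3600, past_cutoff=0)
#     deb = utils.open_file("dak_1.0-1_all.deb")
#     apt_inst.debExtract(deb, tar.callback, "data.tar.gz")
#     deb.close()
#     # tar.future_files / tar.ancient_files now map member names to mtimes
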
###############################################################################

class Upload(object):
    """
    Everything that has to do with processing an upload.

    """
    def __init__(self):
        self.logger = None
        self.pkg = Changes()
        self.reset()

    ###########################################################################

    def reset(self):
        """ Reset a number of internal variables."""

        # Initialize the substitution template map
        cnf = Config()
        self.Subst = {}
        self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
        self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
        self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
        self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]

        self.rejects = []
        self.warnings = []
        self.notes = []

        self.later_check_files = []

        self.pkg.reset()

    def package_info(self):
        """
        Format various messages from this Upload to send to the maintainer.
        """

        msgs = (
            ('Reject Reasons', self.rejects),
            ('Warnings', self.warnings),
            ('Notes', self.notes),
        )

        msg = ''
        for title, messages in msgs:
            if messages:
                msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
        msg += '\n\n'

        return msg

    ###########################################################################
    def update_subst(self):
        """ Set up the per-package template substitution mappings """

        cnf = Config()

        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not self.pkg.changes.has_key("architecture") or not \
           isinstance(self.pkg.changes["architecture"], dict):
            self.pkg.changes["architecture"] = { "Unknown" : "" }

        # and maintainer2047 may not exist.
        if not self.pkg.changes.has_key("maintainer2047"):
            self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]

        self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
        self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if self.pkg.changes["architecture"].has_key("source") and \
           self.pkg.changes["changedby822"] != "" and \
           (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):

            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
        else:
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")

        # Process policy doesn't set the fingerprint field and I don't want to make it
        # do it for now as I don't want to have to deal with the case where we accepted
        # the package into PU-NEW, but the fingerprint has gone away from the keyring in
        # the meantime so the package will be remarked as rejectable.  Urgh.
        # TODO: Fix this properly
        if self.pkg.changes.has_key('fingerprint'):
            session = DBConn().session()
            fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
            if self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
                if self.pkg.changes.has_key("sponsoremail"):
                    self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
            session.close()

        if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
            self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if cnf.get("Dinstall::OverrideMaintainer"):
            self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
            self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]

        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
        self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
        self.Subst["__SUITE__"] = self.pkg.changes.get("distribution", "Unknown")

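    # The Subst map feeds the mail templates; a sketch of the intended use,
    # assuming utils.TemplateSubst performs the expansion and the template
    # filename shown here is illustrative:
    #
    #     upload.update_subst()
    #     template = os.path.join(cnf["Dir::Templates"], "process-unchecked.announce")
    #     mail_message = utils.TemplateSubst(upload.Subst, template)
    #     utils.send_mail(mail_message)
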
    ###########################################################################
    def load_changes(self, filename):
        """
        Load a changes file and setup a dictionary around it. Also checks for
        mandatory fields within.

        @type filename: string
        @param filename: Changes filename, full path.

        @rtype: boolean
        @return: whether the changes file was valid or not.  We may want to
                 reject even if this is True (see what gets put in self.rejects).
                 This is simply to prevent us even trying things later which will
                 fail because we couldn't properly parse the file.
        """
        Cnf = Config()
        self.pkg.changes_file = filename

        # Parse the .changes field into a dictionary
        try:
            self.pkg.changes.update(parse_changes(filename))
        except CantOpenError:
            self.rejects.append("%s: can't read file." % (filename))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except ChangesUnicodeError:
            self.rejects.append("%s: changes file not proper utf-8" % (filename))
            return False

        # Parse the Files field from the .changes into another dictionary
        try:
            self.pkg.files.update(utils.build_file_list(self.pkg.changes))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (filename, format))
            return False

        # Check for mandatory fields
        for i in ("distribution", "source", "binary", "architecture",
                  "version", "maintainer", "files", "changes", "description"):
            if not self.pkg.changes.has_key(i):
                # Avoid undefined errors later
                self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
                return False

        # Strip a source version in brackets from the source field
        if re_strip_srcver.search(self.pkg.changes["source"]):
            self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])

        # Ensure the source field is a valid package name.
        if not re_valid_pkg_name.match(self.pkg.changes["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))

        # Split multi-value fields into a lower-level dictionary
        for i in ("architecture", "distribution", "binary", "closes"):
            o = self.pkg.changes.get(i, "")
            if o != "":
                del self.pkg.changes[i]

            self.pkg.changes[i] = {}

            for j in o.split():
                self.pkg.changes[i][j] = 1

        # Fix the Maintainer: field to be RFC822/2047 compatible
        try:
            (self.pkg.changes["maintainer822"],
             self.pkg.changes["maintainer2047"],
             self.pkg.changes["maintainername"],
             self.pkg.changes["maintaineremail"]) = \
                   fix_maintainer (self.pkg.changes["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["maintainer"], msg))

        # ...likewise for the Changed-By: field if it exists.
        try:
            (self.pkg.changes["changedby822"],
             self.pkg.changes["changedby2047"],
             self.pkg.changes["changedbyname"],
             self.pkg.changes["changedbyemail"]) = \
                   fix_maintainer (self.pkg.changes.get("changed-by", ""))
        except ParseMaintError, msg:
            self.pkg.changes["changedby822"] = ""
            self.pkg.changes["changedby2047"] = ""
            self.pkg.changes["changedbyname"] = ""
            self.pkg.changes["changedbyemail"] = ""

            self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["changed-by"], msg))

        # Ensure all the values in Closes: are numbers
        if self.pkg.changes.has_key("closes"):
            for i in self.pkg.changes["closes"].keys():
                if re_isanum.match(i) is None:
                    self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))

        # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
        self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
        self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])

        # Check the .changes is non-empty
        if not self.pkg.files:
            self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
            return False

        # Changes was syntactically valid even if we'll reject
        return True

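    # Typical calling pattern, as a sketch (dak's process-upload is the real
    # driver; the path here is illustrative):
    #
    #     u = Upload()
    #     if u.load_changes("/path/to/foo_1.0-1_amd64.changes"):
    #         u.check_distributions()
    #         u.check_files(action=False)
    #     if u.rejects:
    #         print u.package_info()
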
    ###########################################################################

    def check_distributions(self):
        "Check and map the Distribution field"

        Cnf = Config()

        # Handle suite mappings
        for m in Cnf.ValueList("SuiteMappings"):
            args = m.split()
            mtype = args[0]
            if mtype == "map" or mtype == "silent-map":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    del self.pkg.changes["distribution"][source]
                    self.pkg.changes["distribution"][dest] = 1
                    if mtype != "silent-map":
                        self.notes.append("Mapping %s to %s." % (source, dest))
                if self.pkg.changes.has_key("distribution-version"):
                    if self.pkg.changes["distribution-version"].has_key(source):
                        self.pkg.changes["distribution-version"][source] = dest
            elif mtype == "map-unreleased":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    for arch in self.pkg.changes["architecture"].keys():
                        if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
                            self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
                            del self.pkg.changes["distribution"][source]
                            self.pkg.changes["distribution"][dest] = 1
                            break
            elif mtype == "ignore":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    del self.pkg.changes["distribution"][suite]
                    self.warnings.append("Ignoring %s as a target suite." % (suite))
            elif mtype == "reject":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    self.rejects.append("Uploads to %s are not accepted." % (suite))
            elif mtype == "propup-version":
                # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
                #
                # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
                if self.pkg.changes["distribution"].has_key(args[1]):
                    self.pkg.changes.setdefault("distribution-version", {})
                    for suite in args[2:]:
                        self.pkg.changes["distribution-version"][suite] = suite

        # Ensure there is (still) a target distribution
        if len(self.pkg.changes["distribution"].keys()) < 1:
            self.rejects.append("No valid distribution remaining.")

        # Ensure target distributions exist
        for suite in self.pkg.changes["distribution"].keys():
            if not Cnf.has_key("Suite::%s" % (suite)):
                self.rejects.append("Unknown distribution `%s'." % (suite))

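    # SuiteMappings entries in dak.conf look roughly like the following (the
    # suite names are illustrative; the keyword is always the first token):
    #
    #     SuiteMappings
    #     {
    #       "map stable proposed-updates";
    #       "map-unreleased stable unstable";
    #       "propup-version stable-security testing testing-proposed-updates";
    #       "reject experimental-security";
    #     };
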
    ###########################################################################

    def binary_file_checks(self, f, session):
        cnf = Config()
        entry = self.pkg.files[f]

        # Extract package control information
        deb_file = utils.open_file(f)
        try:
            control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
        except:
            self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
            deb_file.close()
            # Can't continue, none of the checks on control would work.
            return

        # Check for mandatory "Description:" (the field lookup raises if it is missing)
        deb_file.seek(0)
        try:
            apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
        except:
            self.rejects.append("%s: Missing Description in binary package" % (f))
            deb_file.close()
            return

        deb_file.close()

        # Check for mandatory fields
        for field in [ "Package", "Architecture", "Version" ]:
            if control.Find(field) == None:
                # Can't continue
                self.rejects.append("%s: No %s field in control." % (f, field))
                return

        # Ensure the package name matches the one given in the .changes
        if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
            self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))

        # Validate the package field
        package = control.Find("Package")
        if not re_valid_pkg_name.match(package):
            self.rejects.append("%s: invalid package name '%s'." % (f, package))

        # Validate the version field
        version = control.Find("Version")
        if not re_valid_version.match(version):
            self.rejects.append("%s: invalid version number '%s'." % (f, version))

        # Ensure the architecture of the .deb is one we know about.
        default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
        architecture = control.Find("Architecture")
        upload_suite = self.pkg.changes["distribution"].keys()[0]

        if      architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session = session)] \
            and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session = session)]:
            self.rejects.append("Unknown architecture '%s'." % (architecture))

        # Ensure the architecture of the .deb is one of the ones
        # listed in the .changes.
        if not self.pkg.changes["architecture"].has_key(architecture):
            self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))

        # Sanity-check the Depends field
        depends = control.Find("Depends")
        if depends == '':
            self.rejects.append("%s: Depends field is empty." % (f))

        # Sanity-check the Provides field
        provides = control.Find("Provides")
        if provides:
            provide = re_spacestrip.sub('', provides)
            if provide == '':
                self.rejects.append("%s: Provides field is empty." % (f))
            prov_list = provide.split(",")
            for prov in prov_list:
                if not re_valid_pkg_name.match(prov):
                    self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))

        # Check the section & priority match those given in the .changes (non-fatal)
        if     control.Find("Section") and entry["section"] != "" \
           and entry["section"] != control.Find("Section"):
            self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
                                (f, control.Find("Section", ""), entry["section"]))
        if control.Find("Priority") and entry["priority"] != "" \
           and entry["priority"] != control.Find("Priority"):
            self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
                                (f, control.Find("Priority", ""), entry["priority"]))

        entry["package"] = package
        entry["architecture"] = architecture
        entry["version"] = version
        entry["maintainer"] = control.Find("Maintainer", "")

        if f.endswith(".udeb"):
            self.pkg.files[f]["dbtype"] = "udeb"
        elif f.endswith(".deb"):
            self.pkg.files[f]["dbtype"] = "deb"
        else:
            self.rejects.append("%s is neither a .deb nor a .udeb." % (f))

        entry["source"] = control.Find("Source", entry["package"])

        # Get the source version
        source = entry["source"]
        source_version = ""

        if source.find("(") != -1:
            m = re_extract_src_version.match(source)
            source = m.group(1)
            source_version = m.group(2)

        if not source_version:
            source_version = self.pkg.files[f]["version"]

        entry["source package"] = source
        entry["source version"] = source_version

        # Ensure the filename matches the contents of the .deb
        m = re_isadeb.match(f)

        #  package name
        file_package = m.group(1)
        if entry["package"] != file_package:
            self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
                                (f, file_package, entry["dbtype"], entry["package"]))
        epochless_version = re_no_epoch.sub('', control.Find("Version"))

        #  version
        file_version = m.group(2)
        if epochless_version != file_version:
            self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
                                (f, file_version, entry["dbtype"], epochless_version))

        #  architecture
        file_architecture = m.group(3)
        if entry["architecture"] != file_architecture:
            self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
                                (f, file_architecture, entry["dbtype"], entry["architecture"]))

        # Check for existent source
        source_version = entry["source version"]
        source_package = entry["source package"]
        if self.pkg.changes["architecture"].has_key("source"):
            if source_version != self.pkg.changes["version"]:
                self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
                                    (source_version, f, self.pkg.changes["version"]))
        else:
            # Check in the SQL database
            if not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
                # Check in one of the other directories
                source_epochless_version = re_no_epoch.sub('', source_version)
                dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
                if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
                    entry["byhand"] = 1
                elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
                    entry["new"] = 1
                else:
                    dsc_file_exists = False
                    for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates", "Lenny-Volatile-Proposed-Updates"]:
                        if cnf.has_key("Dir::Queue::%s" % (myq)):
                            if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
                                dsc_file_exists = True
                                break

                    if not dsc_file_exists:
                        self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))

        # Check the version and for file overwrites
        self.check_binary_against_db(f, session)

        # Temporarily disable contents generation until we change the table storage layout
        #b = Binary(f)
        #b.scan_package()
        #if len(b.rejects) > 0:
        #    for j in b.rejects:
        #        self.rejects.append(j)

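    # For reference, re_isadeb is expected to split a conventional Debian
    # binary filename into (package, version, architecture), e.g. (a sketch):
    #
    #     m = re_isadeb.match("dak_1.0-1_all.deb")
    #     m.group(1), m.group(2), m.group(3)  # -> ("dak", "1.0-1", "all")
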
    def source_file_checks(self, f, session):
        entry = self.pkg.files[f]

        m = re_issource.match(f)
        if not m:
            return

        entry["package"] = m.group(1)
        entry["version"] = m.group(2)
        entry["type"] = m.group(3)

        # Ensure the source package name matches the Source field in the .changes
        if self.pkg.changes["source"] != entry["package"]:
            self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))

        # Ensure the source version matches the version in the .changes file
        if re_is_orig_source.match(f):
            changes_version = self.pkg.changes["chopversion2"]
        else:
            changes_version = self.pkg.changes["chopversion"]

        if changes_version != entry["version"]:
            self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))

        # Ensure the .changes lists source in the Architecture field
        if not self.pkg.changes["architecture"].has_key("source"):
            self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))

        # Check the signature of a .dsc file
        if entry["type"] == "dsc":
            # check_signature returns either:
            #  (None, [list, of, rejects]) or (signature, [])
            (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
            for j in rejects:
                self.rejects.append(j)

        entry["architecture"] = "source"

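    # Similarly, re_issource is expected to decompose source filenames into
    # (package, version, type), e.g. (illustrative): "dak_1.0-1.dsc" ->
    # ("dak", "1.0-1", "dsc") and "dak_1.0.orig.tar.gz" -> ("dak", "1.0",
    # "orig.tar.gz").
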
    def per_suite_file_checks(self, f, suite, session):
        cnf = Config()
        entry = self.pkg.files[f]

        # Skip byhand
        if entry.has_key("byhand"):
            return

        # Check we have fields we need to do these checks
        oktogo = True
        for m in ['component', 'package', 'priority', 'size', 'md5sum']:
            if not entry.has_key(m):
                self.rejects.append("file '%s' does not have field %s set" % (f, m))
                oktogo = False

        if not oktogo:
            return

        # Handle component mappings
        for m in cnf.ValueList("ComponentMappings"):
            (source, dest) = m.split()
            if entry["component"] == source:
                entry["original component"] = source
                entry["component"] = dest

        # Ensure the component is valid for the target suite
        if cnf.has_key("Suite::%s::Components" % (suite)) and \
           entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
            self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
            return

        # Validate the component
        if not get_component(entry["component"], session):
            self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
            return

        # See if the package is NEW
        if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
            entry["new"] = 1

        # Validate the priority
        if entry["priority"].find('/') != -1:
            self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))

        # Determine the location
        location = cnf["Dir::Pool"]
        l = get_location(location, entry["component"], session=session)
        if l is None:
            self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
            entry["location id"] = -1
        else:
            entry["location id"] = l.location_id

        # Check the md5sum & size against existing files (if any)
        entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])

        found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
                                         entry["size"], entry["md5sum"], entry["location id"])

        if found is None:
            self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
        elif found is False and poolfile is not None:
            self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
        else:
            if poolfile is None:
                entry["files id"] = None
            else:
                entry["files id"] = poolfile.file_id

        # Check for packages that have moved from one component to another
        entry['suite'] = suite
        res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
        if res.rowcount > 0:
            entry["othercomponents"] = res.fetchone()[0]

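    # ComponentMappings entries are simple "source dest" pairs, e.g. (an
    # illustrative dak.conf snippet):
    #
    #     ComponentMappings
    #     {
    #       "non-US/main main";
    #       "non-US/contrib contrib";
    #     };
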
    def check_files(self, action=True):
        file_keys = self.pkg.files.keys()
        holding = Holding()
        cnf = Config()

        if action:
            cwd = os.getcwd()
            os.chdir(self.pkg.directory)
            for f in file_keys:
                ret = holding.copy_to_holding(f)
                if ret is not None:
                    self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)

            os.chdir(cwd)

        # check we already know the changes file
        # [NB: this check must be done post-suite mapping]
        base_filename = os.path.basename(self.pkg.changes_file)

        session = DBConn().session()

        try:
            dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
            # if in the pool or in a queue other than unchecked, reject
            if (dbc.in_queue is None) \
                   or (dbc.in_queue is not None
                       and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
                self.rejects.append("%s file already known to dak" % base_filename)
        except NoResultFound, e:
            # not known, good
            pass

        has_binaries = False
        has_source = False

        for f, entry in self.pkg.files.items():
            # Ensure the file does not already exist in one of the accepted directories
            for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                if not cnf.has_key("Dir::Queue::%s" % (d)): continue
                if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d)], f)):
                    self.rejects.append("%s file already exists in the %s directory." % (f, d))

            if not re_taint_free.match(f):
                self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))

            # Check the file is readable
            if os.access(f, os.R_OK) == 0:
                # When running in -n, copy_to_holding() won't have
                # generated the reject_message, so we need to.
                if action:
                    if os.path.exists(f):
                        self.rejects.append("Can't read `%s'. [permission denied]" % (f))
                    else:
                        # Don't directly reject, mark to check later to deal with orig's
                        # we can find in the pool
                        self.later_check_files.append(f)
                entry["type"] = "unreadable"
                continue

            # If it's byhand skip remaining checks
            if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Checks for a binary package...
            elif re_isadeb.match(f):
                has_binaries = True
                entry["type"] = "deb"

                # This routine appends to self.rejects/warnings as appropriate
                self.binary_file_checks(f, session)

            # Checks for a source package...
            elif re_issource.match(f):
                has_source = True

                # This routine appends to self.rejects/warnings as appropriate
                self.source_file_checks(f, session)

            # Not a binary or source package?  Assume byhand...
            else:
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Per-suite file checks
            entry["oldfiles"] = {}
            for suite in self.pkg.changes["distribution"].keys():
                self.per_suite_file_checks(f, suite, session)

        session.close()

        # If the .changes file says it has source, it must have source.
        if self.pkg.changes["architecture"].has_key("source"):
            if not has_source:
                self.rejects.append("no source found, but Architecture line in changes mentions source.")

            if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
                self.rejects.append("source only uploads are not supported.")

    ###########################################################################
    def check_dsc(self, action=True, session=None):
        """Returns bool indicating whether or not the source changes are valid"""
        # Ensure there is source to check
        if not self.pkg.changes["architecture"].has_key("source"):
            return True

        # Find the .dsc
        dsc_filename = None
        for f, entry in self.pkg.files.items():
            if entry["type"] == "dsc":
                if dsc_filename:
                    self.rejects.append("can not process a .changes file with multiple .dsc's.")
                    return False
                else:
                    dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            self.rejects.append("source uploads must contain a dsc file")
            return False

        # Parse the .dsc file
        try:
            self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1, dsc_file=1))
        except CantOpenError:
            # if not -n copy_to_holding() will have done this for us...
            if not action:
                self.rejects.append("%s: can't read file." % (dsc_filename))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
        except InvalidDscError, line:
            self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
        except ChangesUnicodeError:
            self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))

        # Build up the file list of files mentioned by the .dsc
        try:
            self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
        except NoFilesFieldError:
            self.rejects.append("%s: no Files: field." % (dsc_filename))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
            return False

        # Enforce mandatory fields
        for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
            if not self.pkg.dsc.has_key(i):
                self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
                return False

        # Validate the source and version fields
        if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
        if not re_valid_version.match(self.pkg.dsc["version"]):
            self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))

        # Only a limited list of source formats are allowed in each suite
        for dist in self.pkg.changes["distribution"].keys():
            allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
            if self.pkg.dsc["format"] not in allowed:
                self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))

        # Validate the Maintainer field
        try:
            # We ignore the return value
            fix_maintainer(self.pkg.dsc["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                                 % (dsc_filename, self.pkg.dsc["maintainer"], msg))

        # Validate the build-depends field(s)
        for field_name in [ "build-depends", "build-depends-indep" ]:
            field = self.pkg.dsc.get(field_name)
            if field:
                # Have apt try to parse them...
                try:
                    apt_pkg.ParseSrcDepends(field)
                except:
                    self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))

        # Ensure the version number in the .dsc matches the version number in the .changes
        epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        changes_version = self.pkg.files[dsc_filename]["version"]

        if epochless_dsc_version != changes_version:
            self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))

        # Ensure the Files field contain only what's expected
        self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))

        # Ensure source is newer than existing source in target suites
        session = DBConn().session()
        self.check_source_against_db(dsc_filename, session)
        self.check_dsc_against_db(dsc_filename, session)
        session.close()

        # Finally, check if we're missing any files
        for f in self.later_check_files:
            self.rejects.append("Could not find file %s referenced in changes" % f)

        return True

    ###########################################################################

    def get_changelog_versions(self, source_dir):
        """Extracts the source package and (optionally) grabs the
        version history out of debian/changelog for the BTS."""

        cnf = Config()

        # Find the .dsc (again)
        dsc_filename = None
        for f in self.pkg.files.keys():
            if self.pkg.files[f]["type"] == "dsc":
                dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            return

        # Create a symlink mirror of the source files in our temporary directory
        for f in self.pkg.files.keys():
            m = re_issource.match(f)
            if m:
                src = os.path.join(source_dir, f)
                # If a file is missing for whatever reason, give up.
                if not os.path.exists(src):
                    return
                ftype = m.group(3)
                if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
                   self.pkg.orig_files[f].has_key("path"):
                    continue
                dest = os.path.join(os.getcwd(), f)
                os.symlink(src, dest)

        # If the orig files are not a part of the upload, create symlinks to the
        # existing copies.
        for orig_file in self.pkg.orig_files.keys():
            if not self.pkg.orig_files[orig_file].has_key("path"):
                continue
            dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
            os.symlink(self.pkg.orig_files[orig_file]["path"], dest)

        # Extract the source
        cmd = "dpkg-source -sn -x %s" % (dsc_filename)
        (result, output) = commands.getstatusoutput(cmd)
        if (result != 0):
            self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
            self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
            return

        if not cnf.Find("Dir::Queue::BTSVersionTrack"):
            return

        # Get the upstream version
        upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        if re_strip_revision.search(upstr_version):
            upstr_version = re_strip_revision.sub('', upstr_version)

        # Ensure the changelog file exists
        changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
        if not os.path.exists(changelog_filename):
            self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
            return

        # Parse the changelog
        self.pkg.dsc["bts changelog"] = ""
        changelog_file = utils.open_file(changelog_filename)
        for line in changelog_file.readlines():
            m = re_changelog_versions.match(line)
            if m:
                self.pkg.dsc["bts changelog"] += line
        changelog_file.close()

        # Check we found at least one revision in the changelog
        if not self.pkg.dsc["bts changelog"]:
            self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))

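    # re_changelog_versions is expected to match changelog entry headers such
    # as (illustrative): "dak (1.0-2) unstable; urgency=low", so the collected
    # "bts changelog" ends up holding one header line per version.
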
    def check_source(self):
        # Bail out if:
        #    a) there's no source
        if not self.pkg.changes["architecture"].has_key("source"):
            return

        tmpdir = utils.temp_dirname()

        # Move into the temporary directory
        cwd = os.getcwd()
        os.chdir(tmpdir)

        # Get the changelog version history
        self.get_changelog_versions(cwd)

        # Move back and cleanup the temporary tree
        os.chdir(cwd)

        try:
            shutil.rmtree(tmpdir)
        except OSError, e:
            if e.errno != errno.EACCES:
                print "foobar"
                utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))

            self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
            # We probably have u-r or u-w directories so chmod everything
            # and try again.
            cmd = "chmod -R u+rwx %s" % (tmpdir)
            result = os.system(cmd)
            if result != 0:
                utils.fubar("'%s' failed with result %s." % (cmd, result))
            shutil.rmtree(tmpdir)
        except Exception, e:
            print "foobar2 (%s)" % e
            utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))

    ###########################################################################
    def ensure_hashes(self):
        # Make sure we recognise the format of the Files: field in the .changes
        format = self.pkg.changes.get("format", "0.0").split(".", 1)
        if len(format) == 2:
            format = int(format[0]), int(format[1])
        else:
            format = int(float(format[0])), 0
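        # e.g. "1.8" -> (1, 8), while a bare major version like "1" -> (1, 0)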

        # We need to deal with the original changes blob, as the fields we need
        # might not be in the changes dict serialised into the .dak anymore.
        orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])

        # Copy the checksums over to the current changes dict.  This will keep
        # the existing modifications to it intact.
        for field in orig_changes:
            if field.startswith('checksums-'):
                self.pkg.changes[field] = orig_changes[field]

        # Check for unsupported hashes
        for j in utils.check_hash_fields(".changes", self.pkg.changes):
            self.rejects.append(j)

        for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
            self.rejects.append(j)

        # We have to calculate the hash if we have an earlier changes version than
        # the hash appears in rather than require it exist in the changes file
        for hashname, hashfunc, version in utils.known_hashes:
            # TODO: Move _ensure_changes_hash into this class
            for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
                self.rejects.append(j)
            if "source" in self.pkg.changes["architecture"]:
                # TODO: Move _ensure_dsc_hash into this class
                for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
                    self.rejects.append(j)

    def check_hashes(self):
        for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
            self.rejects.append(m)

        for m in utils.check_size(".changes", self.pkg.files):
            self.rejects.append(m)

        for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
            self.rejects.append(m)

        for m in utils.check_size(".dsc", self.pkg.dsc_files):
            self.rejects.append(m)

        self.ensure_hashes()

    ###########################################################################

    def ensure_orig(self, target_dir='.', session=None):
        """
        Ensures that all orig files mentioned in the changes file are present
        in target_dir. If they do not exist, they are symlinked into place.

        A list containing the symlinks that were created is returned (so they
        can be removed).
        """

        symlinked = []
        cnf = Config()

        for filename, entry in self.pkg.dsc_files.iteritems():
            if not re_is_orig_source.match(filename):
                # File is not an orig; ignore
                continue

            if os.path.exists(filename):
                # File exists, no need to continue
                continue

            def symlink_if_valid(path):
                f = utils.open_file(path)
                md5sum = apt_pkg.md5sum(f)
                f.close()

                fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
                expected = (int(entry['size']), entry['md5sum'])

                if fingerprint != expected:
                    return False

                dest = os.path.join(target_dir, filename)

                os.symlink(path, dest)
                symlinked.append(dest)

                return True

            session_ = session
            if session is None:
                session_ = DBConn().session()

            found = False

            # Look in the pool
            for poolfile in get_poolfile_like_name('%s' % filename, session_):
                poolfile_path = os.path.join(
                    poolfile.location.path, poolfile.filename
                )

                if symlink_if_valid(poolfile_path):
                    found = True
                    break

            if session is None:
                session_.close()

            if found:
                continue

            # Look in some other queues for the file
            queues = ('New', 'Byhand', 'ProposedUpdates',
                'OldProposedUpdates', 'Embargoed', 'Unembargoed')

            for queue in queues:
                if not cnf.get('Dir::Queue::%s' % queue):
                    continue

                queuefile_path = os.path.join(
                    cnf['Dir::Queue::%s' % queue], filename
                )

                if not os.path.exists(queuefile_path):
                    # Does not exist in this queue
                    continue

                if symlink_if_valid(queuefile_path):
                    break

        return symlinked

1272     ###########################################################################
1273
1274     def check_lintian(self):
1275         """
1276         Extends self.rejects by checking the output of lintian against tags
1277         specified in Dinstall::LintianTags.
1278         """
1279
1280         cnf = Config()
1281
1282         # Don't reject binary uploads
1283         if not self.pkg.changes['architecture'].has_key('source'):
1284             return
1285
1286         # Only check some distributions
1287         for dist in ('unstable', 'experimental'):
1288             if dist in self.pkg.changes['distribution']:
1289                 break
1290         else:
1291             return
1292
1293         # If we do not have a tagfile, don't do anything
1294         tagfile = cnf.get("Dinstall::LintianTags")
1295         if tagfile is None:
1296             return
1297
1298         # Parse the yaml file
1299         sourcefile = file(tagfile, 'r')
1300         sourcecontent = sourcefile.read()
1301         sourcefile.close()
1302
1303         try:
1304             lintiantags = yaml.load(sourcecontent)['lintian']
1305         except yaml.YAMLError, msg:
1306             utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
1307             return
1308
1309         # Try and find all orig files mentioned in the .dsc
1310         symlinked = self.ensure_orig()
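        # ensure_orig() returns the symlinks it created so that the finally:
        # block below can clean them up once lintian has run.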
1311
1312         # Setup the input file for lintian
1313         fd, temp_filename = utils.temp_filename()
1314         temptagfile = os.fdopen(fd, 'w')
1315         for tags in lintiantags.values():
1316             temptagfile.writelines(['%s\n' % x for x in tags])
1317         temptagfile.close()
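        # The temp file now holds one tag per line, which is what lintian's
        # --tags-from-file option expects; lintiantags itself keeps the
        # category -> [tags] mapping for generate_reject_messages() below.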
1318
1319         try:
1320             cmd = "lintian --show-overrides --tags-from-file %s %s" % \
1321                 (temp_filename, self.pkg.changes_file)
1322
1323             result, output = commands.getstatusoutput(cmd)
1324         finally:
1325             # Remove our tempfile and any symlinks we created
1326             os.unlink(temp_filename)
1327
1328             for symlink in symlinked:
1329                 os.unlink(symlink)
1330
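        # lintian is assumed to use exit status 2 for an internal failure (as
        # opposed to 0 for a clean run and 1 for policy violations), so we
        # warn and carry on with whatever output we got.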
1331         if result == 2:
1332             utils.warn("lintian failed for %s [return code: %s]." % \
1333                 (self.pkg.changes_file, result))
1334             utils.warn(utils.prefix_multi_line_string(output, \
1335                 " [possible output:] "))
1336
1337         def log(*txt):
1338             if self.logger:
1339                 self.logger.log(
1340                     [self.pkg.changes_file, "check_lintian"] + list(txt)
1341                 )
1342
1343         # Generate messages
1344         parsed_tags = parse_lintian_output(output)
1345         self.rejects.extend(
1346             generate_reject_messages(parsed_tags, lintiantags, log=log)
1347         )
1348
1349     ###########################################################################
1350     def check_urgency(self):
1351         cnf = Config()
1352         if self.pkg.changes["architecture"].has_key("source"):
1353             if not self.pkg.changes.has_key("urgency"):
1354                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1355             self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1356             if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1357                 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1358                                      (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1359                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1360
1361     ###########################################################################
1362
1363     # Sanity check the time stamps of files inside debs.
1364     # [Files in the near future cause ugly warnings and extreme time
1365     #  travel can cause errors on extraction]
1366
1367     def check_timestamps(self):
1368         Cnf = Config()
1369
1370         future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1371         past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1372         tar = TarTime(future_cutoff, past_cutoff)
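        # TarTime.callback is assumed to record members whose mtime falls
        # outside [past_cutoff, future_cutoff] in tar.ancient_files and
        # tar.future_files respectively (both mapping name -> mtime), which is
        # what we inspect after extraction below.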
1373
1374         for filename, entry in self.pkg.files.items():
1375             if entry["type"] == "deb":
1376                 tar.reset()
1377                 try:
1378                     deb_file = utils.open_file(filename)
1379                     apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
1380                     deb_file.seek(0)
1381                     try:
1382                         apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
1383                     except SystemError, e:
1384                         # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
1385                         if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
1386                             raise
1387                         deb_file.seek(0)
1388                         apt_inst.debExtract(deb_file, tar.callback, "data.tar.bz2")
1389
1390                     deb_file.close()
1391
1392                     future_files = tar.future_files.keys()
1393                     if future_files:
1394                         num_future_files = len(future_files)
1395                         future_file = future_files[0]
1396                         future_date = tar.future_files[future_file]
1397                         self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1398                                % (filename, num_future_files, future_file, time.ctime(future_date)))
1399
1400                     ancient_files = tar.ancient_files.keys()
1401                     if ancient_files:
1402                         num_ancient_files = len(ancient_files)
1403                         ancient_file = ancient_files[0]
1404                         ancient_date = tar.ancient_files[ancient_file]
1405                         self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1406                                % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1407                 except:
1408                     self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1409
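    # An upload counts as sponsored when the signing key's uid matches neither
    # the Maintainer: nor the Changed-By: details of the upload (an empty uid
    # name also counts as sponsored, since we cannot match on it).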
1410     def check_if_upload_is_sponsored(self, uid_email, uid_name):
1411         if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1412             sponsored = False
1413         elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1414             sponsored = False
1415             if uid_name == "":
1416                 sponsored = True
1417         else:
1418             sponsored = True
1419             if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1420                 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1421                 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1422                     self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1423                         self.pkg.changes["sponsoremail"] = uid_email
1424
1425         return sponsored
1426
1427
1428     ###########################################################################
1429     # check_signed_by_key checks
1430     ###########################################################################
1431
1432     def check_signed_by_key(self):
1433         """Ensure the .changes is signed by an authorized uploader."""
1434         session = DBConn().session()
1435
1436         # First of all we check that the person has proper upload permissions
1437         # and that this upload isn't blocked
1438         fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1439
1440         if fpr is None:
1441             self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1442             return
1443
1444         # TODO: Check that import-keyring adds UIDs properly
1445         if not fpr.uid:
1446             self.rejects.append("Cannot find uid for fingerprint %s.  Please contact ftpmaster@debian.org" % fpr.fingerprint)
1447             return
1448
1449         # Check that the fingerprint which uploaded has permission to do so
1450         self.check_upload_permissions(fpr, session)
1451
1452         # Check that this package is not in a transition
1453         self.check_transition(session)
1454
1455         session.close()
1456
1457
1458     def check_upload_permissions(self, fpr, session):
1459         # Check any one-off upload blocks
1460         self.check_upload_blocks(fpr, session)
1461
1462         # Start with DM; it is unfortunately a special case and has to be
1463         # checked first
1464         # (keys with no source access get more access than DMs in one
1465         #  way; DMs can only upload for their packages whether source
1466         #  or binary, whereas keys with no access might be able to
1467         #  upload some binaries)
1468         if fpr.source_acl.access_level == 'dm':
1469             self.check_dm_upload(fpr, session)
1470         else:
1471             # Check source-based permissions for other types
1472             if self.pkg.changes["architecture"].has_key("source") and \
1473                 fpr.source_acl.access_level is None:
1474                 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1475                 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1476                 self.rejects.append(rej)
1477                 return
1478             # If not a DM, we allow full upload rights
1479             uid_email = "%s@debian.org" % (fpr.uid.uid)
1480             self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1481
1482
1483         # Check binary upload permissions
1484         # By this point we know that DMs can't have got here unless they
1485         # are allowed to deal with the package concerned so just apply
1486         # normal checks
1487         if fpr.binary_acl.access_level == 'full':
1488             return
1489
1490         # Otherwise we're in the map case
1491         tmparches = self.pkg.changes["architecture"].copy()
1492         tmparches.pop('source', None)
1493
1494         for bam in fpr.binary_acl_map:
1495             tmparches.pop(bam.architecture.arch_string, None)
1496
1497         if len(tmparches.keys()) > 0:
1498             if fpr.binary_reject:
1499                 rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1500                 rej += "\narchitectures involved are: %s" % ",".join(tmparches.keys())
1501                 self.rejects.append(rej)
1502             else:
1503                 # TODO: This is where we'll implement reject vs throw away binaries later
1504                 rej = "Uhm.  I'm meant to throw away the binaries now but that's not implemented yet"
1505                 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1506                 rej += "\nFingerprint: %s" % (fpr.fingerprint)
1507                 self.rejects.append(rej)
1508
1509
1510     def check_upload_blocks(self, fpr, session):
1511         """Check whether any upload blocks apply to this source, source
1512            version, uid / fpr combination"""
1513
1514         def block_rej_template(fb):
1515             rej = 'Manual upload block in place for package %s' % fb.source
1516             if fb.version is not None:
1517                 rej += ', version %s' % fb.version
1518             return rej
1519
1520         for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1521             # version is None if the block applies to all versions
1522             if fb.version is None or fb.version == self.pkg.changes['version']:
1523                 # Check both fpr and uid - either is enough to cause a reject
1524                 if fb.fpr is not None:
1525                     if fb.fpr.fingerprint == fpr.fingerprint:
1526                         self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1527                 if fb.uid is not None:
1528                     if fb.uid == fpr.uid:
1529                         self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1530
1531
1532     def check_dm_upload(self, fpr, session):
1533         # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
1534         ## none of the uploaded packages are NEW
1535         rej = False
1536         for f in self.pkg.files.keys():
1537             if self.pkg.files[f].has_key("byhand"):
1538                 self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
1539                 rej = True
1540             if self.pkg.files[f].has_key("new"):
1541                 self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
1542                 rej = True
1543
1544         if rej:
1545             return
1546
1547         ## the most recent version of the package uploaded to unstable or
1548         ## experimental includes the field "DM-Upload-Allowed: yes" in the source
1549         ## section of its control file
1550         q = session.query(DBSource).filter_by(source=self.pkg.changes["source"])
1551         q = q.join(SrcAssociation)
1552         q = q.join(Suite).filter(Suite.suite_name.in_(['unstable', 'experimental']))
1553         q = q.order_by(desc('source.version')).limit(1)
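        # i.e. fetch only the single most recent version of this source
        # package currently in unstable or experimental.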
1554
1555         r = q.all()
1556
1557         if len(r) != 1:
1558             rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
1559             self.rejects.append(rej)
1560             return
1561
1562         r = r[0]
1563         if not r.dm_upload_allowed:
1564             rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
1565             self.rejects.append(rej)
1566             return
1567
1568         ## the Maintainer: field of the uploaded .changes file corresponds with
1569         ## the owner of the key used (ie, non-developer maintainers may not sponsor
1570         ## uploads)
1571         if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
1572             self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
1573
1574         ## the most recent version of the package uploaded to unstable or
1575         ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
1576         ## non-developer maintainers cannot NMU or hijack packages)
1577
1578         # srcuploaders includes the maintainer
1579         accept = False
1580         for sup in r.srcuploaders:
1581             (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
1582             # Eww - I hope we never have two people with the same name in Debian
1583             if email == fpr.uid.uid or name == fpr.uid.name:
1584                 accept = True
1585                 break
1586
1587         if not accept:
1588             self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
1589             return
1590
1591         ## none of the packages are being taken over from other source packages
1592         for b in self.pkg.changes["binary"].keys():
1593             for suite in self.pkg.changes["distribution"].keys():
1594                 q = session.query(DBSource)
1595                 q = q.join(DBBinary).filter_by(package=b)
1596                 q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)
1597
1598                 for s in q.all():
1599                     if s.source != self.pkg.changes["source"]:
1600                         self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s, suite))
1601
1602
1603
1604     def check_transition(self, session):
1605         cnf = Config()
1606
1607         sourcepkg = self.pkg.changes["source"]
1608
1609         # No sourceful upload -> no need to do anything else, direct return
1610         # We also only act on uploads to unstable, not experimental or those
1611         # going to some proposed-updates queue
1612         if "source" not in self.pkg.changes["architecture"] or \
1613            "unstable" not in self.pkg.changes["distribution"]:
1614             return
1615
1616         # Also, only check if there is a file with the checks defined (and
1617         # it actually exists).
1618         transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1619         if transpath == "" or not os.path.exists(transpath):
1620             return
1621
1622         # Parse the yaml file
1623         sourcefile = file(transpath, 'r')
1624         sourcecontent = sourcefile.read()
1625         try:
1626             transitions = yaml.load(sourcecontent)
1627         except yaml.YAMLError, msg:
1628             # This shouldn't happen, there is a wrapper to edit the file which
1629             # checks it, but we would rather be safe than end up rejecting
1630             # everything.
1631             utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1632             return
1633
1634         # Now look through all defined transitions
1635         for trans in transitions:
1636             t = transitions[trans]
1637             source = t["source"]
1638             expected = t["new"]
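            # Each transition entry is expected to look roughly like:
            #   <name>: {source: <src pkg>, new: <version we are waiting for>,
            #            rm: <responsible release-team member>,
            #            reason: <text>, packages: [<affected sources>]}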
1639
1640             # Will be None if nothing is in testing.
1641             current = get_source_in_suite(source, "testing", session)
1642             if current is not None:
1643                 compare = apt_pkg.VersionCompare(current.version, expected)
1644
1645             if current is None or compare < 0:
1646                 # This is still valid, the current version in testing is older than
1647                 # the new version we wait for, or there is none in testing yet
1648
1649                 # Check if the source we look at is affected by this.
1650                 if sourcepkg in t['packages']:
1651                     # The source is affected, let's reject it.
1652
1653                     rejectmsg = "%s: part of the %s transition.\n\n" % (
1654                         sourcepkg, trans)
1655
1656                     if current is not None:
1657                         currentlymsg = "at version %s" % (current.version)
1658                     else:
1659                         currentlymsg = "not present in testing"
1660
1661                     rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1662
1663                     rejectmsg += "\n".join(textwrap.wrap("""Your package
1664 is part of a testing transition designed to get %s migrated (it is
1665 currently %s, we need version %s).  This transition is managed by the
1666 Release Team, and %s is the Release-Team member responsible for it.
1667 Please mail debian-release@lists.debian.org or contact %s directly if you
1668 need further assistance.  You might want to upload to experimental until this
1669 transition is done."""
1670                             % (source, currentlymsg, expected, t["rm"], t["rm"])))
1671
1672                     self.rejects.append(rejectmsg)
1673                     return
1674
1675     ###########################################################################
1676     # End check_signed_by_key checks
1677     ###########################################################################
1678
1679     def build_summaries(self):
1680         """ Build a summary of changes the upload introduces. """
1681
1682         (byhand, new, summary, override_summary) = self.pkg.file_summary()
1683
1684         short_summary = summary
1685
1686         # This is for direport's benefit...
1687         f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1688
1689         if byhand or new:
1690             summary += "Changes: " + f
1691
1692         summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1693
1694         summary += self.announce(short_summary, 0)
1695
1696         return (summary, short_summary)
1697
1698     ###########################################################################
1699
1700     def close_bugs(self, summary, action):
1701         """
1702         Send mail to close bugs as instructed by the closes field in the changes file.
1703         Also add a line to summary if any work was done.
1704
1705         @type summary: string
1706         @param summary: summary text, as given by L{build_summaries}
1707
1708         @type action: bool
1709         @param action: If set to false, no real action will be done.
1710
1711         @rtype: string
1712         @return: summary. If action was taken, extended by the list of closed bugs.
1713
1714         """
1715
1716         template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1717
1718         bugs = self.pkg.changes["closes"].keys()
1719
1720         if not bugs:
1721             return summary
1722
1723         bugs.sort()
1724         summary += "Closing bugs: "
1725         for bug in bugs:
1726             summary += "%s " % (bug)
1727             if action:
1728                 self.update_subst()
1729                 self.Subst["__BUG_NUMBER__"] = bug
1730                 if self.pkg.changes["distribution"].has_key("stable"):
1731                     self.Subst["__STABLE_WARNING__"] = """
1732 Note that this package is not part of the released stable Debian
1733 distribution.  It may have dependencies on other unreleased software,
1734 or other instabilities.  Please take care if you wish to install it.
1735 The update will eventually make its way into the next released Debian
1736 distribution."""
1737                 else:
1738                     self.Subst["__STABLE_WARNING__"] = ""
1739                 mail_message = utils.TemplateSubst(self.Subst, template)
1740                 utils.send_mail(mail_message)
1741
1742                 # Clear up after ourselves
1743                 del self.Subst["__BUG_NUMBER__"]
1744                 del self.Subst["__STABLE_WARNING__"]
1745
1746         if action and self.logger:
1747             self.logger.log(["closing bugs"] + bugs)
1748
1749         summary += "\n"
1750
1751         return summary
1752
1753     ###########################################################################
1754
1755     def announce(self, short_summary, action):
1756         """
1757         Send an announce mail about a new upload.
1758
1759         @type short_summary: string
1760         @param short_summary: Short summary text to include in the mail
1761
1762         @type action: bool
1763         @param action: If set to false, no real action will be done.
1764
1765         @rtype: string
1766         @return: Textstring about action taken.
1767         @return: Text string describing the action taken.
1768         """
1769
1770         cnf = Config()
1771         announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
1772
1773         # Only do announcements for source uploads with a recent dpkg-dev installed
1774         if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
1775            self.pkg.changes["architecture"].has_key("source"):
1776             return ""
1777
1778         lists_done = {}
1779         summary = ""
1780
1781         self.Subst["__SHORT_SUMMARY__"] = short_summary
1782
1783         for dist in self.pkg.changes["distribution"].keys():
1784             announce_list = cnf.Find("Suite::%s::Announce" % (dist))
1785             if announce_list == "" or lists_done.has_key(announce_list):
1786                 continue
1787
1788             lists_done[announce_list] = 1
1789             summary += "Announcing to %s\n" % (announce_list)
1790
1791             if action:
1792                 self.update_subst()
1793                 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
1794                 if cnf.get("Dinstall::TrackingServer") and \
1795                    self.pkg.changes["architecture"].has_key("source"):
1796                     trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
1797                     self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
1798
1799                 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
1800                 utils.send_mail(mail_message)
1801
1802                 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
1803
1804         if cnf.FindB("Dinstall::CloseBugs"):
1805             summary = self.close_bugs(summary, action)
1806
1807         del self.Subst["__SHORT_SUMMARY__"]
1808
1809         return summary
1810
1811     ###########################################################################
1812     @session_wrapper
1813     def accept (self, summary, short_summary, session=None):
1814         """
1815         Accept an upload.
1816
1817         This moves all files referenced from the .changes into the pool,
1818         sends the accepted mail, announces to lists, closes bugs and
1819         also checks for override disparities. If enabled it will write out
1820         the version history for the BTS Version Tracking and will finally call
1821         L{queue_build}.
1822
1823         @type summary: string
1824         @param summary: Summary text
1825
1826         @type short_summary: string
1827         @param short_summary: Short summary
1828         """
1829
1830         cnf = Config()
1831         stats = SummaryStats()
1832
1833         print "Installing."
1834         self.logger.log(["installing changes", self.pkg.changes_file])
1835
1836         poolfiles = []
1837
1838         # Add the .dsc file to the DB first
1839         for newfile, entry in self.pkg.files.items():
1840             if entry["type"] == "dsc":
1841                 source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
1842                 for j in pfs:
1843                     poolfiles.append(j)
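        # (the .dsc has to go in first: source, dsc_component and
        #  dsc_location_id from add_dsc_to_db are needed further below)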
1844
1845         # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
1846         for newfile, entry in self.pkg.files.items():
1847             if entry["type"] == "deb":
1848                 poolfiles.append(add_deb_to_db(self, newfile, session))
1849
1850         # If this is a sourceful, diff-only upload that is moving
1851         # cross-component, we need to copy the .orig files into the new
1852         # component too, for the same reasons as above.
1853         # XXX: mhy: I think this should be in add_dsc_to_db
1854         if self.pkg.changes["architecture"].has_key("source"):
1855             for orig_file in self.pkg.orig_files.keys():
1856                 if not self.pkg.orig_files[orig_file].has_key("id"):
1857                     continue # Skip if it's not in the pool
1858                 orig_file_id = self.pkg.orig_files[orig_file]["id"]
1859                 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
1860                     continue # Skip if the location didn't change
1861
1862                 # Do the move
1863                 oldf = get_poolfile_by_id(orig_file_id, session)
1864                 old_filename = os.path.join(oldf.location.path, oldf.filename)
1865                 old_dat = {'size': oldf.filesize,   'md5sum': oldf.md5sum,
1866                            'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
1867
1868                 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
1869
1870                 # TODO: Care about size/md5sum collisions etc
1871                 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
1872
1873                 # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
1874                 if newf is None:
1875                     utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
1876                     newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
1877
1878                     session.flush()
1879
1880                     # Don't reference the old file from this changes file
1881                     # (filter into a new list rather than calling remove()
1882                     #  while iterating over the list we are walking)
1883                     poolfiles = [p for p in poolfiles if p.file_id != oldf.file_id]
1884
1885                     poolfiles.append(newf)
1886
1887                     # Fix up the DSC references
1888                     toremove = []
1889
1890                     for df in source.srcfiles:
1891                         if df.poolfile.file_id == oldf.file_id:
1892                             # Add a new DSC entry and mark the old one for deletion
1893                             # Don't do it in the loop so we don't change the thing we're iterating over
1894                             newdscf = DSCFile()
1895                             newdscf.source_id = source.source_id
1896                             newdscf.poolfile_id = newf.file_id
1897                             session.add(newdscf)
1898
1899                             toremove.append(df)
1900
1901                     for df in toremove:
1902                         session.delete(df)
1903
1904                     # Flush our changes
1905                     session.flush()
1906
1907                     # Make sure that our source object is up-to-date
1908                     session.expire(source)
1909
1910         # Add changelog information to the database
1911         self.store_changelog()
1912
1913         # Install the files into the pool
1914         for newfile, entry in self.pkg.files.items():
1915             destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
1916             utils.move(newfile, destination)
1917             self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
1918             stats.accept_bytes += float(entry["size"])
1919
1920         # Copy the .changes file across for suites which need it.
1921         copy_changes = {}
1922         for suite_name in self.pkg.changes["distribution"].keys():
1923             if cnf.has_key("Suite::%s::CopyChanges" % (suite_name)):
1924                 copy_changes[cnf["Suite::%s::CopyChanges" % (suite_name)]] = ""
1925
1926         for dest in copy_changes.keys():
1927             utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
1928
1929         # We're done - commit the database changes
1930         session.commit()
1931         # Our SQL session will automatically start a new transaction after
1932         # the last commit
1933
1934         # Move the .changes into the 'done' directory
1935         utils.move(self.pkg.changes_file,
1936                    os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))
1937
1938         if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
1939             UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
1940
1941         self.update_subst()
1942         self.Subst["__SUMMARY__"] = summary
1943         mail_message = utils.TemplateSubst(self.Subst,
1944                                            os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
1945         utils.send_mail(mail_message)
1946         self.announce(short_summary, 1)
1947
1948         ## Helper stuff for DebBugs Version Tracking
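        # Two helper files are written for the BTS: <changes>.versions (the
        # changelog version history, for source uploads) and <changes>.debinfo
        # (one "package version arch source source-version" line per binary).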
1949         if cnf.Find("Dir::Queue::BTSVersionTrack"):
1950             if self.pkg.changes["architecture"].has_key("source"):
1951                 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
1952                 version_history = os.fdopen(fd, 'w')
1953                 version_history.write(self.pkg.dsc["bts changelog"])
1954                 version_history.close()
1955                 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
1956                                       self.pkg.changes_file[:-8]+".versions")
1957                 os.rename(temp_filename, filename)
1958                 os.chmod(filename, 0644)
1959
1960             # Write out the binary -> source mapping.
1961             (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
1962             debinfo = os.fdopen(fd, 'w')
1963             for name, entry in sorted(self.pkg.files.items()):
1964                 if entry["type"] == "deb":
1965                     line = " ".join([entry["package"], entry["version"],
1966                                      entry["architecture"], entry["source package"],
1967                                      entry["source version"]])
1968                     debinfo.write(line+"\n")
1969             debinfo.close()
1970             filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
1971                                   self.pkg.changes_file[:-8]+".debinfo")
1972             os.rename(temp_filename, filename)
1973             os.chmod(filename, 0644)
1974
1975         session.commit()
1976
1977         # Set up our copy queues (e.g. buildd queues)
1978         for suite_name in self.pkg.changes["distribution"].keys():
1979             suite = get_suite(suite_name, session)
1980             for q in suite.copy_queues:
1981                 for f in poolfiles:
1982                     q.add_file_from_pool(f)
1983
1984         session.commit()
1985
1986         # Finally...
1987         stats.accept_count += 1
1988
1989     def check_override(self):
1990         """
1991         Checks override entries for validity. Mails "Override disparity" warnings,
1992         if that feature is enabled.
1993
1994         Abandons the check if
1995           - override disparity checks are disabled
1996           - mail sending is disabled
1997         """
1998
1999         cnf = Config()
2000
2001         # Abandon the check if override disparity checks have been disabled
2002         if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
2003             return
2004
2005         summary = self.pkg.check_override()
2006
2007         if summary == "":
2008             return
2009
2010         overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2011
2012         self.update_subst()
2013         self.Subst["__SUMMARY__"] = summary
2014         mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2015         utils.send_mail(mail_message)
2016         del self.Subst["__SUMMARY__"]
2017
2018     ###########################################################################
2019
2020     def remove(self, from_dir=None):
2021         """
2022         Used (for instance) in p-u to remove the package from unchecked
2023
2024         Also removes the package from holding area.
2025         """
2026         if from_dir is None:
2027             from_dir = self.pkg.directory
2028         h = Holding()
2029
2030         for f in self.pkg.files.keys():
2031             os.unlink(os.path.join(from_dir, f))
2032             if os.path.exists(os.path.join(h.holding_dir, f)):
2033                 os.unlink(os.path.join(h.holding_dir, f))
2034
2035         os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2036         if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2037             os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2038
2039     ###########################################################################
2040
2041     def move_to_queue (self, queue):
2042         """
2043         Move files to a destination queue using the permissions in the table
2044         """
2045         h = Holding()
2046         utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2047                    queue.path, perms=int(queue.change_perms, 8))
2048         for f in self.pkg.files.keys():
2049             utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
2050
2051     ###########################################################################
2052
2053     def force_reject(self, reject_files):
2054         """
2055         Forcefully move files from the current directory to the
2056         reject directory.  If any file already exists in the reject
2057         directory it will be moved to the morgue to make way for
2058         the new file.
2059
2060         @type reject_files: dict
2061         @param reject_files: file dictionary
2062
2063         """
2064
2065         cnf = Config()
2066
2067         for file_entry in reject_files:
2068             # Skip any files which don't exist or which we don't have permission to copy.
2069             if os.access(file_entry, os.R_OK) == 0:
2070                 continue
2071
2072             dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
2073
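            # Open with O_CREAT|O_EXCL so we atomically claim the destination
            # name; EEXIST below means a file of that name already exists and
            # we have to find a free name instead.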
2074             try:
2075                 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
2076             except OSError, e:
2077                 # File exists?  Let's find a new name by adding a number
2078                 if e.errno == errno.EEXIST:
2079                     try:
2080                         dest_file = utils.find_next_free(dest_file, 255)
2081                     except NoFreeFilenameError:
2082                         # Something's either gone badly Pete Tong, or
2083                         # someone is trying to exploit us.
2084                         utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))
2085                         return
2086
2087                     # Make sure we really got it
2088                     try:
2089                         dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2090                     except OSError, e:
2091                         # Likewise
2092                         utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
2093                         return
2094                 else:
2095                     raise
2096             # If we got here, we own the destination file, so we can
2097             # safely overwrite it.
2098             utils.move(file_entry, dest_file, 1, perms=0660)
2099             os.close(dest_fd)
2100
2101     ###########################################################################
2102     def do_reject (self, manual=0, reject_message="", notes=""):
2103         """
2104         Reject an upload. If C{manual} is true and no reject message was
2105         given, spawn an editor so the user can write one.
2106
2107         @type manual: bool
2108         @param manual: manual or automated rejection
2109
2110         @type reject_message: string
2111         @param reject_message: A reject message
2112
2113         @return: 0 on success, 1 if the rejection was abandoned by the user.
2114
2115         """
2116         # If we weren't given a manual rejection message, spawn an
2117         # editor so the user can add one in...
2118         if manual and not reject_message:
2119             (fd, temp_filename) = utils.temp_filename()
2120             temp_file = os.fdopen(fd, 'w')
2121             if len(notes) > 0:
2122                 for note in notes:
2123                     temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
2124                                     % (note.author, note.version, note.notedate, note.comment))
2125             temp_file.close()
2126             editor = os.environ.get("EDITOR","vi")
2127             answer = 'E'
2128             while answer == 'E':
2129                 os.system("%s %s" % (editor, temp_filename))
2130                 temp_fh = utils.open_file(temp_filename)
2131                 reject_message = "".join(temp_fh.readlines())
2132                 temp_fh.close()
2133                 print "Reject message:"
2134                 print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
2135                 prompt = "[R]eject, Edit, Abandon, Quit ?"
2136                 answer = "XXX"
2137                 while prompt.find(answer) == -1:
2138                     answer = utils.our_raw_input(prompt)
2139                     m = re_default_answer.search(prompt)
2140                     if answer == "":
2141                         answer = m.group(1)
2142                     answer = answer[:1].upper()
2143             os.unlink(temp_filename)
2144             if answer == 'A':
2145                 return 1
2146             elif answer == 'Q':
2147                 sys.exit(0)
2148
2149         print "Rejecting.\n"
2150
2151         cnf = Config()
2152
2153         reason_filename = self.pkg.changes_file[:-8] + ".reason"
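        # changes_file always ends in ".changes" (8 characters), so the [:-8]
        # slice strips the extension before ".reason" is appended.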
2154         reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2155
2156         # Move all the files into the reject directory
2157         reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2158         self.force_reject(reject_files)
2159
2160         # If we fail here someone is probably trying to exploit the race
2161         # so let's just raise an exception ...
2162         if os.path.exists(reason_filename):
2163             os.unlink(reason_filename)
2164         reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2165
2166         rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
2167
2168         self.update_subst()
2169         if not manual:
2170             self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2171             self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2172             self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2173             os.write(reason_fd, reject_message)
2174             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2175         else:
2176             # Build up the rejection email
2177             user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2178             self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2179             self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2180             self.Subst["__REJECT_MESSAGE__"] = ""
2181             self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2182             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2183             # Write the rejection email out as the <foo>.reason file
2184             os.write(reason_fd, reject_mail_message)
2185
2186         del self.Subst["__REJECTOR_ADDRESS__"]
2187         del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2188         del self.Subst["__CC__"]
2189
2190         os.close(reason_fd)
2191
2192         # Send the rejection mail
2193         utils.send_mail(reject_mail_message)
2194
2195         if self.logger:
2196             self.logger.log(["rejected", self.pkg.changes_file])
2197
2198         return 0
2199
2200     ################################################################################
2201     def in_override_p(self, package, component, suite, binary_type, filename, session):
2202         """
2203         Check if a package already has override entries in the DB
2204
2205         @type package: string
2206         @param package: package name
2207
2208         @type component: string
2209         @param component: database id of the component
2210
2211         @type suite: int
2212         @param suite: database id of the suite
2213
2214         @type binary_type: string
2215         @param binary_type: type of the package
2216
2217         @type filename: string
2218         @param filename: filename we check
2219
2220         @return: the database result. But no one cares anyway.
2221
2222         """
2223
2224         cnf = Config()
2225
2226         if binary_type == "": # must be source
2227             file_type = "dsc"
2228         else:
2229             file_type = binary_type
2230
2231         # Override suite name; used for example with proposed-updates
2232         if cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
2233             suite = cnf["Suite::%s::OverrideSuite" % (suite)]
2234
2235         result = get_override(package, suite, component, file_type, session)
2236
2237         # If checking for a source package fall back on the binary override type
2238         if file_type == "dsc" and len(result) < 1:
2239             result = get_override(package, suite, component, ['deb', 'udeb'], session)
2240
2241         # Remember the section and priority so we can check them later if appropriate
2242         if len(result) > 0:
2243             result = result[0]
2244             self.pkg.files[filename]["override section"] = result.section.section
2245             self.pkg.files[filename]["override priority"] = result.priority.priority
2246             return result
2247
2248         return None
2249
2250     ################################################################################
2251     def get_anyversion(self, sv_list, suite):
2252         """
2253         @type sv_list: list
2254         @param sv_list: list of (suite, version) tuples to check
2255
2256         @type suite: string
2257         @param suite: suite name
2258
2259         Returns the highest version found in C{suite} or any suite it enhances.
2260         """
2261         Cnf = Config()
2262         anyversion = None
2263         anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
2264         for (s, v) in sv_list:
2265             if s in [ x.lower() for x in anysuite ]:
2266                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2267                     anyversion = v
2268
2269         return anyversion
2270
2271     ################################################################################
2272
2273     def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2274         """
2275         @type sv_list: list
2276         @param sv_list: list of (suite, version) tuples to check
2277
2278         @type filename: string
2279         @param filename: name of the file being checked (used in reject messages)
2280
2281         @type new_version: string
2282         @param new_version: version of the incoming package
2283
2284         Ensure versions are newer than existing packages in target
2285         suites and that cross-suite version checking rules as
2286         set out in the conf file are satisfied.
2287         """
2288
2289         cnf = Config()
2290
2291         # Check versions for each target suite
2292         for target_suite in self.pkg.changes["distribution"].keys():
2293             must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
2294             must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
2295
2296             # Enforce "must be newer than target suite" even if conffile omits it
2297             if target_suite not in must_be_newer_than:
2298                 must_be_newer_than.append(target_suite)
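            # e.g. with a typical configuration an upload targeted at unstable
            # must be strictly newer than the version already in unstable,
            # while MustBeOlderThan guards suites (such as experimental) that
            # are expected to stay ahead of the target.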
2299
2300             for (suite, existent_version) in sv_list:
2301                 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
2302
2303                 if suite in must_be_newer_than and sourceful and vercmp < 1:
2304                     self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2305
2306                 if suite in must_be_older_than and vercmp > -1:
2307                     cansave = 0
2308
2309                     if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2310                         # we really use the other suite, ignoring the conflicting one ...
2311                         addsuite = self.pkg.changes["distribution-version"][suite]
2312
2313                         add_version = self.get_anyversion(sv_list, addsuite)
2314                         target_version = self.get_anyversion(sv_list, target_suite)
2315
2316                         if not add_version:
2317                             # not add_version can only happen if we map to a suite
2318                             # that doesn't enhance the suite we're propup'ing from.
2319                             # so "propup-ver x a b c; map a d" is a problem only if
2320                             # d doesn't enhance a.
2321                             #
2322                             # i think we could always propagate in this case, rather
2323                             # than complaining. either way, this isn't a REJECT issue
2324                             #
2325                             # And - we really should complain to the dorks who configured dak
2326                             self.warnings.append("%s is mapped to, but not enhanced by, %s - adding anyway" % (suite, addsuite))
2327                             self.pkg.changes.setdefault("propdistribution", {})
2328                             self.pkg.changes["propdistribution"][addsuite] = 1
2329                             cansave = 1
2330                         elif not target_version:
2331                             # not target_version is true when the package is NEW
2332                             # we could just stick with the "...old version..." REJECT
2333                             # for this, I think.
2334                             self.rejects.append("Won't propagate NEW packages.")
2335                         elif apt_pkg.VersionCompare(new_version, add_version) < 0:
2336                             # propagation would be redundant. no need to reject though.
2337                             self.warnings.append("ignoring version conflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2338                             cansave = 1
2339                         elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2340                              apt_pkg.VersionCompare(add_version, target_version) >= 0:
2341                             # propagate!!
2342                             self.warnings.append("Propagating upload to %s" % (addsuite))
2343                             self.pkg.changes.setdefault("propdistribution", {})
2344                             self.pkg.changes["propdistribution"][addsuite] = 1
2345                             cansave = 1
2346
2347                     if not cansave:
2348                         self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2349
2350     ################################################################################
2351     def check_binary_against_db(self, filename, session):
2352         # Ensure version is sane
2353         q = session.query(BinAssociation)
2354         q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[filename]["package"])
2355         q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[filename]["architecture"], 'all']))
2356
2357         self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
2358                                        filename, self.pkg.files[filename]["version"], sourceful=False)
2359
2360         # Check for any existing copies of the file
2361         q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2362         q = q.filter_by(version=self.pkg.files[filename]["version"])
2363         q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2364
2365         if q.count() > 0:
2366             self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
2367
2368     ################################################################################
2369
2370     def check_source_against_db(self, filename, session):
2371         source = self.pkg.dsc.get("source")
2372         version = self.pkg.dsc.get("version")
2373
2374         # Ensure version is sane
2375         q = session.query(SrcAssociation)
2376         q = q.join(DBSource).filter(DBSource.source==source)
2377
2378         self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
2379                                        filename, version, sourceful=True)
2380
2381     ################################################################################
2382     def check_dsc_against_db(self, filename, session):
2383         """
2384
2385         @warning: NB: this function can remove entries from the 'files' index [if
2386          the orig tarball is a duplicate of the one in the archive]; if
2387          you're iterating over 'files' and call this function as part of
2388          the loop, be sure to add a check to the top of the loop to
2389          ensure you haven't just tried to dereference the deleted entry.
2390
2391         """
2392
2393         Cnf = Config()
2394         self.pkg.orig_files = {} # XXX: do we need to clear it?
2395         orig_files = self.pkg.orig_files
2396
2397         # Try and find all files mentioned in the .dsc.  This has
2398         # to work harder to cope with the multiple possible
2399         # locations of an .orig.tar.gz.
2400         # The ordering on the select is needed to pick the newest orig
2401         # when it exists in multiple places.
2402         for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2403             found = None
2404             if self.pkg.files.has_key(dsc_name):
2405                 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2406                 actual_size = int(self.pkg.files[dsc_name]["size"])
2407                 found = "%s in incoming" % (dsc_name)
2408
2409                 # Check the file does not already exist in the archive
2410                 ql = get_poolfile_like_name(dsc_name, session)
2411
2412                 # Keep only results whose filename ends with dsc_name
2413                 # (filter into a new list: calling ql.remove() while
2414                 #  iterating over ql would skip entries)
2415                 ql = [i for i in ql if i.filename.endswith(dsc_name)]
2416
2417                 # "[dak] has not broken them.  [dak] has fixed a
2418                 # brokenness.  Your crappy hack exploited a bug in
2419                 # the old dinstall."
2420                 #
2421                 # "(Come on!  I thought it was always obvious that
2422                 # one just doesn't release different files with
2423                 # the same name and version.)"
2424                 #                        -- ajk@ on d-devel@l.d.o
2425
                if len(ql) > 0:
                    # Ignore exact matches for .orig.tar.gz
                    match = False
                    if re_is_orig_source.match(dsc_name):
                        for i in ql:
                            if self.pkg.files.has_key(dsc_name) and \
                               int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
                               self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
                                self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
                                # TODO: Don't delete the entry, just mark it as not needed
                                # This would fix the stupidity of changing something we often iterate over
                                # whilst we're doing it
                                del self.pkg.files[dsc_name]
                                dsc_entry["files id"] = i.file_id
                                if not orig_files.has_key(dsc_name):
                                    orig_files[dsc_name] = {}
                                orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
                                match = True

                                # Don't bitch that we couldn't find this file later
                                try:
                                    self.later_check_files.remove(dsc_name)
                                except ValueError:
                                    pass

                    if not match:
                        self.rejects.append("cannot overwrite existing copy of '%s' already in the archive." % (dsc_name))

            elif re_is_orig_source.match(dsc_name):
                # Check in the pool
                ql = get_poolfile_like_name(dsc_name, session)

                # Strip out anything that isn't '%s' or '/%s$'
                # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
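                # A SQL-side version of that TODO might look something like
                # the following (an untested sketch; get_poolfile_like_name
                # may already be close enough):
                #   session.query(PoolFile).filter(
                #       PoolFile.filename.like('%%/%s' % dsc_name))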
                ql = [i for i in ql if i.filename.endswith(dsc_name)]

                if len(ql) > 0:
                    # Unfortunately, we may get more than one match here if,
                    # for example, the package was in potato but had an -sa
                    # upload in woody.  So we need to choose the right one.

                    # default to something sane in case we don't match any or have only one
                    x = ql[0]

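                    # If several pool copies exist, prefer the one whose
                    # md5sum and size match what the .dsc declares; if none
                    # match we keep ql[0] and the checksum comparison at the
                    # bottom of the loop will reject the upload.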
                    if len(ql) > 1:
                        for i in ql:
                            old_file = os.path.join(i.location.path, i.filename)
                            old_file_fh = utils.open_file(old_file)
                            actual_md5 = apt_pkg.md5sum(old_file_fh)
                            old_file_fh.close()
                            actual_size = os.stat(old_file)[stat.ST_SIZE]
                            if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
                                x = i

                    # Use the chosen match (x), not the loop variable
                    old_file = os.path.join(x.location.path, x.filename)
                    old_file_fh = utils.open_file(old_file)
                    actual_md5 = apt_pkg.md5sum(old_file_fh)
                    old_file_fh.close()
                    actual_size = os.stat(old_file)[stat.ST_SIZE]
                    found = old_file
                    suite_type = x.location.archive_type
                    # need this for updating dsc_files in install()
                    dsc_entry["files id"] = x.file_id
                    # See install() in process-accepted...
                    if not orig_files.has_key(dsc_name):
                        orig_files[dsc_name] = {}
                    orig_files[dsc_name]["id"] = x.file_id
                    orig_files[dsc_name]["path"] = old_file
                    orig_files[dsc_name]["location"] = x.location.location_id
                else:
                    # TODO: Record the queues and info in the DB so we don't hardcode all this crap
                    # Not there? Check the queue directories...
                    for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                        if not Cnf.has_key("Dir::Queue::%s" % (directory)):
                            continue
                        in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
                        if os.path.exists(in_otherdir):
                            in_otherdir_fh = utils.open_file(in_otherdir)
                            actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
                            in_otherdir_fh.close()
                            actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
                            found = in_otherdir
                            if not orig_files.has_key(dsc_name):
                                orig_files[dsc_name] = {}
                            orig_files[dsc_name]["path"] = in_otherdir

                    if not found:
                        self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
                        continue
            else:
                self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
                continue
            if actual_md5 != dsc_entry["md5sum"]:
                self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
            if actual_size != int(dsc_entry["size"]):
                self.rejects.append("size for %s doesn't match %s." % (found, filename))

    ################################################################################
    # This is used by process-new and process-holding to recheck a changes file
    # at the time we're running.  It mainly wraps various other internal functions
    # and is similar to accepted_checks - these should probably be tidied up
    # and combined
    def recheck(self, session):
        cnf = Config()
        for f in self.pkg.files.keys():
            # The .orig.tar.gz can disappear out from under us if it's a
            # duplicate of one in the archive.
            if not self.pkg.files.has_key(f):
                continue

            entry = self.pkg.files[f]

            # Check that the source still exists
            if entry["type"] == "deb":
                source_version = entry["source version"]
                source_package = entry["source package"]
                if not self.pkg.changes["architecture"].has_key("source") \
                   and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
                    source_epochless_version = re_no_epoch.sub('', source_version)
                    dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
                    found = False
                    for q in ["Embargoed", "Unembargoed", "Newstage"]:
                        if cnf.has_key("Dir::Queue::%s" % (q)):
                            if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
                                found = True
                    if not found:
                        self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))

            # Version and file overwrite checks
            if entry["type"] == "deb":
                self.check_binary_against_db(f, session)
            elif entry["type"] == "dsc":
                self.check_source_against_db(f, session)
                self.check_dsc_against_db(f, session)

    ################################################################################
    def accepted_checks(self, overwrite_checks, session):
        # Recheck anything that relies on the database, since that's not
        # frozen between accept and our run time when called from p-a.

        # overwrite_checks is set to False when installing to stable/oldstable

        propagate = {}
        nopropagate = {}

        # Find the .dsc (again)
        dsc_filename = None
        for f in self.pkg.files.keys():
            if self.pkg.files[f]["type"] == "dsc":
                dsc_filename = f

        for checkfile in self.pkg.files.keys():
            # The .orig.tar.gz can disappear out from under us if it's a
            # duplicate of one in the archive.
            if not self.pkg.files.has_key(checkfile):
                continue

            entry = self.pkg.files[checkfile]

            # Check that the source still exists
            if entry["type"] == "deb":
                source_version = entry["source version"]
                source_package = entry["source package"]
                if not self.pkg.changes["architecture"].has_key("source") \
                   and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
                    self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))

            # Version and file overwrite checks
            if overwrite_checks:
                if entry["type"] == "deb":
                    self.check_binary_against_db(checkfile, session)
                elif entry["type"] == "dsc":
                    self.check_source_against_db(checkfile, session)
                    self.check_dsc_against_db(dsc_filename, session)

            # Propagate when the package is in the override tables:
            for suite in self.pkg.changes.get("propdistribution", {}).keys():
                if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
                    propagate[suite] = 1
                else:
                    nopropagate[suite] = 1

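        # Only add a suite to the distribution list if every file in the
        # upload had an override there; one missing override vetoes the
        # whole suite.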
        for suite in propagate.keys():
            if suite in nopropagate:
                continue
            self.pkg.changes["distribution"][suite] = 1

        for checkfile in self.pkg.files.keys():
            entry = self.pkg.files[checkfile]
            # Check the package is still in the override tables
            for suite in self.pkg.changes["distribution"].keys():
                if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
                    self.rejects.append("%s is NEW for %s." % (checkfile, suite))

    ################################################################################
    # This is not really a reject but an unaccept; however, since a) the code
    # for that is non-trivial (reopen bugs, unannounce etc.) and b) this
    # should be extremely rare, for now we'll go with whining at our admin
    # folks...

    def do_unaccept(self):
        cnf = Config()

        self.update_subst()
        self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
        self.Subst["__BCC__"] = "X-DAK: dak process-accepted"
        if cnf.has_key("Dinstall::Bcc"):
            self.Subst["__BCC__"] += "\nBcc: %s" % (cnf["Dinstall::Bcc"])

        template = os.path.join(cnf["Dir::Templates"], "process-accepted.unaccept")

        reject_mail_message = utils.TemplateSubst(self.Subst, template)

        # Write the rejection email out as the <foo>.reason file
        reason_filename = os.path.basename(self.pkg.changes_file[:-8]) + ".reason"
        reject_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)

        # If we fail here someone is probably trying to exploit the race,
        # so let's just raise an exception...
        if os.path.exists(reject_filename):
            os.unlink(reject_filename)

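        # O_CREAT|O_EXCL means this open raises OSError if somebody
        # re-creates the file between the unlink above and here, rather
        # than us silently writing into a file we don't own.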
        fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
        os.write(fd, reject_mail_message)
        os.close(fd)

        utils.send_mail(reject_mail_message)

        del self.Subst["__REJECTOR_ADDRESS__"]
        del self.Subst["__REJECT_MESSAGE__"]
        del self.Subst["__CC__"]

    ################################################################################
    # If any file of an upload has a recent mtime then chances are good
    # the file is still being uploaded.

    def upload_too_new(self):
        cnf = Config()
        too_new = False
        # Move back to the original directory to get accurate time stamps
        cwd = os.getcwd()
        os.chdir(self.pkg.directory)
        file_list = self.pkg.files.keys()
        file_list.extend(self.pkg.dsc_files.keys())
        file_list.append(self.pkg.changes_file)
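        # Dinstall::SkipTime is the minimum age, in seconds, a file must
        # reach before we believe the upload is complete.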
        for f in file_list:
            try:
                last_modified = time.time() - os.path.getmtime(f)
                if last_modified < int(cnf["Dinstall::SkipTime"]):
                    too_new = True
                    break
            except OSError:
                # The file may have vanished; if so, it can't be too new
                pass

        os.chdir(cwd)
        return too_new

    def store_changelog(self):

        # Skip binary-only uploads unless they are bin-NMUs
        if not self.pkg.changes['architecture'].has_key('source'):
            from daklib.regexes import re_bin_only_nmu
            if not re_bin_only_nmu.search(self.pkg.changes['version']):
                return

        session = DBConn().session()

        # Check if the upload already has a changelog entry
        query = """SELECT changelog_id FROM changes WHERE source = :source
                   AND version = :version AND architecture = :architecture AND changelog_id != 0"""
        if session.execute(query, {'source': self.pkg.changes['source'], \
                                   'version': self.pkg.changes['version'], \
                                   'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
            session.commit()
            return

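        # Note: "INSERT ... RETURNING id" is PostgreSQL syntax (dak assumes
        # PostgreSQL); it hands back the new row's id without a second
        # round trip.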
        # Add the current changelog text to the changelogs_text table and
        # get back the generated ID
        query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
        ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]

        # Link the ID to the upload's row in the changes table
        query = """UPDATE changes SET changelog_id = :id WHERE source = :source
                   AND version = :version AND architecture = :architecture"""
        session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
                                'version': self.pkg.changes['version'], \
                                'architecture': " ".join(self.pkg.changes['architecture'].keys())})

        session.commit()