#!/usr/bin/env python
# vim:set et sw=4:

"""
Queue utility functions for dak

@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2001 - 2006 James Troup <james@nocrew.org>
@copyright: 2009, 2010  Joerg Jaspert <joerg@debian.org>
@license: GNU General Public License version 2 or later
"""

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

###############################################################################

import errno
import os
import stat
import sys
import time
import apt_inst
import apt_pkg
import utils
import commands
import shutil
import textwrap
from types import *
from sqlalchemy.sql.expression import desc
from sqlalchemy.orm.exc import NoResultFound

import yaml

from dak_exceptions import *
from changes import *
from regexes import *
from config import Config
from holding import Holding
from urgencylog import UrgencyLog
from dbconn import *
from summarystats import SummaryStats
from utils import parse_changes, check_dsc_files
from textutils import fix_maintainer
from binary import Binary
from lintian import parse_lintian_output, generate_reject_messages

###############################################################################

def get_type(f, session):
    """
    Get the file type of C{f}

    @type f: dict
    @param f: file entry from Changes object

    @type session: SQLA Session
    @param session: SQL Alchemy session object

    @rtype: string
    @return: filetype

    """
    # Determine the type
    if f.has_key("dbtype"):
        file_type = f["dbtype"]
    elif re_source_ext.match(f["type"]):
        file_type = "dsc"
    else:
        file_type = f["type"]
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (file_type))

    # Validate the override type
    type_id = get_override_type(file_type, session)
    if type_id is None:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))

    return file_type

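# Illustrative usage sketch (added for exposition, not part of the original
# module; assumes a configured dak database so the override type lookup
# succeeds).  The file entry below is a hypothetical minimal one.
#
#     session = DBConn().session()
#     f = {"type": "deb", "dbtype": "deb"}
#     print get_type(f, session)    # -> "deb"
#     session.close()
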
################################################################################

# Determine what parts in a .changes are NEW

def determine_new(changes, files, warn=1, session = None):
    """
    Determine what parts in a C{changes} file are NEW.

    @type changes: Upload.Pkg.changes dict
    @param changes: Changes dictionary

    @type files: Upload.Pkg.files dict
    @param files: Files dictionary

    @type warn: bool
    @param warn: Warn if overrides are added for (old)stable

    @rtype: dict
    @return: dictionary of NEW components.

    """
    new = {}

    # Build up a list of potentially new things
    for name, f in files.items():
        # Skip byhand elements
#        if f["type"] == "byhand":
#            continue
        pkg = f["package"]
        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f, session)
        component = f["component"]

        if file_type == "dsc":
            priority = "source"

        if not new.has_key(pkg):
            new[pkg] = {}
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        else:
            old_type = new[pkg]["type"]
            if old_type != file_type:
                # source gets trumped by deb or udeb
                if old_type == "dsc":
                    new[pkg]["priority"] = priority
                    new[pkg]["section"] = section
                    new[pkg]["type"] = file_type
                    new[pkg]["component"] = component

        new[pkg]["files"].append(name)

        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]

    # Fix up the list of target suites
    cnf = Config()
    for suite in changes["suite"].keys():
        override = cnf.Find("Suite::%s::OverrideSuite" % (suite))
        if override:
            (olderr, newerr) = (get_suite(suite, session) == None,
                                get_suite(override, session) == None)
            if olderr or newerr:
                (oinv, ninv) = ("", "")
                if olderr: oinv = "invalid "
                if newerr: ninv = "invalid "
                print "warning: overriding %ssuite %s to %ssuite %s" % (
                        oinv, suite, ninv, override)
            del changes["suite"][suite]
            changes["suite"][override] = 1

    for suite in changes["suite"].keys():
        for pkg in new.keys():
            ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
            if len(ql) > 0:
                for file_entry in new[pkg]["files"]:
                    if files[file_entry].has_key("new"):
                        del files[file_entry]["new"]
                del new[pkg]

    if warn:
        for s in ['stable', 'oldstable']:
            if changes["suite"].has_key(s):
                print "WARNING: overrides will be added for %s!" % s
        for pkg in new.keys():
            if new[pkg].has_key("othercomponents"):
                print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])

    return new

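# Illustrative usage sketch (added for exposition; assumes an Upload object
# "u" whose .changes and .files have already been loaded and checked):
#
#     session = DBConn().session()
#     new = determine_new(u.pkg.changes, u.pkg.files, warn=0, session=session)
#     for pkg in new.keys():
#         print "%s is NEW: section=%s priority=%s component=%s" % (
#             pkg, new[pkg]["section"], new[pkg]["priority"],
#             new[pkg]["component"])
#     session.close()
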
################################################################################

def check_valid(new, session = None):
    """
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non-debian-installer packages cannot be udeb
      - source priority can only be assigned to dsc file types

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.

    """
    for pkg in new.keys():
        section_name = new[pkg]["section"]
        priority_name = new[pkg]["priority"]
        file_type = new[pkg]["type"]

        section = get_section(section_name, session)
        if section is None:
            new[pkg]["section id"] = -1
        else:
            new[pkg]["section id"] = section.section_id

        priority = get_priority(priority_name, session)
        if priority is None:
            new[pkg]["priority id"] = -1
        else:
            new[pkg]["priority id"] = priority.priority_id

        # Sanity checks
        di = section_name.find("debian-installer") != -1

        # If d-i, we must be udeb and vice-versa
        if     (di and file_type not in ("udeb", "dsc")) or \
           (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1

        # If dsc we need to be source and vice-versa.  Note we compare the
        # priority *name*; "priority" now holds the database object.
        if (priority_name == "source" and file_type != "dsc") or \
           (priority_name != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1

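# Illustrative usage sketch (added for exposition): check_valid() annotates
# the dict returned by determine_new() in place, setting "section id" or
# "priority id" to -1 for entries that fail validation.
#
#     new = determine_new(u.pkg.changes, u.pkg.files, session=session)
#     check_valid(new, session=session)
#     for pkg in new.keys():
#         if new[pkg]["section id"] == -1 or new[pkg]["priority id"] == -1:
#             print "%s: invalid section or priority" % pkg
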
###############################################################################

# Used by Upload.check_timestamps
class TarTime(object):
    def __init__(self, future_cutoff, past_cutoff):
        self.reset()
        self.future_cutoff = future_cutoff
        self.past_cutoff = past_cutoff

    def reset(self):
        self.future_files = {}
        self.ancient_files = {}

    def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
        if MTime > self.future_cutoff:
            self.future_files[Name] = MTime
        if MTime < self.past_cutoff:
            self.ancient_files[Name] = MTime

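# Illustrative usage sketch (added for exposition): TarTime.callback has the
# per-member callback signature of the old python-apt extract interface, as
# used by Upload.check_timestamps.  Assuming that API is available:
#
#     tar = TarTime(future_cutoff=time.time() + 24 * 3600, past_cutoff=0)
#     deb_file = utils.open_file("foo_1.0_i386.deb")   # hypothetical path
#     apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
#     deb_file.close()
#     for name, mtime in tar.future_files.items():
#         print "%s: timestamp %s is in the future" % (name, mtime)
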
###############################################################################

def prod_maintainer(notes, upload):
    cnf = Config()

    # Here we prepare an editor and get them ready to prod...
    (fd, temp_filename) = utils.temp_filename()
    temp_file = os.fdopen(fd, 'w')
    for note in notes:
        temp_file.write(note.comment)
    temp_file.close()
    editor = os.environ.get("EDITOR","vi")
    answer = 'E'
    while answer == 'E':
        os.system("%s %s" % (editor, temp_filename))
        temp_fh = utils.open_file(temp_filename)
        prod_message = "".join(temp_fh.readlines())
        temp_fh.close()
        print "Prod message:"
        print utils.prefix_multi_line_string(prod_message,"  ",include_blank_lines=1)
        prompt = "[P]rod, Edit, Abandon, Quit ?"
        answer = "XXX"
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            m = re_default_answer.search(prompt)
            if answer == "":
                answer = m.group(1)
            answer = answer[:1].upper()
    os.unlink(temp_filename)
    if answer == 'A':
        return
    elif answer == 'Q':
        end()
        sys.exit(0)
    # Otherwise, do the prodding...
    user_email_address = utils.whoami() + " <%s>" % (
        cnf["Dinstall::MyAdminAddress"])

    Subst = upload.Subst

    Subst["__FROM_ADDRESS__"] = user_email_address
    Subst["__PROD_MESSAGE__"] = prod_message
    Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]

    prod_mail_message = utils.TemplateSubst(
        Subst,cnf["Dir::Templates"]+"/process-new.prod")

    # Send the prod mail
    utils.send_mail(prod_mail_message)

    print "Sent prodding message"

################################################################################

def edit_note(note, upload, session):
    # Write the current data to a temporary file
    (fd, temp_filename) = utils.temp_filename()
    editor = os.environ.get("EDITOR","vi")
    answer = 'E'
    while answer == 'E':
        os.system("%s %s" % (editor, temp_filename))
        temp_file = utils.open_file(temp_filename)
        newnote = temp_file.read().rstrip()
        temp_file.close()
        print "New Note:"
        print utils.prefix_multi_line_string(newnote,"  ")
        prompt = "[D]one, Edit, Abandon, Quit ?"
        answer = "XXX"
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            m = re_default_answer.search(prompt)
            if answer == "":
                answer = m.group(1)
            answer = answer[:1].upper()
    os.unlink(temp_filename)
    if answer == 'A':
        return
    elif answer == 'Q':
        end()
        sys.exit(0)

    comment = NewComment()
    comment.package = upload.pkg.changes["source"]
    comment.version = upload.pkg.changes["version"]
    comment.comment = newnote
    comment.author  = utils.whoami()
    comment.trainee = bool(Options["Trainee"])
    session.add(comment)
    session.commit()

###############################################################################

class Upload(object):
    """
    Everything that has to do with processing an upload.

    """
    def __init__(self):
        self.logger = None
        self.pkg = Changes()
        self.reset()

    ###########################################################################

    def reset (self):
        """ Reset a number of internal variables."""

        # Initialize the substitution template map
        cnf = Config()
        self.Subst = {}
        self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
        self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
        self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
        self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]

        self.rejects = []
        self.warnings = []
        self.notes = []

        self.later_check_files = []

        self.pkg.reset()

    def package_info(self):
        """
        Format various messages from this Upload to send to the maintainer.
        """

        msgs = (
            ('Reject Reasons', self.rejects),
            ('Warnings', self.warnings),
            ('Notes', self.notes),
        )

        msg = ''
        for title, messages in msgs:
            if messages:
                msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
        msg += '\n\n'

        return msg

    ###########################################################################
    def update_subst(self):
        """ Set up the per-package template substitution mappings """

        cnf = Config()

        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not self.pkg.changes.has_key("architecture") or not \
           isinstance(self.pkg.changes["architecture"], dict):
            self.pkg.changes["architecture"] = { "Unknown" : "" }

        # and maintainer2047 may not exist.
        if not self.pkg.changes.has_key("maintainer2047"):
            self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]

        self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
        self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if self.pkg.changes["architecture"].has_key("source") and \
           self.pkg.changes["changedby822"] != "" and \
           (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):

            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
        else:
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")

        # Process policy doesn't set the fingerprint field and I don't want to make it
        # do it for now as I don't want to have to deal with the case where we accepted
        # the package into PU-NEW, but the fingerprint has gone away from the keyring in
        # the meantime so the package will be remarked as rejectable.  Urgh.
        # TODO: Fix this properly
        if self.pkg.changes.has_key('fingerprint'):
            session = DBConn().session()
            fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
            if fpr and self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
                if self.pkg.changes.has_key("sponsoremail"):
                    self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
            session.close()

        if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
            self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if cnf.get("Dinstall::OverrideMaintainer"):
            self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
            self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]

        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
        self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
        self.Subst["__SUITE__"] = ", ".join(self.pkg.changes["distribution"])

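    # Illustrative usage sketch (added for exposition; the template name is
    # hypothetical):
    #
    #     u.update_subst()
    #     mail = utils.TemplateSubst(u.Subst,
    #         Config()["Dir::Templates"] + "/some.template")
    #     utils.send_mail(mail)
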
    ###########################################################################
    def load_changes(self, filename):
        """
        Load a changes file and setup a dictionary around it. Also checks for mandatory
        fields within.

        @type filename: string
        @param filename: Changes filename, full path.

        @rtype: boolean
        @return: whether the changes file was valid or not.  We may want to
                 reject even if this is True (see what gets put in self.rejects).
                 This is simply to prevent us even trying things later which will
                 fail because we couldn't properly parse the file.
        """
        Cnf = Config()
        self.pkg.changes_file = filename

        # Parse the .changes file into a dictionary
        try:
            self.pkg.changes.update(parse_changes(filename))
        except CantOpenError:
            self.rejects.append("%s: can't read file." % (filename))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except ChangesUnicodeError:
            self.rejects.append("%s: changes file not proper utf-8" % (filename))
            return False

        # Parse the Files field from the .changes into another dictionary
        try:
            self.pkg.files.update(utils.build_file_list(self.pkg.changes))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (filename, format))
            return False

        # Check for mandatory fields
        for i in ("distribution", "source", "binary", "architecture",
                  "version", "maintainer", "files", "changes", "description"):
            if not self.pkg.changes.has_key(i):
                # Avoid undefined errors later
                self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
                return False

        # Strip a source version in brackets from the source field
        if re_strip_srcver.search(self.pkg.changes["source"]):
            self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])

        # Ensure the source field is a valid package name.
        if not re_valid_pkg_name.match(self.pkg.changes["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))

        # Split multi-value fields into a lower-level dictionary
        for i in ("architecture", "distribution", "binary", "closes"):
            o = self.pkg.changes.get(i, "")
            if o != "":
                del self.pkg.changes[i]

            self.pkg.changes[i] = {}

            for j in o.split():
                self.pkg.changes[i][j] = 1

        # Fix the Maintainer: field to be RFC822/2047 compatible
        try:
            (self.pkg.changes["maintainer822"],
             self.pkg.changes["maintainer2047"],
             self.pkg.changes["maintainername"],
             self.pkg.changes["maintaineremail"]) = \
                   fix_maintainer (self.pkg.changes["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["maintainer"], msg))

        # ...likewise for the Changed-By: field if it exists.
        try:
            (self.pkg.changes["changedby822"],
             self.pkg.changes["changedby2047"],
             self.pkg.changes["changedbyname"],
             self.pkg.changes["changedbyemail"]) = \
                   fix_maintainer (self.pkg.changes.get("changed-by", ""))
        except ParseMaintError, msg:
            self.pkg.changes["changedby822"] = ""
            self.pkg.changes["changedby2047"] = ""
            self.pkg.changes["changedbyname"] = ""
            self.pkg.changes["changedbyemail"] = ""

            self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["changed-by"], msg))

        # Ensure all the values in Closes: are numbers
        if self.pkg.changes.has_key("closes"):
            for i in self.pkg.changes["closes"].keys():
                if re_isanum.match (i) == None:
                    self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))

        # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
        self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
        self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])

        # Check the .changes is non-empty
        if not self.pkg.files:
            self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
            return False

        # Changes was syntactically valid even if we'll reject
        return True

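    # Illustrative usage sketch (added for exposition; the path is made up):
    #
    #     u = Upload()
    #     parseable = u.load_changes("/srv/unchecked/foo_1.0_i386.changes")
    #     if not parseable or u.rejects:
    #         print "\n".join(u.rejects)
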
    ###########################################################################

    def check_distributions(self):
        "Check and map the Distribution field"

        Cnf = Config()

        # Handle suite mappings
        for m in Cnf.ValueList("SuiteMappings"):
            args = m.split()
            mtype = args[0]
            if mtype == "map" or mtype == "silent-map":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    del self.pkg.changes["distribution"][source]
                    self.pkg.changes["distribution"][dest] = 1
                    if mtype != "silent-map":
                        self.notes.append("Mapping %s to %s." % (source, dest))
                if self.pkg.changes.has_key("distribution-version"):
                    if self.pkg.changes["distribution-version"].has_key(source):
                        self.pkg.changes["distribution-version"][source]=dest
            elif mtype == "map-unreleased":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    for arch in self.pkg.changes["architecture"].keys():
                        if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
                            self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
                            del self.pkg.changes["distribution"][source]
                            self.pkg.changes["distribution"][dest] = 1
                            break
            elif mtype == "ignore":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    del self.pkg.changes["distribution"][suite]
                    self.warnings.append("Ignoring %s as a target suite." % (suite))
            elif mtype == "reject":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    self.rejects.append("Uploads to %s are not accepted." % (suite))
            elif mtype == "propup-version":
                # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
                #
                # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
                if self.pkg.changes["distribution"].has_key(args[1]):
                    self.pkg.changes.setdefault("distribution-version", {})
                    for suite in args[2:]:
                        self.pkg.changes["distribution-version"][suite] = suite

        # Ensure there is (still) a target distribution
        if len(self.pkg.changes["distribution"].keys()) < 1:
            self.rejects.append("No valid distribution remaining.")

        # Ensure target distributions exist
        for suite in self.pkg.changes["distribution"].keys():
            if not Cnf.has_key("Suite::%s" % (suite)):
                self.rejects.append("Unknown distribution `%s'." % (suite))

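    # Illustrative configuration sketch (added for exposition): the mapping
    # types handled above are driven by SuiteMappings entries in the dak
    # configuration, e.g. (hypothetical values):
    #
    #     SuiteMappings
    #     {
    #       "map stable proposed-updates";
    #       "silent-map stable-security proposed-updates";
    #       "map-unreleased testing unstable";
    #       "ignore testing";
    #       "reject experimental-security";
    #       "propup-version testing-security unstable";
    #     };
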
    ###########################################################################

    def binary_file_checks(self, f, session):
        cnf = Config()
        entry = self.pkg.files[f]

        # Extract package control information
        deb_file = utils.open_file(f)
        try:
            control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
        except:
            self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
            deb_file.close()
            # Can't continue, none of the checks on control would work.
            return

        # Check for mandatory "Description:"
        deb_file.seek(0)
        try:
            apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
        except:
            self.rejects.append("%s: Missing Description in binary package" % (f))
            return

        deb_file.close()

        # Check for mandatory fields
        for field in [ "Package", "Architecture", "Version" ]:
            if control.Find(field) == None:
                # Can't continue
                self.rejects.append("%s: No %s field in control." % (f, field))
                return

        # Ensure the package name matches the one given in the .changes
        if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
            self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))

        # Validate the package field
        package = control.Find("Package")
        if not re_valid_pkg_name.match(package):
            self.rejects.append("%s: invalid package name '%s'." % (f, package))

        # Validate the version field
        version = control.Find("Version")
        if not re_valid_version.match(version):
            self.rejects.append("%s: invalid version number '%s'." % (f, version))

        # Ensure the architecture of the .deb is one we know about.
        default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
        architecture = control.Find("Architecture")
        upload_suite = self.pkg.changes["distribution"].keys()[0]

        if      architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session = session)] \
            and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session = session)]:
            self.rejects.append("Unknown architecture '%s'." % (architecture))

        # Ensure the architecture of the .deb is one of the ones
        # listed in the .changes.
        if not self.pkg.changes["architecture"].has_key(architecture):
            self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))

        # Sanity-check the Depends field
        depends = control.Find("Depends")
        if depends == '':
            self.rejects.append("%s: Depends field is empty." % (f))

        # Sanity-check the Provides field
        provides = control.Find("Provides")
        if provides:
            provide = re_spacestrip.sub('', provides)
            if provide == '':
                self.rejects.append("%s: Provides field is empty." % (f))
            prov_list = provide.split(",")
            for prov in prov_list:
                if not re_valid_pkg_name.match(prov):
                    self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))

        # Check the section & priority match those given in the .changes (non-fatal)
        if     control.Find("Section") and entry["section"] != "" \
           and entry["section"] != control.Find("Section"):
            self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
                                (f, control.Find("Section", ""), entry["section"]))
        if control.Find("Priority") and entry["priority"] != "" \
           and entry["priority"] != control.Find("Priority"):
            self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
                                (f, control.Find("Priority", ""), entry["priority"]))

        entry["package"] = package
        entry["architecture"] = architecture
        entry["version"] = version
        entry["maintainer"] = control.Find("Maintainer", "")

        if f.endswith(".udeb"):
            self.pkg.files[f]["dbtype"] = "udeb"
        elif f.endswith(".deb"):
            self.pkg.files[f]["dbtype"] = "deb"
        else:
            self.rejects.append("%s is neither a .deb nor a .udeb." % (f))

        entry["source"] = control.Find("Source", entry["package"])

        # Get the source version
        source = entry["source"]
        source_version = ""

        if source.find("(") != -1:
            m = re_extract_src_version.match(source)
            source = m.group(1)
            source_version = m.group(2)

        if not source_version:
            source_version = self.pkg.files[f]["version"]

        entry["source package"] = source
        entry["source version"] = source_version

        # Ensure the filename matches the contents of the .deb
        m = re_isadeb.match(f)

        #  package name
        file_package = m.group(1)
        if entry["package"] != file_package:
            self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
                                (f, file_package, entry["dbtype"], entry["package"]))
        epochless_version = re_no_epoch.sub('', control.Find("Version"))

        #  version
        file_version = m.group(2)
        if epochless_version != file_version:
            self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
                                (f, file_version, entry["dbtype"], epochless_version))

        #  architecture
        file_architecture = m.group(3)
        if entry["architecture"] != file_architecture:
            self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
                                (f, file_architecture, entry["dbtype"], entry["architecture"]))

        # Check for existing source
        source_version = entry["source version"]
        source_package = entry["source package"]
        if self.pkg.changes["architecture"].has_key("source"):
            if source_version != self.pkg.changes["version"]:
                self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
                                    (source_version, f, self.pkg.changes["version"]))
        else:
            # Check in the SQL database
            if not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
                # Check in one of the other directories
                source_epochless_version = re_no_epoch.sub('', source_version)
                dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
                if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
                    entry["byhand"] = 1
                elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
                    entry["new"] = 1
                else:
                    dsc_file_exists = False
                    for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates", "Lenny-Volatile-Proposed-Updates"]:
                        if cnf.has_key("Dir::Queue::%s" % (myq)):
                            if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
                                dsc_file_exists = True
                                break

                    if not dsc_file_exists:
                        self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))

        # Check the version and for file overwrites
        self.check_binary_against_db(f, session)

        # Temporarily disable contents generation until we change the table storage layout
        #b = Binary(f)
        #b.scan_package()
        #if len(b.rejects) > 0:
        #    for j in b.rejects:
        #        self.rejects.append(j)

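    # Illustrative note (added for exposition): re_isadeb is relied upon above
    # to split a binary filename of the form <package>_<version>_<arch>.deb:
    #
    #     m = re_isadeb.match("dpkg_1.15.8_amd64.deb")
    #     # m.group(1) == "dpkg"      (package)
    #     # m.group(2) == "1.15.8"    (epochless version)
    #     # m.group(3) == "amd64"     (architecture)
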
    def source_file_checks(self, f, session):
        entry = self.pkg.files[f]

        m = re_issource.match(f)
        if not m:
            return

        entry["package"] = m.group(1)
        entry["version"] = m.group(2)
        entry["type"] = m.group(3)

        # Ensure the source package name matches the Source field in the .changes
        if self.pkg.changes["source"] != entry["package"]:
            self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))

        # Ensure the source version matches the version in the .changes file
        if re_is_orig_source.match(f):
            changes_version = self.pkg.changes["chopversion2"]
        else:
            changes_version = self.pkg.changes["chopversion"]

        if changes_version != entry["version"]:
            self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))

        # Ensure the .changes lists source in the Architecture field
        if not self.pkg.changes["architecture"].has_key("source"):
            self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))

        # Check the signature of a .dsc file
        if entry["type"] == "dsc":
            # check_signature returns either:
            #  (None, [list, of, rejects]) or (signature, [])
            (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
            for j in rejects:
                self.rejects.append(j)

        entry["architecture"] = "source"

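    # Illustrative note (added for exposition): for an upload of version
    # "1:1.2-3", chopversion is "1.2-3" (epoch stripped) and chopversion2 is
    # "1.2" (epoch and revision stripped), so foo_1.2.orig.tar.gz is checked
    # against "1.2" while foo_1.2-3.dsc is checked against "1.2-3".
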
    def per_suite_file_checks(self, f, suite, session):
        cnf = Config()
        entry = self.pkg.files[f]

        # Skip byhand
        if entry.has_key("byhand"):
            return

        # Check we have fields we need to do these checks
        oktogo = True
        for m in ['component', 'package', 'priority', 'size', 'md5sum']:
            if not entry.has_key(m):
                self.rejects.append("file '%s' does not have field %s set" % (f, m))
                oktogo = False

        if not oktogo:
            return

        # Handle component mappings
        for m in cnf.ValueList("ComponentMappings"):
            (source, dest) = m.split()
            if entry["component"] == source:
                entry["original component"] = source
                entry["component"] = dest

        # Ensure the component is valid for the target suite
        if cnf.has_key("Suite::%s::Components" % (suite)) and \
           entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
            self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
            return

        # Validate the component
        if not get_component(entry["component"], session):
            self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
            return

        # See if the package is NEW
        if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
            entry["new"] = 1

        # Validate the priority
        if entry["priority"].find('/') != -1:
            self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))

        # Determine the location
        location = cnf["Dir::Pool"]
        l = get_location(location, entry["component"], session=session)
        if l is None:
            self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
            entry["location id"] = -1
        else:
            entry["location id"] = l.location_id

        # Check the md5sum & size against existing files (if any)
        entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])

        found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
                                         entry["size"], entry["md5sum"], entry["location id"])

        if found is None:
            self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
        elif found is False and poolfile is not None:
            self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
        else:
            if poolfile is None:
                entry["files id"] = None
            else:
                entry["files id"] = poolfile.file_id

        # Check for packages that have moved from one component to another
        entry['suite'] = suite
        res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
        if res.rowcount > 0:
            entry["othercomponents"] = res.fetchone()[0]

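    # Illustrative note (added for exposition): utils.poolify() is expected to
    # return the pool subdirectory for a source package, along the lines of:
    #
    #     utils.poolify("dpkg", "main")     # -> "main/d/dpkg/"
    #     utils.poolify("libfoo", "main")   # -> "main/libf/libfoo/"
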
    def check_files(self, action=True):
        file_keys = self.pkg.files.keys()
        holding = Holding()
        cnf = Config()

        if action:
            cwd = os.getcwd()
            os.chdir(self.pkg.directory)
            for f in file_keys:
                ret = holding.copy_to_holding(f)
                if ret is not None:
                    self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)

            os.chdir(cwd)

        # check we already know the changes file
        # [NB: this check must be done post-suite mapping]
        base_filename = os.path.basename(self.pkg.changes_file)

        session = DBConn().session()

        try:
            dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
            # if in the pool or in a queue other than unchecked, reject
            if (dbc.in_queue is None) \
                   or (dbc.in_queue is not None
                       and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
                self.rejects.append("%s file already known to dak" % base_filename)
        except NoResultFound, e:
            # not known, good
            pass

        has_binaries = False
        has_source = False

        for f, entry in self.pkg.files.items():
            # Ensure the file does not already exist in one of the accepted directories
            for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                if not cnf.has_key("Dir::Queue::%s" % (d)): continue
                if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
                    self.rejects.append("%s file already exists in the %s directory." % (f, d))

            if not re_taint_free.match(f):
                self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))

            # Check the file is readable
            if os.access(f, os.R_OK) == 0:
                # When running in -n, copy_to_holding() won't have
                # generated the reject_message, so we need to.
                if action:
                    if os.path.exists(f):
                        self.rejects.append("Can't read `%s'. [permission denied]" % (f))
                    else:
                        # Don't directly reject, mark to check later to deal with orig's
                        # we can find in the pool
                        self.later_check_files.append(f)
                entry["type"] = "unreadable"
                continue

            # If it's byhand skip remaining checks
            if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Checks for a binary package...
            elif re_isadeb.match(f):
                has_binaries = True
                entry["type"] = "deb"

                # This routine appends to self.rejects/warnings as appropriate
                self.binary_file_checks(f, session)

            # Checks for a source package...
            elif re_issource.match(f):
                has_source = True

                # This routine appends to self.rejects/warnings as appropriate
                self.source_file_checks(f, session)

            # Not a binary or source package?  Assume byhand...
            else:
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Per-suite file checks
            entry["oldfiles"] = {}
            for suite in self.pkg.changes["distribution"].keys():
                self.per_suite_file_checks(f, suite, session)

        session.close()

        # If the .changes file says it has source, it must have source.
        if self.pkg.changes["architecture"].has_key("source"):
            if not has_source:
                self.rejects.append("no source found and Architecture line in changes mentions source.")

            if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
                self.rejects.append("source only uploads are not supported.")

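    # Illustrative usage sketch (added for exposition; mirrors the order the
    # dak queue-processing commands call these checks in):
    #
    #     u = Upload()
    #     u.pkg.directory = "/srv/unchecked"    # hypothetical
    #     if u.load_changes(os.path.join(u.pkg.directory,
    #                                    "foo_1.0_i386.changes")):
    #         u.check_distributions()
    #         u.check_files(action=False)       # dry run: no copy to holding
    #     print u.package_info()
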
    ###########################################################################
    def check_dsc(self, action=True, session=None):
        """Returns bool indicating whether or not the source changes are valid"""
        # Ensure there is source to check
        if not self.pkg.changes["architecture"].has_key("source"):
            return True

        # Find the .dsc
        dsc_filename = None
        for f, entry in self.pkg.files.items():
            if entry["type"] == "dsc":
                if dsc_filename:
                    self.rejects.append("can not process a .changes file with multiple .dsc's.")
                    return False
                else:
                    dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            self.rejects.append("source uploads must contain a dsc file")
            return False

        # Parse the .dsc file
        try:
            self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1, dsc_file=1))
        except CantOpenError:
            # if not -n copy_to_holding() will have done this for us...
            if not action:
                self.rejects.append("%s: can't read file." % (dsc_filename))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
        except InvalidDscError, line:
            self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
        except ChangesUnicodeError:
            self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))

        # Build up the file list of files mentioned by the .dsc
        try:
            self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
        except NoFilesFieldError:
            self.rejects.append("%s: no Files: field." % (dsc_filename))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
            return False

        # Enforce mandatory fields
        for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
            if not self.pkg.dsc.has_key(i):
                self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
                return False

        # Validate the source and version fields
        if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
        if not re_valid_version.match(self.pkg.dsc["version"]):
            self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))

        # Only a limited list of source formats are allowed in each suite
        for dist in self.pkg.changes["distribution"].keys():
            allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
            if self.pkg.dsc["format"] not in allowed:
                self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))

        # Validate the Maintainer field
        try:
            # We ignore the return value
            fix_maintainer(self.pkg.dsc["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                                 % (dsc_filename, self.pkg.dsc["maintainer"], msg))

        # Validate the build-depends field(s)
        for field_name in [ "build-depends", "build-depends-indep" ]:
            field = self.pkg.dsc.get(field_name)
            if field:
                # Have apt try to parse them...
                try:
                    apt_pkg.ParseSrcDepends(field)
                except:
                    self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))

        # Ensure the version number in the .dsc matches the version number in the .changes
        epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        changes_version = self.pkg.files[dsc_filename]["version"]

        if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
            self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))

        # Ensure the Files field contains only what's expected
        self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))

        # Ensure source is newer than existing source in target suites
        session = DBConn().session()
        self.check_source_against_db(dsc_filename, session)
        self.check_dsc_against_db(dsc_filename, session)
        session.close()

        # Finally, check if we're missing any files
        for f in self.later_check_files:
            self.rejects.append("Could not find file %s referenced in changes" % f)

        return True

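    # Illustrative usage sketch (added for exposition): check_dsc() runs after
    # check_files(), which tagged the .dsc entry in pkg.files.  A False return
    # means the .dsc was unusable; either way, reasons accumulate in rejects:
    #
    #     if not u.check_dsc(action=False):
    #         print "unusable .dsc:"
    #     for r in u.rejects:
    #         print "  %s" % r
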
    ###########################################################################

    def get_changelog_versions(self, source_dir):
        """Extracts the source package and (optionally) grabs the
        version history out of debian/changelog for the BTS."""

        cnf = Config()

        # Find the .dsc (again)
        dsc_filename = None
        for f in self.pkg.files.keys():
            if self.pkg.files[f]["type"] == "dsc":
                dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            return

        # Create a symlink mirror of the source files in our temporary directory
        for f in self.pkg.files.keys():
            m = re_issource.match(f)
            if m:
                src = os.path.join(source_dir, f)
                # If a file is missing for whatever reason, give up.
                if not os.path.exists(src):
                    return
                ftype = m.group(3)
                if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
                   self.pkg.orig_files[f].has_key("path"):
                    continue
                dest = os.path.join(os.getcwd(), f)
                os.symlink(src, dest)

        # If the orig files are not a part of the upload, create symlinks to the
        # existing copies.
        for orig_file in self.pkg.orig_files.keys():
            if not self.pkg.orig_files[orig_file].has_key("path"):
                continue
            dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
            os.symlink(self.pkg.orig_files[orig_file]["path"], dest)

        # Extract the source
        cmd = "dpkg-source -sn -x %s" % (dsc_filename)
        (result, output) = commands.getstatusoutput(cmd)
        if (result != 0):
            self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
            self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
            return

        if not cnf.Find("Dir::Queue::BTSVersionTrack"):
            return

        # Get the upstream version
        upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        if re_strip_revision.search(upstr_version):
            upstr_version = re_strip_revision.sub('', upstr_version)

        # Ensure the changelog file exists
        changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
        if not os.path.exists(changelog_filename):
            self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
            return

        # Parse the changelog
        self.pkg.dsc["bts changelog"] = ""
        changelog_file = utils.open_file(changelog_filename)
        for line in changelog_file.readlines():
            m = re_changelog_versions.match(line)
            if m:
                self.pkg.dsc["bts changelog"] += line
        changelog_file.close()

        # Check we found at least one revision in the changelog
        if not self.pkg.dsc["bts changelog"]:
            self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))

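    # Illustrative note (added for exposition): after "dpkg-source -sn -x
    # foo_1.2-3.dsc" succeeds in the temporary directory, the changelog is
    # looked up at foo-1.2/debian/changelog (source name plus upstream
    # version), which is the path built above.
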
1189     def check_source(self):
1190         # Bail out if:
1191         #    a) there's no source
1192         if not self.pkg.changes["architecture"].has_key("source"):
1193             return
1194
1195         tmpdir = utils.temp_dirname()
1196
1197         # Move into the temporary directory
1198         cwd = os.getcwd()
1199         os.chdir(tmpdir)
1200
1201         # Get the changelog version history
1202         self.get_changelog_versions(cwd)
1203
1204         # Move back and cleanup the temporary tree
1205         os.chdir(cwd)
1206
1207         try:
1208             shutil.rmtree(tmpdir)
1209         except OSError, e:
1210             if e.errno != errno.EACCES:
1211                 print "foobar"
1212                 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1213
1214             self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1215             # We probably have u-r or u-w directories so chmod everything
1216             # and try again.
1217             cmd = "chmod -R u+rwx %s" % (tmpdir)
1218             result = os.system(cmd)
1219             if result != 0:
1220                 utils.fubar("'%s' failed with result %s." % (cmd, result))
1221             shutil.rmtree(tmpdir)
1222         except Exception, e:
1223             print "foobar2 (%s)" % e
1224             utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1225
1226     ###########################################################################
1227     def ensure_hashes(self):
1228         # Make sure we recognise the format of the Files: field in the .changes
1229         format = self.pkg.changes.get("format", "0.0").split(".", 1)
1230         if len(format) == 2:
1231             format = int(format[0]), int(format[1])
1232         else:
1233             format = int(float(format[0])), 0
1234
1235         # We need to deal with the original changes blob, as the fields we need
1236         # might not be in the changes dict serialised into the .dak anymore.
1237         orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1238
1239         # Copy the checksums over to the current changes dict.  This will keep
1240         # the existing modifications to it intact.
1241         for field in orig_changes:
1242             if field.startswith('checksums-'):
1243                 self.pkg.changes[field] = orig_changes[field]
1244
1245         # Check for unsupported hashes
1246         for j in utils.check_hash_fields(".changes", self.pkg.changes):
1247             self.rejects.append(j)
1248
1249         for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1250             self.rejects.append(j)
1251
1252         # If the changes format predates the version in which a given hash
1253         # appeared, calculate it ourselves rather than require it in the file
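        # (Each utils.known_hashes entry is assumed to be a (name, function,
        # introduced-in-version) tuple - illustratively something like
        # ("sha1", apt_pkg.sha1sum, (1, 8)).)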
1254         for hashname, hashfunc, version in utils.known_hashes:
1255             # TODO: Move _ensure_changes_hash into this class
1256             for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1257                 self.rejects.append(j)
1258             if "source" in self.pkg.changes["architecture"]:
1259                 # TODO: Move _ensure_dsc_hash into this class
1260                 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1261                     self.rejects.append(j)
1262
1263     def check_hashes(self):
1264         for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1265             self.rejects.append(m)
1266
1267         for m in utils.check_size(".changes", self.pkg.files):
1268             self.rejects.append(m)
1269
1270         for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1271             self.rejects.append(m)
1272
1273         for m in utils.check_size(".dsc", self.pkg.dsc_files):
1274             self.rejects.append(m)
1275
1276         self.ensure_hashes()
1277
1278     ###########################################################################
1279
1280     def ensure_orig(self, target_dir='.', session=None):
1281         """
1282         Ensures that all orig files mentioned in the .dsc are present
1283         in target_dir. If they do not exist, they are symlinked into place.
1284
1285         A list of the symlinks that were created is returned (so they
1286         can be removed later).
1287         """
1288
1289         symlinked = []
1290         cnf = Config()
1291
1292         for filename, entry in self.pkg.dsc_files.iteritems():
1293             if not re_is_orig_source.match(filename):
1294                 # File is not an orig; ignore
1295                 continue
1296
1297             if os.path.exists(filename):
1298                 # File exists, no need to continue
1299                 continue
1300
1301             def symlink_if_valid(path):
1302                 f = utils.open_file(path)
1303                 md5sum = apt_pkg.md5sum(f)
1304                 f.close()
1305
1306                 fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
1307                 expected = (int(entry['size']), entry['md5sum'])
1308
1309                 if fingerprint != expected:
1310                     return False
1311
1312                 dest = os.path.join(target_dir, filename)
1313
1314                 os.symlink(path, dest)
1315                 symlinked.append(dest)
1316
1317                 return True
1318
1319             session_ = session
1320             if session is None:
1321                 session_ = DBConn().session()
1322
1323             found = False
1324
1325             # Look in the pool
1326             for poolfile in get_poolfile_like_name(filename, session_):
1327                 poolfile_path = os.path.join(
1328                     poolfile.location.path, poolfile.filename
1329                 )
1330
1331                 if symlink_if_valid(poolfile_path):
1332                     found = True
1333                     break
1334
1335             if session is None:
1336                 session_.close()
1337
1338             if found:
1339                 continue
1340
1341             # Look in some other queues for the file
1342             queues = ('New', 'Byhand', 'ProposedUpdates',
1343                 'OldProposedUpdates', 'Embargoed', 'Unembargoed')
1344
1345             for queue in queues:
1346                 if not cnf.get('Dir::Queue::%s' % queue):
1347                     continue
1348
1349                 queuefile_path = os.path.join(
1350                     cnf['Dir::Queue::%s' % queue], filename
1351                 )
1352
1353                 if not os.path.exists(queuefile_path):
1354                     # Does not exist in this queue
1355                     continue
1356
1357                 if symlink_if_valid(queuefile_path):
1358                     break
1359
1360         return symlinked
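
    # A minimal usage sketch (hypothetical caller; 'upload' stands for an
    # Upload instance) - create the links, run whatever needs the orig
    # tarballs present, then remove them again:
    #
    #     symlinked = upload.ensure_orig(target_dir='.')
    #     try:
    #         pass  # e.g. run lintian against the unpacked source
    #     finally:
    #         for link in symlinked:
    #             os.unlink(link)
    #
    # check_lintian() below follows exactly this pattern.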
1361
1362     ###########################################################################
1363
1364     def check_lintian(self):
1365         """
1366         Extends self.rejects by checking the output of lintian against tags
1367         specified in Dinstall::LintianTags.
1368         """
1369
1370         cnf = Config()
1371
1372         # Don't reject binary uploads
1373         if not self.pkg.changes['architecture'].has_key('source'):
1374             return
1375
1376         # Only check some distributions
1377         for dist in ('unstable', 'experimental'):
1378             if dist in self.pkg.changes['distribution']:
1379                 break
1380         else:
1381             return
1382
1383         # If we do not have a tagfile, don't do anything
1384         tagfile = cnf.get("Dinstall::LintianTags")
1385         if tagfile is None:
1386             return
1387
1388         # Parse the yaml file
1389         sourcefile = file(tagfile, 'r')
1390         sourcecontent = sourcefile.read()
1391         sourcefile.close()
1392
1393         try:
1394             lintiantags = yaml.load(sourcecontent)['lintian']
1395         except yaml.YAMLError, msg:
1396             utils.fubar("Cannot read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
1397             return
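
        # The tag file is expected to be a YAML mapping with a top-level
        # 'lintian' key whose values are lists of tags, roughly (illustrative
        # category names and tags):
        #
        #     lintian:
        #       nonfatal:
        #         - some-tag
        #       fatal:
        #         - some-other-tag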
1398
1399         # Try and find all orig mentioned in the .dsc
1400         symlinked = self.ensure_orig()
1401
1402         # Setup the input file for lintian
1403         fd, temp_filename = utils.temp_filename()
1404         temptagfile = os.fdopen(fd, 'w')
1405         for tags in lintiantags.values():
1406             temptagfile.writelines(['%s\n' % x for x in tags])
1407         temptagfile.close()
1408
1409         try:
1410             cmd = "lintian --show-overrides --tags-from-file %s %s" % \
1411                 (temp_filename, self.pkg.changes_file)
1412
1413             result, output = commands.getstatusoutput(cmd)
1414         finally:
1415             # Remove our tempfile and any symlinks we created
1416             os.unlink(temp_filename)
1417
1418             for symlink in symlinked:
1419                 os.unlink(symlink)
1420
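        # lintian conventionally exits 0 on a clean run, 1 when it found
        # problems and 2 on an internal failure; only the last case is
        # unexpected enough to warn about here.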
1421         if result == 2:
1422             utils.warn("lintian failed for %s [return code: %s]." % \
1423                 (self.pkg.changes_file, result))
1424             utils.warn(utils.prefix_multi_line_string(output, \
1425                 " [possible output:] "))
1426
1427         def log(*txt):
1428             if self.logger:
1429                 self.logger.log(
1430                     [self.pkg.changes_file, "check_lintian"] + list(txt)
1431                 )
1432
1433         # Generate messages
1434         parsed_tags = parse_lintian_output(output)
1435         self.rejects.extend(
1436             generate_reject_messages(parsed_tags, lintiantags, log=log)
1437         )
1438
1439     ###########################################################################
1440     def check_urgency(self):
1441         cnf = Config()
1442         if self.pkg.changes["architecture"].has_key("source"):
1443             if not self.pkg.changes.has_key("urgency"):
1444                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1445             self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1446             if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1447                 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1448                                      (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1449                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
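
    # For instance (illustrative values): with Urgency::Default set to "low"
    # and Urgency::Valid listing "low medium high", an upload declaring
    # "Urgency: superhigh" is downgraded to "low" and a warning is recorded.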
1450
1451     ###########################################################################
1452
1453     # Sanity check the time stamps of files inside debs.
1454     # [Files in the near future cause ugly warnings and extreme time
1455     #  travel can cause errors on extraction]
1456
1457     def check_timestamps(self):
1458         Cnf = Config()
1459
1460         future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1461         past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1462         tar = TarTime(future_cutoff, past_cutoff)
1463
1464         for filename, entry in self.pkg.files.items():
1465             if entry["type"] == "deb":
1466                 tar.reset()
1467                 try:
1468                     deb_file = utils.open_file(filename)
1469                     apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
1470                     deb_file.seek(0)
1471                     try:
1472                         apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
1473                     except SystemError, e:
1474                         # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
1475                         if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
1476                             raise
1477                         deb_file.seek(0)
1478                         apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")
1479
1480                     deb_file.close()
1481
1482                     future_files = tar.future_files.keys()
1483                     if future_files:
1484                         num_future_files = len(future_files)
1485                         future_file = future_files[0]
1486                         future_date = tar.future_files[future_file]
1487                         self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1488                                % (filename, num_future_files, future_file, time.ctime(future_date)))
1489
1490                     ancient_files = tar.ancient_files.keys()
1491                     if ancient_files:
1492                         num_ancient_files = len(ancient_files)
1493                         ancient_file = ancient_files[0]
1494                         ancient_date = tar.ancient_files[ancient_file]
1495                         self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1496                                % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1497                 except Exception, e:
1498                     self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, type(e).__name__, e))
1499
1500     def check_if_upload_is_sponsored(self, uid_email, uid_name):
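        # An upload counts as sponsored when the signing key belongs to
        # neither the Maintainer nor the Changed-By person in the .changes.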
1501         if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1502             sponsored = False
1503         elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1504             sponsored = False
1505             if uid_name == "":
1506                 sponsored = True
1507         else:
1508             sponsored = True
1509             if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1510                 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1511                 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1512                     self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1513                     self.pkg.changes["sponsoremail"] = uid_email
1514
1515         return sponsored
1516
1517
1518     ###########################################################################
1519     # check_signed_by_key checks
1520     ###########################################################################
1521
1522     def check_signed_by_key(self):
1523         """Ensure the .changes is signed by an authorized uploader."""
1524         session = DBConn().session()
1525
1526         # First of all we check that the person has proper upload permissions
1527         # and that this upload isn't blocked
1528         fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1529
1530         if fpr is None:
1531             self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1532             return
1533
1534         # TODO: Check that import-keyring adds UIDs properly
1535         if not fpr.uid:
1536             self.rejects.append("Cannot find uid for fingerprint %s.  Please contact ftpmaster@debian.org" % fpr.fingerprint)
1537             return
1538
1539         # Check that the fingerprint which uploaded has permission to do so
1540         self.check_upload_permissions(fpr, session)
1541
1542         # Check that this package is not in a transition
1543         self.check_transition(session)
1544
1545         session.close()
1546
1547
1548     def check_upload_permissions(self, fpr, session):
1549         # Check any one-off upload blocks
1550         self.check_upload_blocks(fpr, session)
1551
1552         # Start with DM, which unfortunately is a special case that we
1553         # must check first
1554         # (keys with no source access get more access than DMs in one
1555         #  way; DMs can only upload for their packages whether source
1556         #  or binary, whereas keys with no access might be able to
1557         #  upload some binaries)
1558         if fpr.source_acl.access_level == 'dm':
1559             self.check_dm_upload(fpr, session)
1560         else:
1561             # Check source-based permissions for other types
1562             if self.pkg.changes["architecture"].has_key("source") and \
1563                 fpr.source_acl.access_level is None:
1564                 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1565                 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1566                 self.rejects.append(rej)
1567                 return
1568             # If not a DM, we allow full upload rights
1569             uid_email = "%s@debian.org" % (fpr.uid.uid)
1570             self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1571
1572
1573         # Check binary upload permissions
1574         # By this point we know that DMs can't have got here unless they
1575         # are allowed to deal with the package concerned so just apply
1576         # normal checks
1577         if fpr.binary_acl.access_level == 'full':
1578             return
1579
1580         # Otherwise we're in the map case
1581         tmparches = self.pkg.changes["architecture"].copy()
1582         tmparches.pop('source', None)
1583
1584         for bam in fpr.binary_acl_map:
1585             tmparches.pop(bam.architecture.arch_string, None)
1586
1587         if len(tmparches.keys()) > 0:
1588             if fpr.binary_reject:
1589                 rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1590                 rej += "\narchitectures involved are: %s" % ",".join(tmparches.keys())
1591                 self.rejects.append(rej)
1592             else:
1593                 # TODO: This is where we'll implement reject vs throw away binaries later
1594                 rej = "Uhm.  I'm meant to throw away the binaries now but that's not implemented yet"
1595                 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1596                 rej += "\nFingerprint: %s" % (fpr.fingerprint)
1597                 self.rejects.append(rej)
1598
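    # Illustrative example: a key whose binary ACL is "map" with entries for
    # amd64 and i386 may upload binaries for those architectures only;
    # whatever is left in tmparches after the loop above is not permitted.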
1599
1600     def check_upload_blocks(self, fpr, session):
1601         """Check whether any upload blocks apply to this source, source
1602            version, uid / fpr combination"""
1603
1604         def block_rej_template(fb):
1605             rej = 'Manual upload block in place for package %s' % fb.source
1606             if fb.version is not None:
1607                 rej += ', version %s' % fb.version
1608             return rej
1609
1610         for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1611             # version is None if the block applies to all versions
1612             if fb.version is None or fb.version == self.pkg.changes['version']:
1613                 # Check both fpr and uid - either is enough to cause a reject
1614                 if fb.fpr is not None:
1615                     if fb.fpr.fingerprint == fpr.fingerprint:
1616                         self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1617                 if fb.uid is not None:
1618                     if fb.uid == fpr.uid:
1619                         self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1620
1621
1622     def check_dm_upload(self, fpr, session):
1623         # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
1624         ## none of the uploaded packages are NEW
1625         rej = False
1626         for f in self.pkg.files.keys():
1627             if self.pkg.files[f].has_key("byhand"):
1628                 self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
1629                 rej = True
1630             if self.pkg.files[f].has_key("new"):
1631                 self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
1632                 rej = True
1633
1634         if rej:
1635             return
1636
1637         ## the most recent version of the package uploaded to unstable or
1638         ## experimental includes the field "DM-Upload-Allowed: yes" in the source
1639         ## section of its control file
1640         q = session.query(DBSource).filter_by(source=self.pkg.changes["source"])
1641         q = q.join(SrcAssociation)
1642         q = q.join(Suite).filter(Suite.suite_name.in_(['unstable', 'experimental']))
1643         q = q.order_by(desc('source.version')).limit(1)
1644
1645         r = q.all()
1646
1647         if len(r) != 1:
1648             rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
1649             self.rejects.append(rej)
1650             return
1651
1652         r = r[0]
1653         if not r.dm_upload_allowed:
1654             rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
1655             self.rejects.append(rej)
1656             return
1657
1658         ## the Maintainer: field of the uploaded .changes file corresponds with
1659         ## the owner of the key used (ie, non-developer maintainers may not sponsor
1660         ## uploads)
1661         if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
1662             self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
1663
1664         ## the most recent version of the package uploaded to unstable or
1665         ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
1666         ## non-developer maintainers cannot NMU or hijack packages)
1667
1668         # srcuploaders includes the maintainer
1669         accept = False
1670         for sup in r.srcuploaders:
1671             (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
1672             # Eww - I hope we never have two people with the same name in Debian
1673             if email == fpr.uid.uid or name == fpr.uid.name:
1674                 accept = True
1675                 break
1676
1677         if not accept:
1678             self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
1679             return
1680
1681         ## none of the packages are being taken over from other source packages
1682         for b in self.pkg.changes["binary"].keys():
1683             for suite in self.pkg.changes["distribution"].keys():
1684                 q = session.query(DBSource)
1685                 q = q.join(DBBinary).filter_by(package=b)
1686                 q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)
1687
1688                 for s in q.all():
1689                     if s.source != self.pkg.changes["source"]:
1690                         self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s, suite))
1691
1692
1693
1694     def check_transition(self, session):
1695         cnf = Config()
1696
1697         sourcepkg = self.pkg.changes["source"]
1698
1699         # No sourceful upload -> no need to do anything else, direct return
1700         # We only check sourceful uploads to unstable, not experimental or
1701         # those going to some proposed-updates queue
1702         if "source" not in self.pkg.changes["architecture"] or \
1703            "unstable" not in self.pkg.changes["distribution"]:
1704             return
1705
1706         # Also, only check if a transitions file is defined (and actually
1707         # exists).
1708         transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1709         if transpath == "" or not os.path.exists(transpath):
1710             return
1711
1712         # Parse the yaml file
1713         sourcefile = file(transpath, 'r')
1714         sourcecontent = sourcefile.read()
        sourcefile.close()
1715         try:
1716             transitions = yaml.load(sourcecontent)
1717         except yaml.YAMLError, msg:
1718             # This shouldn't happen, there is a wrapper to edit the file which
1719             # checks it, but we prefer to be safe than ending up rejecting
1720             # everything.
1721             utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1722             return
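
        # The transitions file is a YAML mapping of transition names to the
        # keys used below, roughly (illustrative values):
        #
        #     libfoo2:
        #       reason: "libfoo2 ABI transition"
        #       source: libfoo
        #       new: 2.0-1
        #       rm: Some Releaseteam Member
        #       packages:
        #         - bar
        #         - baz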
1723
1724         # Now look through all defined transitions
1725         for trans in transitions:
1726             t = transitions[trans]
1727             source = t["source"]
1728             expected = t["new"]
1729
1730             # Will be None if nothing is in testing.
1731             current = get_source_in_suite(source, "testing", session)
1732             if current is not None:
1733                 compare = apt_pkg.VersionCompare(current.version, expected)
1734
1735             if current is None or compare < 0:
1736                 # This is still valid, the current version in testing is older than
1737                 # the new version we wait for, or there is none in testing yet
1738
1739                 # Check if the source we look at is affected by this.
1740                 if sourcepkg in t['packages']:
1741                     # The source is affected, let's reject it.
1742
1743                     rejectmsg = "%s: part of the %s transition.\n\n" % (
1744                         sourcepkg, trans)
1745
1746                     if current is not None:
1747                         currentlymsg = "at version %s" % (current.version)
1748                     else:
1749                         currentlymsg = "not present in testing"
1750
1751                     rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1752
1753                     rejectmsg += "\n".join(textwrap.wrap("""Your package
1754 is part of a testing transition designed to get %s migrated (it is
1755 currently %s, we need version %s).  This transition is managed by the
1756 Release Team, and %s is the Release-Team member responsible for it.
1757 Please mail debian-release@lists.debian.org or contact %s directly if you
1758 need further assistance.  You might want to upload to experimental until this
1759 transition is done."""
1760                             % (source, currentlymsg, expected, t["rm"], t["rm"])))
1761
1762                     self.rejects.append(rejectmsg)
1763                     return
1764
1765     ###########################################################################
1766     # End check_signed_by_key checks
1767     ###########################################################################
1768
1769     def build_summaries(self):
1770         """ Build a summary of changes the upload introduces. """
1771
1772         (byhand, new, summary, override_summary) = self.pkg.file_summary()
1773
1774         short_summary = summary
1775
1776         # This is for direport's benefit...
1777         f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1778
1779         if byhand or new:
1780             summary += "Changes: " + f
1781
1782         summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1783
1784         summary += self.announce(short_summary, 0)
1785
1786         return (summary, short_summary)
1787
1788     ###########################################################################
1789
1790     def close_bugs(self, summary, action):
1791         """
1792         Send mail to close bugs as instructed by the closes field in the changes file.
1793         Also add a line to summary if any work was done.
1794
1795         @type summary: string
1796         @param summary: summary text, as given by L{build_summaries}
1797
1798         @type action: bool
1799         @param action: if set to false, no real action will be taken
1800
1801         @rtype: string
1802         @return: summary. If action was taken, extended by the list of closed bugs.
1803
1804         """
1805
1806         template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1807
1808         bugs = self.pkg.changes["closes"].keys()
1809
1810         if not bugs:
1811             return summary
1812
1813         bugs.sort()
1814         summary += "Closing bugs: "
1815         for bug in bugs:
1816             summary += "%s " % (bug)
1817             if action:
1818                 self.update_subst()
1819                 self.Subst["__BUG_NUMBER__"] = bug
1820                 if self.pkg.changes["distribution"].has_key("stable"):
1821                     self.Subst["__STABLE_WARNING__"] = """
1822 Note that this package is not part of the released stable Debian
1823 distribution.  It may have dependencies on other unreleased software,
1824 or other instabilities.  Please take care if you wish to install it.
1825 The update will eventually make its way into the next released Debian
1826 distribution."""
1827                 else:
1828                     self.Subst["__STABLE_WARNING__"] = ""
1829                 mail_message = utils.TemplateSubst(self.Subst, template)
1830                 utils.send_mail(mail_message)
1831
1832                 # Clear up after ourselves
1833                 del self.Subst["__BUG_NUMBER__"]
1834                 del self.Subst["__STABLE_WARNING__"]
1835
1836         if action and self.logger:
1837             self.logger.log(["closing bugs"] + bugs)
1838
1839         summary += "\n"
1840
1841         return summary
1842
1843     ###########################################################################
1844
1845     def announce(self, short_summary, action):
1846         """
1847         Send an announce mail about a new upload.
1848
1849         @type short_summary: string
1850         @param short_summary: Short summary text to include in the mail
1851
1852         @type action: bool
1853         @param action: if set to false, no real action will be taken
1854
1855         @rtype: string
1856         @return: text string describing the action taken
1857
1858         """
1859
1860         cnf = Config()
1861         announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
1862
1863         # Only do announcements for source uploads with a recent dpkg-dev installed
1864         if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
1865            self.pkg.changes["architecture"].has_key("source"):
1866             return ""
1867
1868         lists_done = {}
1869         summary = ""
1870
1871         self.Subst["__SHORT_SUMMARY__"] = short_summary
1872
1873         for dist in self.pkg.changes["distribution"].keys():
1874             announce_list = cnf.Find("Suite::%s::Announce" % (dist))
1875             if announce_list == "" or lists_done.has_key(announce_list):
1876                 continue
1877
1878             lists_done[announce_list] = 1
1879             summary += "Announcing to %s\n" % (announce_list)
1880
1881             if action:
1882                 self.update_subst()
1883                 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
1884                 if cnf.get("Dinstall::TrackingServer") and \
1885                    self.pkg.changes["architecture"].has_key("source"):
1886                     trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
1887                     self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
1888
1889                 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
1890                 utils.send_mail(mail_message)
1891
1892                 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
1893
1894         if cnf.FindB("Dinstall::CloseBugs"):
1895             summary = self.close_bugs(summary, action)
1896
1897         del self.Subst["__SHORT_SUMMARY__"]
1898
1899         return summary
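
    # Illustratively: with Suite::Unstable::Announce set to a list address,
    # a sourceful upload to unstable is announced there exactly once, even
    # if several target suites share the same list (lists_done deduplicates).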
1900
1901     ###########################################################################
1902     @session_wrapper
1903     def accept (self, summary, short_summary, session=None):
1904         """
1905         Accept an upload.
1906
1907         This moves all files referenced from the .changes into the pool,
1908         sends the accepted mail, announces to lists, closes bugs and
1909         also checks for override disparities. If enabled it will write out
1910         the version history for the BTS Version Tracking and will finally call
1911         L{queue_build}.
1912
1913         @type summary: string
1914         @param summary: Summary text
1915
1916         @type short_summary: string
1917         @param short_summary: Short summary
1918         """
1919
1920         cnf = Config()
1921         stats = SummaryStats()
1922
1923         print "Installing."
1924         self.logger.log(["installing changes", self.pkg.changes_file])
1925
1926         poolfiles = []
1927
1928         # Add the .dsc file to the DB first
1929         for newfile, entry in self.pkg.files.items():
1930             if entry["type"] == "dsc":
1931                 source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
1932                 for j in pfs:
1933                     poolfiles.append(j)
1934
1935         # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
1936         for newfile, entry in self.pkg.files.items():
1937             if entry["type"] == "deb":
1938                 poolfiles.append(add_deb_to_db(self, newfile, session))
1939
1940         # If this is a sourceful diff-only upload that is moving
1941         # cross-component we need to copy the .orig files into the new
1942         # component too for the same reasons as above.
1943         # XXX: mhy: I think this should be in add_dsc_to_db
1944         if self.pkg.changes["architecture"].has_key("source"):
1945             for orig_file in self.pkg.orig_files.keys():
1946                 if not self.pkg.orig_files[orig_file].has_key("id"):
1947                     continue # Skip if it's not in the pool
1948                 orig_file_id = self.pkg.orig_files[orig_file]["id"]
1949                 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
1950                     continue # Skip if the location didn't change
1951
1952                 # Do the move
1953                 oldf = get_poolfile_by_id(orig_file_id, session)
1954                 old_filename = os.path.join(oldf.location.path, oldf.filename)
1955                 old_dat = {'size': oldf.filesize,   'md5sum': oldf.md5sum,
1956                            'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
1957
1958                 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
1959
1960                 # TODO: Care about size/md5sum collisions etc
1961                 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
1962
1963                 # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
1964                 if newf is None:
1965                     utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
1966                     newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
1967
1968                     session.flush()
1969
1970                     # Don't reference the old file from this changes
1971                     for p in poolfiles:
1972                         if p.file_id == oldf.file_id:
1973                             poolfiles.remove(p)
1974
1975                     poolfiles.append(newf)
1976
1977                     # Fix up the DSC references
1978                     toremove = []
1979
1980                     for df in source.srcfiles:
1981                         if df.poolfile.file_id == oldf.file_id:
1982                             # Add a new DSC entry and mark the old one for deletion
1983                             # Don't do it in the loop so we don't change the thing we're iterating over
1984                             newdscf = DSCFile()
1985                             newdscf.source_id = source.source_id
1986                             newdscf.poolfile_id = newf.file_id
1987                             session.add(newdscf)
1988
1989                             toremove.append(df)
1990
1991                     for df in toremove:
1992                         session.delete(df)
1993
1994                     # Flush our changes
1995                     session.flush()
1996
1997                     # Make sure that our source object is up-to-date
1998                     session.expire(source)
1999
2000         # Add changelog information to the database
2001         self.store_changelog()
2002
2003         # Install the files into the pool
2004         for newfile, entry in self.pkg.files.items():
2005             destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
2006             utils.move(newfile, destination)
2007             self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
2008             stats.accept_bytes += float(entry["size"])
2009
2010         # Copy the .changes file across for suites which need it.
2011         copy_changes = {}
2012         for suite_name in self.pkg.changes["distribution"].keys():
2013             if cnf.has_key("Suite::%s::CopyChanges" % (suite_name)):
2014                 copy_changes[cnf["Suite::%s::CopyChanges" % (suite_name)]] = ""
2015
2016         for dest in copy_changes.keys():
2017             utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
2018
2019         # We're done - commit the database changes
2020         session.commit()
2021         # Our SQL session will automatically start a new transaction after
2022         # the last commit
2023
2024         # Move the .changes into the 'done' directory
2025         utils.move(self.pkg.changes_file,
2026                    os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))
2027
2028         if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
2029             UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
2030
2031         self.update_subst()
2032         self.Subst["__SUMMARY__"] = summary
2033         mail_message = utils.TemplateSubst(self.Subst,
2034                                            os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
2035         utils.send_mail(mail_message)
2036         self.announce(short_summary, 1)
2037
2038         ## Helper stuff for DebBugs Version Tracking
2039         if cnf.Find("Dir::Queue::BTSVersionTrack"):
2040             if self.pkg.changes["architecture"].has_key("source"):
2041                 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2042                 version_history = os.fdopen(fd, 'w')
2043                 version_history.write(self.pkg.dsc["bts changelog"])
2044                 version_history.close()
2045                 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2046                                       self.pkg.changes_file[:-8]+".versions")
2047                 os.rename(temp_filename, filename)
2048                 os.chmod(filename, 0644)
2049
2050             # Write out the binary -> source mapping.
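            # One line per binary, in the form "package version architecture
            # source-package source-version" (matching the join below).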
2051             (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2052             debinfo = os.fdopen(fd, 'w')
2053             for name, entry in sorted(self.pkg.files.items()):
2054                 if entry["type"] == "deb":
2055                     line = " ".join([entry["package"], entry["version"],
2056                                      entry["architecture"], entry["source package"],
2057                                      entry["source version"]])
2058                     debinfo.write(line+"\n")
2059             debinfo.close()
2060             filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2061                                   self.pkg.changes_file[:-8]+".debinfo")
2062             os.rename(temp_filename, filename)
2063             os.chmod(filename, 0644)
2064
2065         session.commit()
2066
2067         # Set up our copy queues (e.g. buildd queues)
2068         for suite_name in self.pkg.changes["distribution"].keys():
2069             suite = get_suite(suite_name, session)
2070             for q in suite.copy_queues:
2071                 for f in poolfiles:
2072                     q.add_file_from_pool(f)
2073
2074         session.commit()
2075
2076         # Finally...
2077         stats.accept_count += 1
2078
2079     def check_override(self):
2080         """
2081         Checks override entries for validity. Mails "Override disparity" warnings,
2082         if that feature is enabled.
2083
2084         Abandons the check if
2085           - override disparity checks are disabled
2086           - mail sending is disabled
2087         """
2088
2089         cnf = Config()
2090
2091         # Abandon the check if override disparity checks have been disabled
2092         if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
2093             return
2094
2095         summary = self.pkg.check_override()
2096
2097         if summary == "":
2098             return
2099
2100         overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2101
2102         self.update_subst()
2103         self.Subst["__SUMMARY__"] = summary
2104         mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2105         utils.send_mail(mail_message)
2106         del self.Subst["__SUMMARY__"]
2107
2108     ###########################################################################
2109
2110     def remove(self, from_dir=None):
2111         """
2112         Used (for instance) in p-u to remove the package from unchecked
2113
2114         Also removes the package from holding area.
2115         """
2116         if from_dir is None:
2117             from_dir = self.pkg.directory
2118         h = Holding()
2119
2120         for f in self.pkg.files.keys():
2121             os.unlink(os.path.join(from_dir, f))
2122             if os.path.exists(os.path.join(h.holding_dir, f)):
2123                 os.unlink(os.path.join(h.holding_dir, f))
2124
2125         os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2126         if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2127             os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2128
2129     ###########################################################################
2130
2131     def move_to_queue (self, queue):
2132         """
2133         Move files to a destination queue using the permissions in the table
2134         """
2135         h = Holding()
2136         utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2137                    queue.path, perms=int(queue.change_perms, 8))
2138         for f in self.pkg.files.keys():
2139             utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
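
    # queue.perms and queue.change_perms are assumed to be octal permission
    # strings from the database (e.g. "0644"), hence the int(x, 8) above.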
2140
2141     ###########################################################################
2142
2143     def force_reject(self, reject_files):
2144         """
2145         Forcefully move files from the current directory to the
2146         reject directory.  If any file already exists in the reject
2147         directory it will be moved to the morgue to make way for
2148         the new file.
2149
2150         @type reject_files: dict
2151         @param reject_files: file dictionary
2152
2153         """
2154
2155         cnf = Config()
2156
2157         for file_entry in reject_files:
2158             # Skip any files which don't exist or which we don't have permission to copy.
2159             if not os.access(file_entry, os.R_OK):
2160                 continue
2161
2162             dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
2163
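            # O_EXCL makes the open fail if the name already exists, so two
            # concurrent rejects can never claim the same destination file.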
2164             try:
2165                 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
2166             except OSError, e:
2167                 # File exists?  Let's find a new name by adding a number
2168                 if e.errno == errno.EEXIST:
2169                     try:
2170                         dest_file = utils.find_next_free(dest_file, 255)
2171                     except NoFreeFilenameError:
2172                         # Something's either gone badly Pete Tong, or
2173                         # someone is trying to exploit us.
2174                         utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))
2175                         return
2176
2177                     # Make sure we really got it
2178                     try:
2179                         dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2180                     except OSError, e:
2181                         # Likewise
2182                         utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
2183                         return
2184                 else:
2185                     raise
2186             # If we got here, we own the destination file, so we can
2187             # safely overwrite it.
2188             utils.move(file_entry, dest_file, 1, perms=0660)
2189             os.close(dest_fd)
2190
2191     ###########################################################################
2192     def do_reject (self, manual=0, reject_message="", notes=""):
2193         """
2194         Reject an upload. If C{manual} is true and no reject message was
2195         given, spawn an editor so the user can write one.
2196
2197         @type manual: bool
2198         @param manual: manual or automated rejection
2199
2200         @type reject_message: string
2201         @param reject_message: A reject message
2202
2203         @return: 0 normally; 1 if a manual rejection was abandoned
2204
2205         """
2206         # If we weren't given a manual rejection message, spawn an
2207         # editor so the user can add one in...
2208         if manual and not reject_message:
2209             (fd, temp_filename) = utils.temp_filename()
2210             temp_file = os.fdopen(fd, 'w')
2211             if len(notes) > 0:
2212                 for note in notes:
2213                     temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
2214                                     % (note.author, note.version, note.notedate, note.comment))
2215             temp_file.close()
2216             editor = os.environ.get("EDITOR","vi")
2217             answer = 'E'
2218             while answer == 'E':
2219                 os.system("%s %s" % (editor, temp_filename))
2220                 temp_fh = utils.open_file(temp_filename)
2221                 reject_message = "".join(temp_fh.readlines())
2222                 temp_fh.close()
2223                 print "Reject message:"
2224                 print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
2225                 prompt = "[R]eject, Edit, Abandon, Quit ?"
2226                 answer = "XXX"
2227                 while prompt.find(answer) == -1:
2228                     answer = utils.our_raw_input(prompt)
2229                     m = re_default_answer.search(prompt)
2230                     if answer == "":
2231                         answer = m.group(1)
2232                     answer = answer[:1].upper()
2233             os.unlink(temp_filename)
2234             if answer == 'A':
2235                 return 1
2236             elif answer == 'Q':
2237                 sys.exit(0)
2238
2239         print "Rejecting.\n"
2240
2241         cnf = Config()
2242
2243         reason_filename = self.pkg.changes_file[:-8] + ".reason"
2244         reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2245
2246         # Move all the files into the reject directory
2247         reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2248         self.force_reject(reject_files)
2249
2250         # If we fail here someone is probably trying to exploit the race
2251         # so let's just raise an exception ...
2252         if os.path.exists(reason_filename):
2253             os.unlink(reason_filename)
2254         reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2255
2256         rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
2257
2258         self.update_subst()
2259         if not manual:
2260             self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2261             self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2262             self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2263             os.write(reason_fd, reject_message)
2264             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2265         else:
2266             # Build up the rejection email
2267             user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2268             self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2269             self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2270             self.Subst["__REJECT_MESSAGE__"] = ""
2271             self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2272             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2273             # Write the rejection email out as the <foo>.reason file
2274             os.write(reason_fd, reject_mail_message)
2275
2276         del self.Subst["__REJECTOR_ADDRESS__"]
2277         del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2278         del self.Subst["__CC__"]
2279
2280         os.close(reason_fd)
2281
2282         # Send the rejection mail
2283         utils.send_mail(reject_mail_message)
2284
2285         if self.logger:
2286             self.logger.log(["rejected", self.pkg.changes_file])
2287
2288         return 0
2289
2290     ################################################################################
2291     def in_override_p(self, package, component, suite, binary_type, filename, session):
2292         """
2293         Check if a package already has override entries in the DB
2294
2295         @type package: string
2296         @param package: package name
2297
2298         @type component: string
2299         @param component: component name
2300
2301         @type suite: string
2302         @param suite: suite name
2303
2304         @type binary_type: string
2305         @param binary_type: type of the package
2306
2307         @type filename: string
2308         @param filename: filename we check
2309
2310         @return: the database result. But no one cares anyway.
2311
2312         """
2313
2314         cnf = Config()
2315
2316         if binary_type == "": # must be source
2317             file_type = "dsc"
2318         else:
2319             file_type = binary_type
2320
2321         # Override suite name; used for example with proposed-updates
2322         if cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
2323             suite = cnf["Suite::%s::OverrideSuite" % (suite)]
2324
2325         result = get_override(package, suite, component, file_type, session)
2326
2327         # If checking for a source package fall back on the binary override type
2328         if file_type == "dsc" and len(result) < 1:
2329             result = get_override(package, suite, component, ['deb', 'udeb'], session)
2330
2331         # Remember the section and priority so we can check them later if appropriate
2332         if len(result) > 0:
2333             result = result[0]
2334             self.pkg.files[filename]["override section"] = result.section.section
2335             self.pkg.files[filename]["override priority"] = result.priority.priority
2336             return result
2337
2338         return None
2339
2340     ################################################################################
2341     def get_anyversion(self, sv_list, suite):
2342         """
2343         @type sv_list: list
2344         @param sv_list: list of (suite, version) tuples to check
2345
2346         @type suite: string
2347         @param suite: suite name
2348
2349         @return: the highest matching version, or C{None} if none was found
2350         """
2351         Cnf = Config()
2352         anyversion = None
2353         anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
2354         for (s, v) in sv_list:
2355             if s in [ x.lower() for x in anysuite ]:
2356                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2357                     anyversion = v
2358
2359         return anyversion
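
    # Illustrative example: with sv_list [("unstable", "1.0-1"),
    # ("experimental", "1.1-1")], suite "unstable", and an Enhances list for
    # unstable containing "experimental", this returns "1.1-1" - the highest
    # version seen across the whole set.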
2360
2361     ################################################################################
2362
2363     def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2364         """
2365         @type sv_list: list
2366         @param sv_list: list of (suite, version) tuples to check
2367
2368         @type filename: string
2369         @param filename: name of the file being checked, used in messages
2370
2371         @type new_version: string
2372         @param new_version: version of the incoming package
2373
2374         Ensure versions are newer than existing packages in target
2375         suites and that cross-suite version checking rules as
2376         set out in the conf file are satisfied.
2377         """
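
        # Worked example (illustrative): a sourceful upload of foo 1.0-1 to
        # unstable while unstable already carries 1.0-1 gives vercmp == 0,
        # failing the implicit "must be newer than the target suite" rule
        # below, so the upload is rejected.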
2378
2379         cnf = Config()
2380
2381         # Check versions for each target suite
2382         for target_suite in self.pkg.changes["distribution"].keys():
2383             must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
2384             must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
2385
2386             # Enforce "must be newer than target suite" even if conffile omits it
2387             if target_suite not in must_be_newer_than:
2388                 must_be_newer_than.append(target_suite)
2389
2390             for (suite, existent_version) in sv_list:
2391                 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
2392
2393                 if suite in must_be_newer_than and sourceful and vercmp < 1:
2394                     self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2395
2396                 if suite in must_be_older_than and vercmp > -1:
2397                     cansave = 0
2398
2399                     if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2400                         # we really use the other suite, ignoring the conflicting one ...
2401                         addsuite = self.pkg.changes["distribution-version"][suite]
2402
2403                         add_version = self.get_anyversion(sv_list, addsuite)
2404                         target_version = self.get_anyversion(sv_list, target_suite)
2405
2406                         if not add_version:
2407                             # not add_version can only happen if we map to a suite
2408                             # that doesn't enhance the suite we're propup'ing from.
2409                             # so "propup-ver x a b c; map a d" is a problem only if
2410                             # d doesn't enhance a.
2411                             #
2412                             # i think we could always propagate in this case, rather
2413                             # than complaining. either way, this isn't a REJECT issue
2414                             #
2415                             # And - we really should complain to the dorks who configured dak
2416                             self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyway" % (suite, addsuite))
2417                             self.pkg.changes.setdefault("propdistribution", {})
2418                             self.pkg.changes["propdistribution"][addsuite] = 1
2419                             cansave = 1
2420                         elif not target_version:
2421                             # not target_version is true when the package is NEW
2422                             # we could just stick with the "...old version..." REJECT
2423                             # for this, I think.
2424                             self.rejects.append("Won't propagate NEW packages.")
2425                         elif apt_pkg.VersionCompare(new_version, add_version) < 0:
2426                             # propagation would be redundant. no need to reject though.
2427                             self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2428                             cansave = 1
2429                         elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2430                              apt_pkg.VersionCompare(add_version, target_version) >= 0:
2431                             # propagate!!
2432                             self.warnings.append("Propagating upload to %s" % (addsuite))
2433                             self.pkg.changes.setdefault("propdistribution", {})
2434                             self.pkg.changes["propdistribution"][addsuite] = 1
2435                             cansave = 1
2436
2437                     if not cansave:
2438                         self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2439
2440     ################################################################################
2441     def check_binary_against_db(self, filename, session):
2442         # Ensure version is sane
2443         q = session.query(BinAssociation)
2444         q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[filename]["package"])
2445         q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[filename]["architecture"], 'all']))
2446
2447         self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
2448                                        filename, self.pkg.files[filename]["version"], sourceful=False)
2449
2450         # Check for any existing copies of the file
2451         q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2452         q = q.filter_by(version=self.pkg.files[filename]["version"])
2453         q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2454
2455         if q.count() > 0:
            self.rejects.append("%s: cannot overwrite existing copy already in the archive." % filename)
2457
2458     ################################################################################
2459
2460     def check_source_against_db(self, filename, session):
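        """
        Cross-suite version sanity check for a source upload, based on the
        (suite, version) pairs of every known version of this source.
        """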
2461         source = self.pkg.dsc.get("source")
2462         version = self.pkg.dsc.get("version")
2463
2464         # Ensure version is sane
2465         q = session.query(SrcAssociation)
2466         q = q.join(DBSource).filter(DBSource.source==source)
2467
2468         self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
2469                                        filename, version, sourceful=True)
2470
2471     ################################################################################
2472     def check_dsc_against_db(self, filename, session):
        """
        Check the files listed in a .dsc against the archive database, the
        pool and the queue directories.

2475         @warning: NB: this function can remove entries from the 'files' index [if
2476          the orig tarball is a duplicate of the one in the archive]; if
2477          you're iterating over 'files' and call this function as part of
2478          the loop, be sure to add a check to the top of the loop to
2479          ensure you haven't just tried to dereference the deleted entry.
2480
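        A hypothetical example of the orig_files entry recorded below when
        the orig tarball is found in the pool (all values illustrative):

            orig_files["foo_1.0.orig.tar.gz"] = {
                "id": 12345,        # pool file id
                "path": "/srv/ftp/pool/main/f/foo/foo_1.0.orig.tar.gz",
                "location": 1,      # location id
            }
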
2481         """
2482
2483         Cnf = Config()
2484         self.pkg.orig_files = {} # XXX: do we need to clear it?
2485         orig_files = self.pkg.orig_files
2486
2487         # Try and find all files mentioned in the .dsc.  This has
2488         # to work harder to cope with the multiple possible
2489         # locations of an .orig.tar.gz.
2490         # The ordering on the select is needed to pick the newest orig
2491         # when it exists in multiple places.
2492         for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2493             found = None
2494             if self.pkg.files.has_key(dsc_name):
2495                 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2496                 actual_size = int(self.pkg.files[dsc_name]["size"])
2497                 found = "%s in incoming" % (dsc_name)
2498
2499                 # Check the file does not already exist in the archive
2500                 ql = get_poolfile_like_name(dsc_name, session)
2501
                # Keep only entries whose filename ends with the name we
                # are after; removing from a list while iterating over it
                # skips elements, so build a filtered copy instead.
                ql = [ i for i in ql if i.filename.endswith(dsc_name) ]
2506
                # "[dak] has not broken them.  [dak] has fixed a
                # brokenness.  Your crappy hack exploited a bug in
                # the old dinstall."
                #
                # "(Come on!  I thought it was always obvious that
                # one just doesn't release different files with
                # the same name and version.)"
                #                        -- ajk@ on d-devel@l.d.o
2515
2516                 if len(ql) > 0:
2517                     # Ignore exact matches for .orig.tar.gz
2518                     match = 0
2519                     if re_is_orig_source.match(dsc_name):
2520                         for i in ql:
2521                             if self.pkg.files.has_key(dsc_name) and \
2522                                int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2523                                self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2524                                 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2525                                 # TODO: Don't delete the entry, just mark it as not needed
2526                                 # This would fix the stupidity of changing something we often iterate over
2527                                 # whilst we're doing it
2528                                 del self.pkg.files[dsc_name]
2529                                 dsc_entry["files id"] = i.file_id
2530                                 if not orig_files.has_key(dsc_name):
2531                                     orig_files[dsc_name] = {}
2532                                 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2533                                 match = 1
2534
2535                                 # Don't bitch that we couldn't find this file later
2536                                 try:
2537                                     self.later_check_files.remove(dsc_name)
2538                                 except ValueError:
2539                                     pass
2540
2542                     if not match:
                        self.rejects.append("cannot overwrite existing copy of '%s' already in the archive." % (dsc_name))
2544
2545             elif re_is_orig_source.match(dsc_name):
2546                 # Check in the pool
2547                 ql = get_poolfile_like_name(dsc_name, session)
2548
                # Keep only entries whose filename ends with the name we are
                # after (filtered copy; removing while iterating skips elements)
                # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
                ql = [ i for i in ql if i.filename.endswith(dsc_name) ]
2554
2555                 if len(ql) > 0:
2556                     # Unfortunately, we may get more than one match here if,
2557                     # for example, the package was in potato but had an -sa
2558                     # upload in woody.  So we need to choose the right one.
2559
2560                     # default to something sane in case we don't match any or have only one
2561                     x = ql[0]
2562
2563                     if len(ql) > 1:
2564                         for i in ql:
2565                             old_file = os.path.join(i.location.path, i.filename)
2566                             old_file_fh = utils.open_file(old_file)
2567                             actual_md5 = apt_pkg.md5sum(old_file_fh)
2568                             old_file_fh.close()
2569                             actual_size = os.stat(old_file)[stat.ST_SIZE]
2570                             if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2571                                 x = i
2572
                    # Use the entry selected above (x), not the loop variable.
                    old_file = os.path.join(x.location.path, x.filename)
2574                     old_file_fh = utils.open_file(old_file)
2575                     actual_md5 = apt_pkg.md5sum(old_file_fh)
2576                     old_file_fh.close()
2577                     actual_size = os.stat(old_file)[stat.ST_SIZE]
2578                     found = old_file
2579                     suite_type = x.location.archive_type
2580                     # need this for updating dsc_files in install()
2581                     dsc_entry["files id"] = x.file_id
2582                     # See install() in process-accepted...
2583                     if not orig_files.has_key(dsc_name):
2584                         orig_files[dsc_name] = {}
2585                     orig_files[dsc_name]["id"] = x.file_id
2586                     orig_files[dsc_name]["path"] = old_file
2587                     orig_files[dsc_name]["location"] = x.location.location_id
2588                 else:
2589                     # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2590                     # Not there? Check the queue directories...
2591                     for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2592                         if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2593                             continue
2594                         in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2595                         if os.path.exists(in_otherdir):
2596                             in_otherdir_fh = utils.open_file(in_otherdir)
2597                             actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2598                             in_otherdir_fh.close()
2599                             actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2600                             found = in_otherdir
2601                             if not orig_files.has_key(dsc_name):
2602                                 orig_files[dsc_name] = {}
2603                             orig_files[dsc_name]["path"] = in_otherdir
2604
2605                     if not found:
2606                         self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2607                         continue
2608             else:
2609                 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2610                 continue
2611             if actual_md5 != dsc_entry["md5sum"]:
2612                 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2613             if actual_size != int(dsc_entry["size"]):
2614                 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2615
2616     ################################################################################
2617     # This is used by process-new and process-holding to recheck a changes file
2618     # at the time we're running.  It mainly wraps various other internal functions
2619     # and is similar to accepted_checks - these should probably be tidied up
2620     # and combined
2621     def recheck(self, session):
2622         cnf = Config()
2623         for f in self.pkg.files.keys():
            # The .orig.tar.gz can disappear out from under us if it's a
            # duplicate of one in the archive.
2626             if not self.pkg.files.has_key(f):
2627                 continue
2628
2629             entry = self.pkg.files[f]
2630
2631             # Check that the source still exists
2632             if entry["type"] == "deb":
2633                 source_version = entry["source version"]
2634                 source_package = entry["source package"]
2635                 if not self.pkg.changes["architecture"].has_key("source") \
2636                    and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
2637                     source_epochless_version = re_no_epoch.sub('', source_version)
2638                     dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2639                     found = False
2640                     for q in ["Embargoed", "Unembargoed", "Newstage"]:
2641                         if cnf.has_key("Dir::Queue::%s" % (q)):
2642                             if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
2643                                 found = True
2644                     if not found:
2645                         self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2646
2647             # Version and file overwrite checks
2648             if entry["type"] == "deb":
2649                 self.check_binary_against_db(f, session)
2650             elif entry["type"] == "dsc":
2651                 self.check_source_against_db(f, session)
2652                 self.check_dsc_against_db(f, session)
2653
2654     ################################################################################
2655     def accepted_checks(self, overwrite_checks, session):
2656         # Recheck anything that relies on the database; since that's not
2657         # frozen between accept and our run time when called from p-a.
2658
2659         # overwrite_checks is set to False when installing to stable/oldstable
2660
2661         propogate={}
2662         nopropogate={}
2663
2664         # Find the .dsc (again)
2665         dsc_filename = None
2666         for f in self.pkg.files.keys():
2667             if self.pkg.files[f]["type"] == "dsc":
2668                 dsc_filename = f
2669
2670         for checkfile in self.pkg.files.keys():
            # The .orig.tar.gz can disappear out from under us if it's a
            # duplicate of one in the archive.
2673             if not self.pkg.files.has_key(checkfile):
2674                 continue
2675
2676             entry = self.pkg.files[checkfile]
2677
2678             # Check that the source still exists
2679             if entry["type"] == "deb":
2680                 source_version = entry["source version"]
2681                 source_package = entry["source package"]
                if not self.pkg.changes["architecture"].has_key("source") \
                   and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
2684                     self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2685
2686             # Version and file overwrite checks
2687             if overwrite_checks:
2688                 if entry["type"] == "deb":
2689                     self.check_binary_against_db(checkfile, session)
2690                 elif entry["type"] == "dsc":
2691                     self.check_source_against_db(checkfile, session)
2692                     self.check_dsc_against_db(dsc_filename, session)
2693
2694             # propogate in the case it is in the override tables:
2695             for suite in self.pkg.changes.get("propdistribution", {}).keys():
2696                 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2697                     propogate[suite] = 1
2698                 else:
2699                     nopropogate[suite] = 1
2700
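        # A suite ends up in the distribution list only if at least one file
        # has an override there and no file lacks one: a single miss
        # (nopropogate) vetoes the whole suite.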
2701         for suite in propogate.keys():
2702             if suite in nopropogate:
2703                 continue
2704             self.pkg.changes["distribution"][suite] = 1
2705
        for checkfile in self.pkg.files.keys():
            # Refresh entry for this file; the loop above left it pointing
            # at whichever file happened to come last.
            entry = self.pkg.files[checkfile]
            # Check the package is still in the override tables
            for suite in self.pkg.changes["distribution"].keys():
                if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2710                     self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2711
2712     ################################################################################
2713     # If any file of an upload has a recent mtime then chances are good
2714     # the file is still being uploaded.
2715
2716     def upload_too_new(self):
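        """
        Heuristic check whether the upload is still in progress: returns
        True if any file of the upload (including the files referenced by
        the .dsc and the .changes itself) was modified less than
        Dinstall::SkipTime seconds ago.
        """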
2717         cnf = Config()
2718         too_new = False
2719         # Move back to the original directory to get accurate time stamps
2720         cwd = os.getcwd()
2721         os.chdir(self.pkg.directory)
2722         file_list = self.pkg.files.keys()
2723         file_list.extend(self.pkg.dsc_files.keys())
2724         file_list.append(self.pkg.changes_file)
2725         for f in file_list:
2726             try:
2727                 last_modified = time.time()-os.path.getmtime(f)
2728                 if last_modified < int(cnf["Dinstall::SkipTime"]):
2729                     too_new = True
2730                     break
            except OSError:
                # The file may have vanished between listing and stat; a
                # missing file simply isn't "too new".
                pass
2733
2734         os.chdir(cwd)
2735         return too_new
2736
2737     def store_changelog(self):
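        """
        Record the upload's changelog text in the changelogs_text table and
        link it from the matching row in the changes table; does nothing for
        binary-only uploads that are not bin-NMUs, or if a changelog is
        already linked.
        """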
2738
2739         # Skip binary-only upload if it is not a bin-NMU
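        # (binNMU versions carry a "+bN" suffix, e.g. "1.2-3+b1", which is
        # what re_bin_only_nmu is expected to match)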
2740         if not self.pkg.changes['architecture'].has_key('source'):
2741             from daklib.regexes import re_bin_only_nmu
2742             if not re_bin_only_nmu.search(self.pkg.changes['version']):
2743                 return
2744
2745         session = DBConn().session()
2746
2747         # Check if upload already has a changelog entry
2748         query = """SELECT changelog_id FROM changes WHERE source = :source
2749                    AND version = :version AND architecture = :architecture AND changelog_id != 0"""
2750         if session.execute(query, {'source': self.pkg.changes['source'], \
2751                                    'version': self.pkg.changes['version'], \
2752                                    'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
2753             session.commit()
2754             return
2755
2756         # Add current changelog text into changelogs_text table, return created ID
2757         query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
2758         ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]
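        # (INSERT ... RETURNING is PostgreSQL-specific; dak's database is
        # assumed to be PostgreSQL here)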
2759
2760         # Link ID to the upload available in changes table
2761         query = """UPDATE changes SET changelog_id = :id WHERE source = :source
2762                    AND version = :version AND architecture = :architecture"""
2763         session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
2764                                 'version': self.pkg.changes['version'], \
2765                                 'architecture': " ".join(self.pkg.changes['architecture'].keys())})
2766
2767         session.commit()