]> git.decadent.org.uk Git - dak.git/blob - daklib/queue.py
Start fixing up byhand
[dak.git] / daklib / queue.py
1 #!/usr/bin/env python
2 # vim:set et sw=4:
3
4 """
5 Queue utility functions for dak
6
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009, 2010  Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
11 """
12
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
17
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
21 # GNU General Public License for more details.
22
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
26
27 ###############################################################################
28
29 import errno
30 import os
31 import stat
32 import sys
33 import time
34 import apt_inst
35 import apt_pkg
36 import utils
37 import commands
38 import shutil
39 import textwrap
40 from types import *
41 from sqlalchemy.sql.expression import desc
42 from sqlalchemy.orm.exc import NoResultFound
43
44 import yaml
45
46 from dak_exceptions import *
47 from changes import *
48 from regexes import *
49 from config import Config
50 from holding import Holding
51 from urgencylog import UrgencyLog
52 from dbconn import *
53 from summarystats import SummaryStats
54 from utils import parse_changes, check_dsc_files
55 from textutils import fix_maintainer
56 from binary import Binary
57 from lintian import parse_lintian_output, generate_reject_messages
58
59 ###############################################################################
60
def get_type(f, session):
    """
    Determine the file type of C{f} and validate it against the override types.

    @type f: dict
    @param f: file entry from Changes object

    @type session: SQLA Session
    @param session: SQL Alchemy session object

    @rtype: string
    @return: filetype

    """
    # An explicit dbtype (set when the binary was inspected) wins;
    # otherwise anything matching a source extension is a dsc.
    if "dbtype" in f:
        file_type = f["dbtype"]
    elif re_source_ext.match(f["type"]):
        file_type = "dsc"
    else:
        file_type = f["type"]
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (file_type))

    # The type must also exist as an override type in the database.
    if get_override_type(file_type, session) is None:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))

    return file_type
90
91 ################################################################################
92
93 # Determine what parts in a .changes are NEW
94
95 def determine_new(changes, files, warn=1, session = None):
96     """
97     Determine what parts in a C{changes} file are NEW.
98
99     @type changes: Upload.Pkg.changes dict
100     @param changes: Changes dictionary
101
102     @type files: Upload.Pkg.files dict
103     @param files: Files dictionary
104
105     @type warn: bool
106     @param warn: Warn if overrides are added for (old)stable
107
108     @rtype: dict
109     @return: dictionary of NEW components.
110
111     """
112     new = {}
113     byhand = {}
114
115     # Build up a list of potentially new things
116     for name, f in files.items():
117         # Keep a record of byhand elements
118         if f["section"] == "byhand":
119             byhand[name] = 1
120             continue
121         pkg = f["package"]
122         priority = f["priority"]
123         section = f["section"]
124         file_type = get_type(f, session)
125         component = f["component"]
126
127         if file_type == "dsc":
128             priority = "source"
129
130         if not new.has_key(pkg):
131             new[pkg] = {}
132             new[pkg]["priority"] = priority
133             new[pkg]["section"] = section
134             new[pkg]["type"] = file_type
135             new[pkg]["component"] = component
136             new[pkg]["files"] = []
137         else:
138             old_type = new[pkg]["type"]
139             if old_type != file_type:
140                 # source gets trumped by deb or udeb
141                 if old_type == "dsc":
142                     new[pkg]["priority"] = priority
143                     new[pkg]["section"] = section
144                     new[pkg]["type"] = file_type
145                     new[pkg]["component"] = component
146
147         new[pkg]["files"].append(name)
148
149         if f.has_key("othercomponents"):
150             new[pkg]["othercomponents"] = f["othercomponents"]
151
152     # Fix up the list of target suites
153     cnf = Config()
154     for suite in changes["suite"].keys():
155         override = cnf.Find("Suite::%s::OverrideSuite" % (suite))
156         if override:
157             (olderr, newerr) = (get_suite(suite, session) == None,
158                                 get_suite(override, session) == None)
159             if olderr or newerr:
160                 (oinv, newinv) = ("", "")
161                 if olderr: oinv = "invalid "
162                 if newerr: ninv = "invalid "
163                 print "warning: overriding %ssuite %s to %ssuite %s" % (
164                         oinv, suite, ninv, override)
165             del changes["suite"][suite]
166             changes["suite"][override] = 1
167
168     for suite in changes["suite"].keys():
169         for pkg in new.keys():
170             ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
171             if len(ql) > 0:
172                 for file_entry in new[pkg]["files"]:
173                     if files[file_entry].has_key("new"):
174                         del files[file_entry]["new"]
175                 del new[pkg]
176
177     if warn:
178         for s in ['stable', 'oldstable']:
179             if changes["suite"].has_key(s):
180                 print "WARNING: overrides will be added for %s!" % s
181         for pkg in new.keys():
182             if new[pkg].has_key("othercomponents"):
183                 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
184
185     return new, byhand
186
187 ################################################################################
188
def check_valid(new, session = None):
    """
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages can not be udeb
      - source priority can only be assigned to dsc file types

    Invalid entries are marked by setting "section id" / "priority id"
    to -1 in place; nothing is returned.

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.

    @type session: SQLA Session
    @param session: SQL Alchemy session object

    """
    for pkg in new.keys():
        section_name = new[pkg]["section"]
        priority_name = new[pkg]["priority"]
        file_type = new[pkg]["type"]

        # Resolve the section name to its database id (-1 = unknown).
        section = get_section(section_name, session)
        if section is None:
            new[pkg]["section id"] = -1
        else:
            new[pkg]["section id"] = section.section_id

        # Resolve the priority name to its database id (-1 = unknown).
        priority = get_priority(priority_name, session)
        if priority is None:
            new[pkg]["priority id"] = -1
        else:
            new[pkg]["priority id"] = priority.priority_id

        # Sanity checks
        di = section_name.find("debian-installer") != -1

        # If d-i, we must be udeb and vice-versa
        if     (di and file_type not in ("udeb", "dsc")) or \
           (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1

        # If dsc we need to be source and vice-versa.
        # Bug fix: compare the priority *name*.  "priority" is by now a
        # database object (or None), so the old 'priority == "source"'
        # test was always False and the second clause wrongly invalidated
        # the priority of every dsc, even when it was correctly "source".
        if (priority_name == "source" and file_type != "dsc") or \
           (priority_name != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1
231 ###############################################################################
232
# Used by Upload.check_timestamps
class TarTime(object):
    """Accumulates tar members whose mtimes fall outside an allowed window."""

    def __init__(self, future_cutoff, past_cutoff):
        self.reset()
        self.future_cutoff = future_cutoff
        self.past_cutoff = past_cutoff

    def reset(self):
        """Forget any previously recorded offending files."""
        self.future_files = {}
        self.ancient_files = {}

    def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
        """Record Name when its MTime is beyond either cutoff."""
        if self.future_cutoff < MTime:
            self.future_files[Name] = MTime
        if self.past_cutoff > MTime:
            self.ancient_files[Name] = MTime
249
250 ###############################################################################
251
def prod_maintainer(notes, upload):
    """
    Interactively compose a "prod" mail to the maintainer of C{upload},
    seeded from the existing NEW-queue notes, and send it.

    @type notes: list of NewComment
    @param notes: existing notes; their comment text pre-fills the editor.

    @type upload: Upload
    @param upload: upload whose Subst map supplies the mail template values.
    """
    cnf = Config()

    # Here we prepare an editor and get them ready to prod...
    # Seed a temporary file with the current notes for the operator to edit.
    (fd, temp_filename) = utils.temp_filename()
    temp_file = os.fdopen(fd, 'w')
    for note in notes:
        temp_file.write(note.comment)
    temp_file.close()
    editor = os.environ.get("EDITOR","vi")
    answer = 'E'
    # Loop: edit, show the result, then prompt until a letter from the
    # prompt string is given ('E' re-enters the editor).
    while answer == 'E':
        os.system("%s %s" % (editor, temp_filename))
        temp_fh = utils.open_file(temp_filename)
        prod_message = "".join(temp_fh.readlines())
        temp_fh.close()
        print "Prod message:"
        print utils.prefix_multi_line_string(prod_message,"  ",include_blank_lines=1)
        prompt = "[P]rod, Edit, Abandon, Quit ?"
        answer = "XXX"
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            # Empty input selects the [bracketed] default from the prompt.
            m = re_default_answer.search(prompt)
            if answer == "":
                answer = m.group(1)
            answer = answer[:1].upper()
    os.unlink(temp_filename)
    if answer == 'A':
        return
    elif answer == 'Q':
        # NOTE(review): end() is not defined or imported in this module --
        # presumably provided by the calling dak command's globals; confirm.
        end()
        sys.exit(0)
    # Otherwise, do the proding...
    user_email_address = utils.whoami() + " <%s>" % (
        cnf["Dinstall::MyAdminAddress"])

    Subst = upload.Subst

    Subst["__FROM_ADDRESS__"] = user_email_address
    Subst["__PROD_MESSAGE__"] = prod_message
    Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]

    prod_mail_message = utils.TemplateSubst(
        Subst,cnf["Dir::Templates"]+"/process-new.prod")

    # Send the prod mail
    utils.send_mail(prod_mail_message)

    print "Sent prodding message"
301
302 ################################################################################
303
304 def edit_note(note, upload, session):
305     # Write the current data to a temporary file
306     (fd, temp_filename) = utils.temp_filename()
307     editor = os.environ.get("EDITOR","vi")
308     answer = 'E'
309     while answer == 'E':
310         os.system("%s %s" % (editor, temp_filename))
311         temp_file = utils.open_file(temp_filename)
312         newnote = temp_file.read().rstrip()
313         temp_file.close()
314         print "New Note:"
315         print utils.prefix_multi_line_string(newnote,"  ")
316         prompt = "[D]one, Edit, Abandon, Quit ?"
317         answer = "XXX"
318         while prompt.find(answer) == -1:
319             answer = utils.our_raw_input(prompt)
320             m = re_default_answer.search(prompt)
321             if answer == "":
322                 answer = m.group(1)
323             answer = answer[:1].upper()
324     os.unlink(temp_filename)
325     if answer == 'A':
326         return
327     elif answer == 'Q':
328         end()
329         sys.exit(0)
330
331     comment = NewComment()
332     comment.package = upload.pkg.changes["source"]
333     comment.version = upload.pkg.changes["version"]
334     comment.comment = newnote
335     comment.author  = utils.whoami()
336     comment.trainee = bool(Options["Trainee"])
337     session.add(comment)
338     session.commit()
339
340 ###############################################################################
341
342 class Upload(object):
343     """
344     Everything that has to do with an upload processed.
345
346     """
    def __init__(self):
        # Optional logger; stays None until a caller assigns one.
        self.logger = None
        # All parsed per-upload state (.changes contents, files, etc.).
        self.pkg = Changes()
        # Fill the Subst defaults and empty the rejects/warnings/notes lists.
        self.reset()
351
352     ###########################################################################
353
354     def reset (self):
355         """ Reset a number of internal variables."""
356
357         # Initialize the substitution template map
358         cnf = Config()
359         self.Subst = {}
360         self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
361         self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
362         self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
363         self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
364
365         self.rejects = []
366         self.warnings = []
367         self.notes = []
368
369         self.later_check_files = []
370
371         self.pkg.reset()
372
373     def package_info(self):
374         """
375         Format various messages from this Upload to send to the maintainer.
376         """
377
378         msgs = (
379             ('Reject Reasons', self.rejects),
380             ('Warnings', self.warnings),
381             ('Notes', self.notes),
382         )
383
384         msg = ''
385         for title, messages in msgs:
386             if messages:
387                 msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
388         msg += '\n\n'
389
390         return msg
391
392     ###########################################################################
    def update_subst(self):
        """ Set up the per-package template substitution mappings.

        Fills self.Subst with per-upload values (architecture, filenames,
        maintainer addresses, reject message, source/version/suite) read
        from self.pkg.changes.  Must run after load_changes().
        """

        cnf = Config()

        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not self.pkg.changes.has_key("architecture") or not \
           isinstance(self.pkg.changes["architecture"], dict):
            self.pkg.changes["architecture"] = { "Unknown" : "" }

        # and maintainer2047 may not exist.
        if not self.pkg.changes.has_key("maintainer2047"):
            self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]

        self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
        self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if self.pkg.changes["architecture"].has_key("source") and \
           self.pkg.changes["changedby822"] != "" and \
           (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):

            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
        else:
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")

        # Process policy doesn't set the fingerprint field and I don't want to make it
        # do it for now as I don't want to have to deal with the case where we accepted
        # the package into PU-NEW, but the fingerprint has gone away from the keyring in
        # the meantime so the package will be remarked as rejectable.  Urgh.
        # TODO: Fix this properly
        if self.pkg.changes.has_key('fingerprint'):
            session = DBConn().session()
            fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
            # Sponsored uploads also get a copy sent to the sponsor's address.
            # NOTE(review): assumes fpr.uid is non-None when fpr exists -- confirm.
            if fpr and self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
                if self.pkg.changes.has_key("sponsoremail"):
                    self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
            session.close()

        # Bcc the package tracking system, if one is configured.
        if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
            self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if cnf.get("Dinstall::OverrideMaintainer"):
            self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
            self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]

        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
        self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
        self.Subst["__SUITE__"] = ", ".join(self.pkg.changes["distribution"])
449
450     ###########################################################################
    def load_changes(self, filename):
        """
        Load a changes file and setup a dictionary around it. Also checks for
        mandatory fields within.

        @type filename: string
        @param filename: Changes filename, full path.

        @rtype: boolean
        @return: whether the changes file was valid or not.  We may want to
                 reject even if this is True (see what gets put in self.rejects).
                 This is simply to prevent us even trying things later which will
                 fail because we couldn't properly parse the file.
        """
        # NOTE(review): Cnf is assigned but never used in this method.
        Cnf = Config()
        self.pkg.changes_file = filename

        # Parse the .changes field into a dictionary
        try:
            self.pkg.changes.update(parse_changes(filename))
        except CantOpenError:
            self.rejects.append("%s: can't read file." % (filename))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except ChangesUnicodeError:
            self.rejects.append("%s: changes file not proper utf-8" % (filename))
            return False

        # Parse the Files field from the .changes into another dictionary
        try:
            self.pkg.files.update(utils.build_file_list(self.pkg.changes))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (filename, format))
            return False

        # Check for mandatory fields
        for i in ("distribution", "source", "binary", "architecture",
                  "version", "maintainer", "files", "changes", "description"):
            if not self.pkg.changes.has_key(i):
                # Avoid undefined errors later
                self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
                return False

        # Strip a source version in brackets from the source field
        if re_strip_srcver.search(self.pkg.changes["source"]):
            self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])

        # Ensure the source field is a valid package name.
        if not re_valid_pkg_name.match(self.pkg.changes["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))

        # Split multi-value fields into a lower-level dictionary
        # (each space-separated value becomes a key mapped to 1).
        for i in ("architecture", "distribution", "binary", "closes"):
            o = self.pkg.changes.get(i, "")
            if o != "":
                del self.pkg.changes[i]

            self.pkg.changes[i] = {}

            for j in o.split():
                self.pkg.changes[i][j] = 1

        # Fix the Maintainer: field to be RFC822/2047 compatible
        try:
            (self.pkg.changes["maintainer822"],
             self.pkg.changes["maintainer2047"],
             self.pkg.changes["maintainername"],
             self.pkg.changes["maintaineremail"]) = \
                   fix_maintainer (self.pkg.changes["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["maintainer"], msg))

        # ...likewise for the Changed-By: field if it exists.
        # On parse failure the derived fields are blanked so later code
        # can rely on them existing.
        try:
            (self.pkg.changes["changedby822"],
             self.pkg.changes["changedby2047"],
             self.pkg.changes["changedbyname"],
             self.pkg.changes["changedbyemail"]) = \
                   fix_maintainer (self.pkg.changes.get("changed-by", ""))
        except ParseMaintError, msg:
            self.pkg.changes["changedby822"] = ""
            self.pkg.changes["changedby2047"] = ""
            self.pkg.changes["changedbyname"] = ""
            self.pkg.changes["changedbyemail"] = ""

            self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["changed-by"], msg))

        # Ensure all the values in Closes: are numbers
        if self.pkg.changes.has_key("closes"):
            for i in self.pkg.changes["closes"].keys():
                if re_isanum.match (i) == None:
                    self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))

        # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
        self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
        self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])

        # Check the .changes is non-empty
        if not self.pkg.files:
            self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
            return False

        # Changes was syntactically valid even if we'll reject
        return True
562
563     ###########################################################################
564
    def check_distributions(self):
        """Check and map the Distribution field.

        Applies the configured SuiteMappings (map, silent-map,
        map-unreleased, ignore, reject, propup-version) to
        self.pkg.changes["distribution"] in configuration order, then
        verifies at least one known target suite remains.  Problems are
        appended to self.rejects / self.warnings / self.notes.
        """

        Cnf = Config()

        # Handle suite mappings
        for m in Cnf.ValueList("SuiteMappings"):
            args = m.split()
            mtype = args[0]
            if mtype == "map" or mtype == "silent-map":
                # Unconditionally redirect uploads from one suite to another.
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    del self.pkg.changes["distribution"][source]
                    self.pkg.changes["distribution"][dest] = 1
                    if mtype != "silent-map":
                        self.notes.append("Mapping %s to %s." % (source, dest))
                if self.pkg.changes.has_key("distribution-version"):
                    if self.pkg.changes["distribution-version"].has_key(source):
                        self.pkg.changes["distribution-version"][source]=dest
            elif mtype == "map-unreleased":
                # Redirect only if some upload architecture is not built in
                # the source suite; one such architecture triggers the map.
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    for arch in self.pkg.changes["architecture"].keys():
                        if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
                            self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
                            del self.pkg.changes["distribution"][source]
                            self.pkg.changes["distribution"][dest] = 1
                            break
            elif mtype == "ignore":
                # Silently drop the suite from the target list (warn only).
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    del self.pkg.changes["distribution"][suite]
                    self.warnings.append("Ignoring %s as a target suite." % (suite))
            elif mtype == "reject":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    self.rejects.append("Uploads to %s are not accepted." % (suite))
            elif mtype == "propup-version":
                # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
                #
                # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
                if self.pkg.changes["distribution"].has_key(args[1]):
                    self.pkg.changes.setdefault("distribution-version", {})
                    for suite in args[2:]:
                        self.pkg.changes["distribution-version"][suite] = suite

        # Ensure there is (still) a target distribution
        if len(self.pkg.changes["distribution"].keys()) < 1:
            self.rejects.append("No valid distribution remaining.")

        # Ensure target distributions exist
        for suite in self.pkg.changes["distribution"].keys():
            if not Cnf.has_key("Suite::%s" % (suite)):
                self.rejects.append("Unknown distribution `%s'." % (suite))
619
620     ###########################################################################
621
622     def binary_file_checks(self, f, session):
623         cnf = Config()
624         entry = self.pkg.files[f]
625
626         # Extract package control information
627         deb_file = utils.open_file(f)
628         try:
629             control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
630         except:
631             self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
632             deb_file.close()
633             # Can't continue, none of the checks on control would work.
634             return
635
636         # Check for mandantory "Description:"
637         deb_file.seek(0)
638         try:
639             apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
640         except:
641             self.rejects.append("%s: Missing Description in binary package" % (f))
642             return
643
644         deb_file.close()
645
646         # Check for mandatory fields
647         for field in [ "Package", "Architecture", "Version" ]:
648             if control.Find(field) == None:
649                 # Can't continue
650                 self.rejects.append("%s: No %s field in control." % (f, field))
651                 return
652
653         # Ensure the package name matches the one give in the .changes
654         if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
655             self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
656
657         # Validate the package field
658         package = control.Find("Package")
659         if not re_valid_pkg_name.match(package):
660             self.rejects.append("%s: invalid package name '%s'." % (f, package))
661
662         # Validate the version field
663         version = control.Find("Version")
664         if not re_valid_version.match(version):
665             self.rejects.append("%s: invalid version number '%s'." % (f, version))
666
667         # Ensure the architecture of the .deb is one we know about.
668         default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
669         architecture = control.Find("Architecture")
670         upload_suite = self.pkg.changes["distribution"].keys()[0]
671
672         if      architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session = session)] \
673             and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session = session)]:
674             self.rejects.append("Unknown architecture '%s'." % (architecture))
675
676         # Ensure the architecture of the .deb is one of the ones
677         # listed in the .changes.
678         if not self.pkg.changes["architecture"].has_key(architecture):
679             self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
680
681         # Sanity-check the Depends field
682         depends = control.Find("Depends")
683         if depends == '':
684             self.rejects.append("%s: Depends field is empty." % (f))
685
686         # Sanity-check the Provides field
687         provides = control.Find("Provides")
688         if provides:
689             provide = re_spacestrip.sub('', provides)
690             if provide == '':
691                 self.rejects.append("%s: Provides field is empty." % (f))
692             prov_list = provide.split(",")
693             for prov in prov_list:
694                 if not re_valid_pkg_name.match(prov):
695                     self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
696
697         # Check the section & priority match those given in the .changes (non-fatal)
698         if     control.Find("Section") and entry["section"] != "" \
699            and entry["section"] != control.Find("Section"):
700             self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
701                                 (f, control.Find("Section", ""), entry["section"]))
702         if control.Find("Priority") and entry["priority"] != "" \
703            and entry["priority"] != control.Find("Priority"):
704             self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
705                                 (f, control.Find("Priority", ""), entry["priority"]))
706
707         entry["package"] = package
708         entry["architecture"] = architecture
709         entry["version"] = version
710         entry["maintainer"] = control.Find("Maintainer", "")
711
712         if f.endswith(".udeb"):
713             self.pkg.files[f]["dbtype"] = "udeb"
714         elif f.endswith(".deb"):
715             self.pkg.files[f]["dbtype"] = "deb"
716         else:
717             self.rejects.append("%s is neither a .deb or a .udeb." % (f))
718
719         entry["source"] = control.Find("Source", entry["package"])
720
721         # Get the source version
722         source = entry["source"]
723         source_version = ""
724
725         if source.find("(") != -1:
726             m = re_extract_src_version.match(source)
727             source = m.group(1)
728             source_version = m.group(2)
729
730         if not source_version:
731             source_version = self.pkg.files[f]["version"]
732
733         entry["source package"] = source
734         entry["source version"] = source_version
735
736         # Ensure the filename matches the contents of the .deb
737         m = re_isadeb.match(f)
738
739         #  package name
740         file_package = m.group(1)
741         if entry["package"] != file_package:
742             self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
743                                 (f, file_package, entry["dbtype"], entry["package"]))
744         epochless_version = re_no_epoch.sub('', control.Find("Version"))
745
746         #  version
747         file_version = m.group(2)
748         if epochless_version != file_version:
749             self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
750                                 (f, file_version, entry["dbtype"], epochless_version))
751
752         #  architecture
753         file_architecture = m.group(3)
754         if entry["architecture"] != file_architecture:
755             self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
756                                 (f, file_architecture, entry["dbtype"], entry["architecture"]))
757
758         # Check for existent source
759         source_version = entry["source version"]
760         source_package = entry["source package"]
761         if self.pkg.changes["architecture"].has_key("source"):
762             if source_version != self.pkg.changes["version"]:
763                 self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
764                                     (source_version, f, self.pkg.changes["version"]))
765         else:
766             # Check in the SQL database
767             if not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
768                 # Check in one of the other directories
769                 source_epochless_version = re_no_epoch.sub('', source_version)
770                 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
771                 if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
772                     entry["byhand"] = 1
773                 elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
774                     entry["new"] = 1
775                 else:
776                     dsc_file_exists = False
777                     for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates", "Lenny-Volatile-Proposed-Updates"]:
778                         if cnf.has_key("Dir::Queue::%s" % (myq)):
779                             if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
780                                 dsc_file_exists = True
781                                 break
782
783                     if not dsc_file_exists:
784                         self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
785
786         # Check the version and for file overwrites
787         self.check_binary_against_db(f, session)
788
789         # Temporarily disable contents generation until we change the table storage layout
790         #b = Binary(f)
791         #b.scan_package()
792         #if len(b.rejects) > 0:
793         #    for j in b.rejects:
794         #        self.rejects.append(j)
795
796     def source_file_checks(self, f, session):
797         entry = self.pkg.files[f]
798
799         m = re_issource.match(f)
800         if not m:
801             return
802
803         entry["package"] = m.group(1)
804         entry["version"] = m.group(2)
805         entry["type"] = m.group(3)
806
807         # Ensure the source package name matches the Source filed in the .changes
808         if self.pkg.changes["source"] != entry["package"]:
809             self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
810
811         # Ensure the source version matches the version in the .changes file
812         if re_is_orig_source.match(f):
813             changes_version = self.pkg.changes["chopversion2"]
814         else:
815             changes_version = self.pkg.changes["chopversion"]
816
817         if changes_version != entry["version"]:
818             self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))
819
820         # Ensure the .changes lists source in the Architecture field
821         if not self.pkg.changes["architecture"].has_key("source"):
822             self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))
823
824         # Check the signature of a .dsc file
825         if entry["type"] == "dsc":
826             # check_signature returns either:
827             #  (None, [list, of, rejects]) or (signature, [])
828             (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
829             for j in rejects:
830                 self.rejects.append(j)
831
832         entry["architecture"] = "source"
833
834     def per_suite_file_checks(self, f, suite, session):
835         cnf = Config()
836         entry = self.pkg.files[f]
837
838         # Skip byhand
839         if entry.has_key("byhand"):
840             return
841
842         # Check we have fields we need to do these checks
843         oktogo = True
844         for m in ['component', 'package', 'priority', 'size', 'md5sum']:
845             if not entry.has_key(m):
846                 self.rejects.append("file '%s' does not have field %s set" % (f, m))
847                 oktogo = False
848
849         if not oktogo:
850             return
851
852         # Handle component mappings
853         for m in cnf.ValueList("ComponentMappings"):
854             (source, dest) = m.split()
855             if entry["component"] == source:
856                 entry["original component"] = source
857                 entry["component"] = dest
858
859         # Ensure the component is valid for the target suite
860         if cnf.has_key("Suite:%s::Components" % (suite)) and \
861            entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
862             self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
863             return
864
865         # Validate the component
866         if not get_component(entry["component"], session):
867             self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
868             return
869
870         # See if the package is NEW
871         if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
872             entry["new"] = 1
873
874         # Validate the priority
875         if entry["priority"].find('/') != -1:
876             self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))
877
878         # Determine the location
879         location = cnf["Dir::Pool"]
880         l = get_location(location, entry["component"], session=session)
881         if l is None:
882             self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
883             entry["location id"] = -1
884         else:
885             entry["location id"] = l.location_id
886
887         # Check the md5sum & size against existing files (if any)
888         entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])
889
890         found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
891                                          entry["size"], entry["md5sum"], entry["location id"])
892
893         if found is None:
894             self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
895         elif found is False and poolfile is not None:
896             self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
897         else:
898             if poolfile is None:
899                 entry["files id"] = None
900             else:
901                 entry["files id"] = poolfile.file_id
902
903         # Check for packages that have moved from one component to another
904         entry['suite'] = suite
905         res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
906         if res.rowcount > 0:
907             entry["othercomponents"] = res.fetchone()[0]
908
    def check_files(self, action=True):
        """Run the per-file checks over everything listed in the .changes.

        If action is True, files are first copied into the holding area.
        The .changes file is checked against the database for duplicates,
        then every file is classified as byhand, deb or source, the
        matching *_file_checks routine is run, and per-suite checks are
        applied for every target distribution.  Problems are appended to
        self.rejects / self.warnings.
        """
        file_keys = self.pkg.files.keys()
        holding = Holding()
        cnf = Config()

        if action:
            # Copy everything into holding; work from the upload directory
            # and restore the cwd afterwards.
            cwd = os.getcwd()
            os.chdir(self.pkg.directory)
            for f in file_keys:
                # copy_to_holding() returns None on success
                ret = holding.copy_to_holding(f)
                if ret is not None:
                    self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)

            os.chdir(cwd)

        # check we already know the changes file
        # [NB: this check must be done post-suite mapping]
        base_filename = os.path.basename(self.pkg.changes_file)

        session = DBConn().session()

        try:
            dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
            # if in the pool or in a queue other than unchecked, reject
            if (dbc.in_queue is None) \
                   or (dbc.in_queue is not None
                       and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
                self.rejects.append("%s file already known to dak" % base_filename)
        except NoResultFound, e:
            # not known, good
            pass

        has_binaries = False
        has_source = False

        for f, entry in self.pkg.files.items():
            # Ensure the file does not already exist in one of the accepted directories
            for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                if not cnf.has_key("Dir::Queue::%s" % (d)): continue
                if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
                    self.rejects.append("%s file already exists in the %s directory." % (f, d))

            if not re_taint_free.match(f):
                self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))

            # Check the file is readable
            if os.access(f, os.R_OK) == 0:
                # When running in -n, copy_to_holding() won't have
                # generated the reject_message, so we need to.
                if action:
                    if os.path.exists(f):
                        self.rejects.append("Can't read `%s'. [permission denied]" % (f))
                    else:
                        # Don't directly reject, mark to check later to deal with orig's
                        # we can find in the pool
                        self.later_check_files.append(f)
                entry["type"] = "unreadable"
                continue

            # If it's byhand skip remaining checks
            if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Checks for a binary package...
            elif re_isadeb.match(f):
                has_binaries = True
                entry["type"] = "deb"

                # This routine appends to self.rejects/warnings as appropriate
                self.binary_file_checks(f, session)

            # Checks for a source package...
            elif re_issource.match(f):
                has_source = True

                # This routine appends to self.rejects/warnings as appropriate
                self.source_file_checks(f, session)

            # Not a binary or source package?  Assume byhand...
            else:
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Per-suite file checks
            entry["oldfiles"] = {}
            for suite in self.pkg.changes["distribution"].keys():
                self.per_suite_file_checks(f, suite, session)

        session.close()

        # If the .changes file says it has source, it must have source.
        if self.pkg.changes["architecture"].has_key("source"):
            if not has_source:
                self.rejects.append("no source found and Architecture line in changes mention source.")

            if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
                self.rejects.append("source only uploads are not supported.")
1007
1008     ###########################################################################
    def check_dsc(self, action=True, session=None):
        """Validate the .dsc of a source upload.

        Parses the .dsc, enforces mandatory fields, validates source
        name/version, source format per suite, Maintainer and
        Build-Depends fields, cross-checks the version against the
        .changes, verifies the Files field and checks against the
        database.  Returns a bool indicating whether the source changes
        are valid; problems are appended to self.rejects.

        NOTE(review): the session argument is only used for the
        per-suite source-format lookup below; a separate session is
        opened for the database cross-checks near the end.
        """
        # Ensure there is source to check
        if not self.pkg.changes["architecture"].has_key("source"):
            return True

        # Find the .dsc
        dsc_filename = None
        for f, entry in self.pkg.files.items():
            if entry["type"] == "dsc":
                if dsc_filename:
                    self.rejects.append("can not process a .changes file with multiple .dsc's.")
                    return False
                else:
                    dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            self.rejects.append("source uploads must contain a dsc file")
            return False

        # Parse the .dsc file
        try:
            self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1, dsc_file=1))
        except CantOpenError:
            # if not -n copy_to_holding() will have done this for us...
            if not action:
                self.rejects.append("%s: can't read file." % (dsc_filename))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
        except InvalidDscError, line:
            self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
        except ChangesUnicodeError:
            self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))

        # Build up the file list of files mentioned by the .dsc
        try:
            self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
        except NoFilesFieldError:
            self.rejects.append("%s: no Files: field." % (dsc_filename))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
            return False

        # Enforce mandatory fields
        for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
            if not self.pkg.dsc.has_key(i):
                self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
                return False

        # Validate the source and version fields
        if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
        if not re_valid_version.match(self.pkg.dsc["version"]):
            self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))

        # Only a limited list of source formats are allowed in each suite
        for dist in self.pkg.changes["distribution"].keys():
            allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
            if self.pkg.dsc["format"] not in allowed:
                self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))

        # Validate the Maintainer field
        try:
            # We ignore the return value
            fix_maintainer(self.pkg.dsc["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                                 % (dsc_filename, self.pkg.dsc["maintainer"], msg))

        # Validate the build-depends field(s)
        for field_name in [ "build-depends", "build-depends-indep" ]:
            field = self.pkg.dsc.get(field_name)
            if field:
                # Have apt try to parse them...
                try:
                    apt_pkg.ParseSrcDepends(field)
                except:
                    self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))

        # Ensure the version number in the .dsc matches the version number in the .changes
        epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        changes_version = self.pkg.files[dsc_filename]["version"]

        if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
            self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))

        # Ensure the Files field contain only what's expected
        self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))

        # Ensure source is newer than existing source in target suites
        session = DBConn().session()
        self.check_source_against_db(dsc_filename, session)
        self.check_dsc_against_db(dsc_filename, session)
        session.close()

        # Finally, check if we're missing any files
        for f in self.later_check_files:
            self.rejects.append("Could not find file %s references in changes" % f)

        return True
1114
1115     ###########################################################################
1116
    def get_changelog_versions(self, source_dir):
        """Extract the source package and (optionally) grab the
        version history out of debian/changelog for the BTS.

        Runs in the current working directory (a temporary directory
        set up by the caller); source files from source_dir are
        symlinked into it before extraction with dpkg-source.  The
        collected changelog entries are stored in
        self.pkg.dsc["bts changelog"]; problems are appended to
        self.rejects.
        """
        cnf = Config()

        # Find the .dsc (again)
        dsc_filename = None
        for f in self.pkg.files.keys():
            if self.pkg.files[f]["type"] == "dsc":
                dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            return

        # Create a symlink mirror of the source files in our temporary directory
        for f in self.pkg.files.keys():
            m = re_issource.match(f)
            if m:
                src = os.path.join(source_dir, f)
                # If a file is missing for whatever reason, give up.
                if not os.path.exists(src):
                    return
                ftype = m.group(3)
                # Orig tarballs that were located elsewhere (e.g. the pool)
                # are symlinked from their recorded path below instead.
                if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
                   self.pkg.orig_files[f].has_key("path"):
                    continue
                dest = os.path.join(os.getcwd(), f)
                os.symlink(src, dest)

        # If the orig files are not a part of the upload, create symlinks to the
        # existing copies.
        for orig_file in self.pkg.orig_files.keys():
            if not self.pkg.orig_files[orig_file].has_key("path"):
                continue
            dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
            os.symlink(self.pkg.orig_files[orig_file]["path"], dest)

        # Extract the source
        cmd = "dpkg-source -sn -x %s" % (dsc_filename)
        (result, output) = commands.getstatusoutput(cmd)
        if (result != 0):
            self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
            self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
            return

        # Version tracking is optional; skip the changelog scan if the
        # BTS version-tracking queue directory is not configured.
        if not cnf.Find("Dir::Queue::BTSVersionTrack"):
            return

        # Get the upstream version
        upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        if re_strip_revision.search(upstr_version):
            upstr_version = re_strip_revision.sub('', upstr_version)

        # Ensure the changelog file exists
        changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
        if not os.path.exists(changelog_filename):
            self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
            return

        # Parse the changelog
        self.pkg.dsc["bts changelog"] = ""
        changelog_file = utils.open_file(changelog_filename)
        for line in changelog_file.readlines():
            m = re_changelog_versions.match(line)
            if m:
                self.pkg.dsc["bts changelog"] += line
        changelog_file.close()

        # Check we found at least one revision in the changelog
        if not self.pkg.dsc["bts changelog"]:
            self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1190
1191     def check_source(self):
1192         # Bail out if:
1193         #    a) there's no source
1194         if not self.pkg.changes["architecture"].has_key("source"):
1195             return
1196
1197         tmpdir = utils.temp_dirname()
1198
1199         # Move into the temporary directory
1200         cwd = os.getcwd()
1201         os.chdir(tmpdir)
1202
1203         # Get the changelog version history
1204         self.get_changelog_versions(cwd)
1205
1206         # Move back and cleanup the temporary tree
1207         os.chdir(cwd)
1208
1209         try:
1210             shutil.rmtree(tmpdir)
1211         except OSError, e:
1212             if e.errno != errno.EACCES:
1213                 print "foobar"
1214                 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1215
1216             self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1217             # We probably have u-r or u-w directories so chmod everything
1218             # and try again.
1219             cmd = "chmod -R u+rwx %s" % (tmpdir)
1220             result = os.system(cmd)
1221             if result != 0:
1222                 utils.fubar("'%s' failed with result %s." % (cmd, result))
1223             shutil.rmtree(tmpdir)
1224         except Exception, e:
1225             print "foobar2 (%s)" % e
1226             utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1227
1228     ###########################################################################
1229     def ensure_hashes(self):
1230         # Make sure we recognise the format of the Files: field in the .changes
1231         format = self.pkg.changes.get("format", "0.0").split(".", 1)
1232         if len(format) == 2:
1233             format = int(format[0]), int(format[1])
1234         else:
1235             format = int(float(format[0])), 0
1236
1237         # We need to deal with the original changes blob, as the fields we need
1238         # might not be in the changes dict serialised into the .dak anymore.
1239         orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1240
1241         # Copy the checksums over to the current changes dict.  This will keep
1242         # the existing modifications to it intact.
1243         for field in orig_changes:
1244             if field.startswith('checksums-'):
1245                 self.pkg.changes[field] = orig_changes[field]
1246
1247         # Check for unsupported hashes
1248         for j in utils.check_hash_fields(".changes", self.pkg.changes):
1249             self.rejects.append(j)
1250
1251         for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1252             self.rejects.append(j)
1253
1254         # We have to calculate the hash if we have an earlier changes version than
1255         # the hash appears in rather than require it exist in the changes file
1256         for hashname, hashfunc, version in utils.known_hashes:
1257             # TODO: Move _ensure_changes_hash into this class
1258             for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1259                 self.rejects.append(j)
1260             if "source" in self.pkg.changes["architecture"]:
1261                 # TODO: Move _ensure_dsc_hash into this class
1262                 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1263                     self.rejects.append(j)
1264
1265     def check_hashes(self):
1266         for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1267             self.rejects.append(m)
1268
1269         for m in utils.check_size(".changes", self.pkg.files):
1270             self.rejects.append(m)
1271
1272         for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1273             self.rejects.append(m)
1274
1275         for m in utils.check_size(".dsc", self.pkg.dsc_files):
1276             self.rejects.append(m)
1277
1278         self.ensure_hashes()
1279
1280     ###########################################################################
1281
    def ensure_orig(self, target_dir='.', session=None):
        """
        Ensures that all orig files mentioned in the changes file are present
        in target_dir. If they do not exist, they are symlinked into place.

        A list containing the symlinks that were created is returned (so they
        can be removed).

        Candidates are looked up first in the pool, then in the other
        queue directories.  A candidate is only linked if its size and
        md5sum match the .dsc entry.  If no session is supplied, one is
        opened (and closed) per file for the pool lookup.
        """

        symlinked = []
        cnf = Config()

        for filename, entry in self.pkg.dsc_files.iteritems():
            if not re_is_orig_source.match(filename):
                # File is not an orig; ignore
                continue

            if os.path.exists(filename):
                # File exists, no need to continue
                continue

            def symlink_if_valid(path):
                # Link path into target_dir only if its size/md5sum match
                # this dsc_files entry; returns True on success.
                f = utils.open_file(path)
                md5sum = apt_pkg.md5sum(f)
                f.close()

                fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
                expected = (int(entry['size']), entry['md5sum'])

                if fingerprint != expected:
                    return False

                dest = os.path.join(target_dir, filename)

                os.symlink(path, dest)
                symlinked.append(dest)

                return True

            # Use the caller's session if given; otherwise open our own
            # for this file's pool lookup.
            session_ = session
            if session is None:
                session_ = DBConn().session()

            found = False

            # Look in the pool
            for poolfile in get_poolfile_like_name('%s' % filename, session_):
                poolfile_path = os.path.join(
                    poolfile.location.path, poolfile.filename
                )

                if symlink_if_valid(poolfile_path):
                    found = True
                    break

            # Only close sessions we opened ourselves
            if session is None:
                session_.close()

            if found:
                continue

            # Look in some other queues for the file
            queues = ('New', 'Byhand', 'ProposedUpdates',
                'OldProposedUpdates', 'Embargoed', 'Unembargoed')

            for queue in queues:
                if not cnf.get('Dir::Queue::%s' % queue):
                    continue

                queuefile_path = os.path.join(
                    cnf['Dir::Queue::%s' % queue], filename
                )

                if not os.path.exists(queuefile_path):
                    # Does not exist in this queue
                    continue

                if symlink_if_valid(queuefile_path):
                    break

        return symlinked
1363
1364     ###########################################################################
1365
    def check_lintian(self):
        """
        Extends self.rejects by checking the output of lintian against tags
        specified in Dinstall::LintianTags.
        """

        cnf = Config()

        # Don't reject binary uploads
        if not self.pkg.changes['architecture'].has_key('source'):
            return

        # Only check some distributions
        for dist in ('unstable', 'experimental'):
            if dist in self.pkg.changes['distribution']:
                break
        else:
            return

        # If we do not have a tagfile, don't do anything
        tagfile = cnf.get("Dinstall::LintianTags")
        if tagfile is None:
            return

        # Parse the yaml file
        sourcefile = file(tagfile, 'r')
        sourcecontent = sourcefile.read()
        sourcefile.close()

        try:
            # NOTE(review): yaml.load executes arbitrary constructors; the
            # tagfile comes from local configuration, but yaml.safe_load
            # would be safer here — confirm before changing.
            lintiantags = yaml.load(sourcecontent)['lintian']
        except yaml.YAMLError, msg:
            utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
            return

        # Try and find all orig mentioned in the .dsc
        symlinked = self.ensure_orig()

        # Setup the input file for lintian: one tag per line
        fd, temp_filename = utils.temp_filename()
        temptagfile = os.fdopen(fd, 'w')
        for tags in lintiantags.values():
            temptagfile.writelines(['%s\n' % x for x in tags])
        temptagfile.close()

        try:
            cmd = "lintian --show-overrides --tags-from-file %s %s" % \
                (temp_filename, self.pkg.changes_file)

            result, output = commands.getstatusoutput(cmd)
        finally:
            # Remove our tempfile and any symlinks we created
            os.unlink(temp_filename)

            for symlink in symlinked:
                os.unlink(symlink)

        # A result of 2 means lintian itself failed; warn but continue
        # and let the tag parsing below see whatever output there was.
        if result == 2:
            utils.warn("lintian failed for %s [return code: %s]." % \
                (self.pkg.changes_file, result))
            utils.warn(utils.prefix_multi_line_string(output, \
                " [possible output:] "))

        def log(*txt):
            # Forward tag-match details to our logger, if one is attached
            if self.logger:
                self.logger.log(
                    [self.pkg.changes_file, "check_lintian"] + list(txt)
                )

        # Generate messages
        parsed_tags = parse_lintian_output(output)
        self.rejects.extend(
            generate_reject_messages(parsed_tags, lintiantags, log=log)
        )
1440
1441     ###########################################################################
1442     def check_urgency(self):
1443         cnf = Config()
1444         if self.pkg.changes["architecture"].has_key("source"):
1445             if not self.pkg.changes.has_key("urgency"):
1446                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1447             self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1448             if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1449                 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1450                                      (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1451                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1452
1453     ###########################################################################
1454
1455     # Sanity check the time stamps of files inside debs.
1456     # [Files in the near future cause ugly warnings and extreme time
1457     #  travel can cause errors on extraction]
1458
1459     def check_timestamps(self):
1460         Cnf = Config()
1461
1462         future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1463         past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1464         tar = TarTime(future_cutoff, past_cutoff)
1465
1466         for filename, entry in self.pkg.files.items():
1467             if entry["type"] == "deb":
1468                 tar.reset()
1469                 try:
1470                     deb_file = utils.open_file(filename)
1471                     apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
1472                     deb_file.seek(0)
1473                     try:
1474                         apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
1475                     except SystemError, e:
1476                         # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
1477                         if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
1478                             raise
1479                         deb_file.seek(0)
1480                         apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")
1481
1482                     deb_file.close()
1483
1484                     future_files = tar.future_files.keys()
1485                     if future_files:
1486                         num_future_files = len(future_files)
1487                         future_file = future_files[0]
1488                         future_date = tar.future_files[future_file]
1489                         self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1490                                % (filename, num_future_files, future_file, time.ctime(future_date)))
1491
1492                     ancient_files = tar.ancient_files.keys()
1493                     if ancient_files:
1494                         num_ancient_files = len(ancient_files)
1495                         ancient_file = ancient_files[0]
1496                         ancient_date = tar.ancient_files[ancient_file]
1497                         self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1498                                % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1499                 except:
1500                     self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1501
1502     def check_if_upload_is_sponsored(self, uid_email, uid_name):
1503         if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1504             sponsored = False
1505         elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1506             sponsored = False
1507             if uid_name == "":
1508                 sponsored = True
1509         else:
1510             sponsored = True
1511             if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1512                 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1513                 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1514                     self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1515                         self.pkg.changes["sponsoremail"] = uid_email
1516
1517         return sponsored
1518
1519
1520     ###########################################################################
1521     # check_signed_by_key checks
1522     ###########################################################################
1523
1524     def check_signed_by_key(self):
1525         """Ensure the .changes is signed by an authorized uploader."""
1526         session = DBConn().session()
1527
1528         # First of all we check that the person has proper upload permissions
1529         # and that this upload isn't blocked
1530         fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1531
1532         if fpr is None:
1533             self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1534             return
1535
1536         # TODO: Check that import-keyring adds UIDs properly
1537         if not fpr.uid:
1538             self.rejects.append("Cannot find uid for fingerprint %s.  Please contact ftpmaster@debian.org" % fpr.fingerprint)
1539             return
1540
1541         # Check that the fingerprint which uploaded has permission to do so
1542         self.check_upload_permissions(fpr, session)
1543
1544         # Check that this package is not in a transition
1545         self.check_transition(session)
1546
1547         session.close()
1548
1549
1550     def check_upload_permissions(self, fpr, session):
1551         # Check any one-off upload blocks
1552         self.check_upload_blocks(fpr, session)
1553
1554         # Start with DM as a special case
1555         # DM is a special case unfortunately, so we check it first
1556         # (keys with no source access get more access than DMs in one
1557         #  way; DMs can only upload for their packages whether source
1558         #  or binary, whereas keys with no access might be able to
1559         #  upload some binaries)
1560         if fpr.source_acl.access_level == 'dm':
1561             self.check_dm_upload(fpr, session)
1562         else:
1563             # Check source-based permissions for other types
1564             if self.pkg.changes["architecture"].has_key("source") and \
1565                 fpr.source_acl.access_level is None:
1566                 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1567                 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1568                 self.rejects.append(rej)
1569                 return
1570             # If not a DM, we allow full upload rights
1571             uid_email = "%s@debian.org" % (fpr.uid.uid)
1572             self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1573
1574
1575         # Check binary upload permissions
1576         # By this point we know that DMs can't have got here unless they
1577         # are allowed to deal with the package concerned so just apply
1578         # normal checks
1579         if fpr.binary_acl.access_level == 'full':
1580             return
1581
1582         # Otherwise we're in the map case
1583         tmparches = self.pkg.changes["architecture"].copy()
1584         tmparches.pop('source', None)
1585
1586         for bam in fpr.binary_acl_map:
1587             tmparches.pop(bam.architecture.arch_string, None)
1588
1589         if len(tmparches.keys()) > 0:
1590             if fpr.binary_reject:
1591                 rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1592                 rej += "\narchitectures involved are: ", ",".join(tmparches.keys())
1593                 self.rejects.append(rej)
1594             else:
1595                 # TODO: This is where we'll implement reject vs throw away binaries later
1596                 rej = "Uhm.  I'm meant to throw away the binaries now but that's not implemented yet"
1597                 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1598                 rej += "\nFingerprint: %s", (fpr.fingerprint)
1599                 self.rejects.append(rej)
1600
1601
1602     def check_upload_blocks(self, fpr, session):
1603         """Check whether any upload blocks apply to this source, source
1604            version, uid / fpr combination"""
1605
1606         def block_rej_template(fb):
1607             rej = 'Manual upload block in place for package %s' % fb.source
1608             if fb.version is not None:
1609                 rej += ', version %s' % fb.version
1610             return rej
1611
1612         for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1613             # version is None if the block applies to all versions
1614             if fb.version is None or fb.version == self.pkg.changes['version']:
1615                 # Check both fpr and uid - either is enough to cause a reject
1616                 if fb.fpr is not None:
1617                     if fb.fpr.fingerprint == fpr.fingerprint:
1618                         self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1619                 if fb.uid is not None:
1620                     if fb.uid == fpr.uid:
1621                         self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1622
1623
    def check_dm_upload(self, fpr, session):
        """Enforce the Debian Maintainer upload conditions for key 'fpr'.

        Implements the requirements of the 2007 GR
        (http://www.debian.org/vote/2007/vote_003): no NEW/BYHAND files,
        the most recent unstable/experimental version must have
        'DM-Upload-Allowed: yes', the uploader must own the key used (no
        sponsoring), must appear in Maintainer:/Uploaders:, and may not
        take over binaries from other source packages.  Violations are
        appended to self.rejects.
        """
        # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
        ## none of the uploaded packages are NEW
        rej = False
        for f in self.pkg.files.keys():
            if self.pkg.files[f].has_key("byhand"):
                self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
                rej = True
            if self.pkg.files[f].has_key("new"):
                self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
                rej = True

        if rej:
            return

        ## the most recent version of the package uploaded to unstable or
        ## experimental includes the field "DM-Upload-Allowed: yes" in the source
        ## section of its control file
        q = session.query(DBSource).filter_by(source=self.pkg.changes["source"])
        q = q.join(SrcAssociation)
        q = q.join(Suite).filter(Suite.suite_name.in_(['unstable', 'experimental']))
        # Highest version first; we only care about the newest one.
        q = q.order_by(desc('source.version')).limit(1)

        r = q.all()

        if len(r) != 1:
            rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
            self.rejects.append(rej)
            return

        r = r[0]
        if not r.dm_upload_allowed:
            rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
            self.rejects.append(rej)
            return

        ## the Maintainer: field of the uploaded .changes file corresponds with
        ## the owner of the key used (ie, non-developer maintainers may not sponsor
        ## uploads)
        if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
            self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))

        ## the most recent version of the package uploaded to unstable or
        ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
        ## non-developer maintainers cannot NMU or hijack packages)

        # srcuploaders includes the maintainer
        accept = False
        for sup in r.srcuploaders:
            (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
            # Eww - I hope we never have two people with the same name in Debian
            # NOTE(review): matching fpr.uid.uid against an *email* assumes
            # DM uids are stored as email addresses — confirm against keyring
            # import code.
            if email == fpr.uid.uid or name == fpr.uid.name:
                accept = True
                break

        if not accept:
            self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
            return

        ## none of the packages are being taken over from other source packages
        for b in self.pkg.changes["binary"].keys():
            for suite in self.pkg.changes["distribution"].keys():
                q = session.query(DBSource)
                q = q.join(DBBinary).filter_by(package=b)
                q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)

                # Any existing binary of this name built from a different
                # source in the target suite is a hijack attempt.
                for s in q.all():
                    if s.source != self.pkg.changes["source"]:
                        self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s, suite))
1695
1696     def check_transition(self, session):
1697         cnf = Config()
1698
1699         sourcepkg = self.pkg.changes["source"]
1700
1701         # No sourceful upload -> no need to do anything else, direct return
1702         # We also work with unstable uploads, not experimental or those going to some
1703         # proposed-updates queue
1704         if "source" not in self.pkg.changes["architecture"] or \
1705            "unstable" not in self.pkg.changes["distribution"]:
1706             return
1707
1708         # Also only check if there is a file defined (and existant) with
1709         # checks.
1710         transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1711         if transpath == "" or not os.path.exists(transpath):
1712             return
1713
1714         # Parse the yaml file
1715         sourcefile = file(transpath, 'r')
1716         sourcecontent = sourcefile.read()
1717         try:
1718             transitions = yaml.load(sourcecontent)
1719         except yaml.YAMLError, msg:
1720             # This shouldn't happen, there is a wrapper to edit the file which
1721             # checks it, but we prefer to be safe than ending up rejecting
1722             # everything.
1723             utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1724             return
1725
1726         # Now look through all defined transitions
1727         for trans in transitions:
1728             t = transitions[trans]
1729             source = t["source"]
1730             expected = t["new"]
1731
1732             # Will be None if nothing is in testing.
1733             current = get_source_in_suite(source, "testing", session)
1734             if current is not None:
1735                 compare = apt_pkg.VersionCompare(current.version, expected)
1736
1737             if current is None or compare < 0:
1738                 # This is still valid, the current version in testing is older than
1739                 # the new version we wait for, or there is none in testing yet
1740
1741                 # Check if the source we look at is affected by this.
1742                 if sourcepkg in t['packages']:
1743                     # The source is affected, lets reject it.
1744
1745                     rejectmsg = "%s: part of the %s transition.\n\n" % (
1746                         sourcepkg, trans)
1747
1748                     if current is not None:
1749                         currentlymsg = "at version %s" % (current.version)
1750                     else:
1751                         currentlymsg = "not present in testing"
1752
1753                     rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1754
1755                     rejectmsg += "\n".join(textwrap.wrap("""Your package
1756 is part of a testing transition designed to get %s migrated (it is
1757 currently %s, we need version %s).  This transition is managed by the
1758 Release Team, and %s is the Release-Team member responsible for it.
1759 Please mail debian-release@lists.debian.org or contact %s directly if you
1760 need further assistance.  You might want to upload to experimental until this
1761 transition is done."""
1762                             % (source, currentlymsg, expected,t["rm"], t["rm"])))
1763
1764                     self.rejects.append(rejectmsg)
1765                     return
1766
1767     ###########################################################################
1768     # End check_signed_by_key checks
1769     ###########################################################################
1770
1771     def build_summaries(self):
1772         """ Build a summary of changes the upload introduces. """
1773
1774         (byhand, new, summary, override_summary) = self.pkg.file_summary()
1775
1776         short_summary = summary
1777
1778         # This is for direport's benefit...
1779         f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1780
1781         if byhand or new:
1782             summary += "Changes: " + f
1783
1784         summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1785
1786         summary += self.announce(short_summary, 0)
1787
1788         return (summary, short_summary)
1789
1790     ###########################################################################
1791
1792     def close_bugs(self, summary, action):
1793         """
1794         Send mail to close bugs as instructed by the closes field in the changes file.
1795         Also add a line to summary if any work was done.
1796
1797         @type summary: string
1798         @param summary: summary text, as given by L{build_summaries}
1799
1800         @type action: bool
1801         @param action: Set to false no real action will be done.
1802
1803         @rtype: string
1804         @return: summary. If action was taken, extended by the list of closed bugs.
1805
1806         """
1807
1808         template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1809
1810         bugs = self.pkg.changes["closes"].keys()
1811
1812         if not bugs:
1813             return summary
1814
1815         bugs.sort()
1816         summary += "Closing bugs: "
1817         for bug in bugs:
1818             summary += "%s " % (bug)
1819             if action:
1820                 self.update_subst()
1821                 self.Subst["__BUG_NUMBER__"] = bug
1822                 if self.pkg.changes["distribution"].has_key("stable"):
1823                     self.Subst["__STABLE_WARNING__"] = """
1824 Note that this package is not part of the released stable Debian
1825 distribution.  It may have dependencies on other unreleased software,
1826 or other instabilities.  Please take care if you wish to install it.
1827 The update will eventually make its way into the next released Debian
1828 distribution."""
1829                 else:
1830                     self.Subst["__STABLE_WARNING__"] = ""
1831                 mail_message = utils.TemplateSubst(self.Subst, template)
1832                 utils.send_mail(mail_message)
1833
1834                 # Clear up after ourselves
1835                 del self.Subst["__BUG_NUMBER__"]
1836                 del self.Subst["__STABLE_WARNING__"]
1837
1838         if action and self.logger:
1839             self.logger.log(["closing bugs"] + bugs)
1840
1841         summary += "\n"
1842
1843         return summary
1844
1845     ###########################################################################
1846
1847     def announce(self, short_summary, action):
1848         """
1849         Send an announce mail about a new upload.
1850
1851         @type short_summary: string
1852         @param short_summary: Short summary text to include in the mail
1853
1854         @type action: bool
1855         @param action: Set to false no real action will be done.
1856
1857         @rtype: string
1858         @return: Textstring about action taken.
1859
1860         """
1861
1862         cnf = Config()
1863         announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
1864
1865         # Only do announcements for source uploads with a recent dpkg-dev installed
1866         if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
1867            self.pkg.changes["architecture"].has_key("source"):
1868             return ""
1869
1870         lists_done = {}
1871         summary = ""
1872
1873         self.Subst["__SHORT_SUMMARY__"] = short_summary
1874
1875         for dist in self.pkg.changes["distribution"].keys():
1876             announce_list = cnf.Find("Suite::%s::Announce" % (dist))
1877             if announce_list == "" or lists_done.has_key(announce_list):
1878                 continue
1879
1880             lists_done[announce_list] = 1
1881             summary += "Announcing to %s\n" % (announce_list)
1882
1883             if action:
1884                 self.update_subst()
1885                 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
1886                 if cnf.get("Dinstall::TrackingServer") and \
1887                    self.pkg.changes["architecture"].has_key("source"):
1888                     trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
1889                     self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
1890
1891                 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
1892                 utils.send_mail(mail_message)
1893
1894                 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
1895
1896         if cnf.FindB("Dinstall::CloseBugs"):
1897             summary = self.close_bugs(summary, action)
1898
1899         del self.Subst["__SHORT_SUMMARY__"]
1900
1901         return summary
1902
1903     ###########################################################################
    @session_wrapper
    def accept (self, summary, short_summary, session=None):
        """
        Accept an upload.

        This moves all files referenced from the .changes into the pool,
        sends the accepted mail, announces to lists, closes bugs and
        also checks for override disparities. If enabled it will write out
        the version history for the BTS Version Tracking and will finally call
        L{queue_build}.

        @type summary: string
        @param summary: Summary text

        @type short_summary: string
        @param short_summary: Short summary
        """

        cnf = Config()
        stats = SummaryStats()

        print "Installing."
        self.logger.log(["installing changes", self.pkg.changes_file])

        # Pool files referenced by this upload; used later to feed the
        # per-suite copy queues.
        poolfiles = []

        # Add the .dsc file to the DB first
        # NOTE(review): source/dsc_component/dsc_location_id are only bound
        # when a .dsc is present — presumably guaranteed for sourceful
        # uploads; confirm callers never pass a sourceful changes without one.
        for newfile, entry in self.pkg.files.items():
            if entry["type"] == "dsc":
                source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
                for j in pfs:
                    poolfiles.append(j)

        # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
        for newfile, entry in self.pkg.files.items():
            if entry["type"] == "deb":
                poolfiles.append(add_deb_to_db(self, newfile, session))

        # If this is a sourceful diff only upload that is moving
        # cross-component we need to copy the .orig files into the new
        # component too for the same reasons as above.
        # XXX: mhy: I think this should be in add_dsc_to_db
        if self.pkg.changes["architecture"].has_key("source"):
            for orig_file in self.pkg.orig_files.keys():
                if not self.pkg.orig_files[orig_file].has_key("id"):
                    continue # Skip if it's not in the pool
                orig_file_id = self.pkg.orig_files[orig_file]["id"]
                if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
                    continue # Skip if the location didn't change

                # Do the move
                oldf = get_poolfile_by_id(orig_file_id, session)
                old_filename = os.path.join(oldf.location.path, oldf.filename)
                old_dat = {'size': oldf.filesize,   'md5sum': oldf.md5sum,
                           'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}

                new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))

                # TODO: Care about size/md5sum collisions etc
                (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)

                # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
                if newf is None:
                    utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
                    newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)

                    session.flush()

                    # Don't reference the old file from this changes
                    for p in poolfiles:
                        if p.file_id == oldf.file_id:
                            poolfiles.remove(p)

                    poolfiles.append(newf)

                    # Fix up the DSC references
                    toremove = []

                    for df in source.srcfiles:
                        if df.poolfile.file_id == oldf.file_id:
                            # Add a new DSC entry and mark the old one for deletion
                            # Don't do it in the loop so we don't change the thing we're iterating over
                            newdscf = DSCFile()
                            newdscf.source_id = source.source_id
                            newdscf.poolfile_id = newf.file_id
                            session.add(newdscf)

                            toremove.append(df)

                    for df in toremove:
                        session.delete(df)

                    # Flush our changes
                    session.flush()

                    # Make sure that our source object is up-to-date
                    session.expire(source)

        # Add changelog information to the database
        self.store_changelog()

        # Install the files into the pool
        for newfile, entry in self.pkg.files.items():
            destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
            utils.move(newfile, destination)
            self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
            stats.accept_bytes += float(entry["size"])

        # Copy the .changes file across for suite which need it.
        copy_changes = {}
        for suite_name in self.pkg.changes["distribution"].keys():
            if cnf.has_key("Suite::%s::CopyChanges" % (suite_name)):
                copy_changes[cnf["Suite::%s::CopyChanges" % (suite_name)]] = ""

        for dest in copy_changes.keys():
            utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))

        # We're done - commit the database changes
        session.commit()
        # Our SQL session will automatically start a new transaction after
        # the last commit

        # Move the .changes into the 'done' directory
        utils.move(self.pkg.changes_file,
                   os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))

        if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
            UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])

        # Send the "accepted" mail and announce to the configured lists.
        self.update_subst()
        self.Subst["__SUMMARY__"] = summary
        mail_message = utils.TemplateSubst(self.Subst,
                                           os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
        utils.send_mail(mail_message)
        self.announce(short_summary, 1)

        ## Helper stuff for DebBugs Version Tracking
        if cnf.Find("Dir::Queue::BTSVersionTrack"):
            if self.pkg.changes["architecture"].has_key("source"):
                # Write the changelog-derived version history for the BTS.
                (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
                version_history = os.fdopen(fd, 'w')
                version_history.write(self.pkg.dsc["bts changelog"])
                version_history.close()
                # [:-8] strips the ".changes" suffix from the filename.
                filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
                                      self.pkg.changes_file[:-8]+".versions")
                os.rename(temp_filename, filename)
                os.chmod(filename, 0644)

            # Write out the binary -> source mapping.
            (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
            debinfo = os.fdopen(fd, 'w')
            for name, entry in sorted(self.pkg.files.items()):
                if entry["type"] == "deb":
                    line = " ".join([entry["package"], entry["version"],
                                     entry["architecture"], entry["source package"],
                                     entry["source version"]])
                    debinfo.write(line+"\n")
            debinfo.close()
            filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
                                  self.pkg.changes_file[:-8]+".debinfo")
            os.rename(temp_filename, filename)
            os.chmod(filename, 0644)

        session.commit()

        # Set up our copy queues (e.g. buildd queues)
        for suite_name in self.pkg.changes["distribution"].keys():
            suite = get_suite(suite_name, session)
            for q in suite.copy_queues:
                for f in poolfiles:
                    q.add_file_from_pool(f)

        session.commit()

        # Finally...
        stats.accept_count += 1
2080
2081     def check_override(self):
2082         """
2083         Checks override entries for validity. Mails "Override disparity" warnings,
2084         if that feature is enabled.
2085
2086         Abandons the check if
2087           - override disparity checks are disabled
2088           - mail sending is disabled
2089         """
2090
2091         cnf = Config()
2092
2093         # Abandon the check if override disparity checks have been disabled
2094         if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
2095             return
2096
2097         summary = self.pkg.check_override()
2098
2099         if summary == "":
2100             return
2101
2102         overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2103
2104         self.update_subst()
2105         self.Subst["__SUMMARY__"] = summary
2106         mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2107         utils.send_mail(mail_message)
2108         del self.Subst["__SUMMARY__"]
2109
2110     ###########################################################################
2111
2112     def remove(self, from_dir=None):
2113         """
2114         Used (for instance) in p-u to remove the package from unchecked
2115
2116         Also removes the package from holding area.
2117         """
2118         if from_dir is None:
2119             from_dir = self.pkg.directory
2120         h = Holding()
2121
2122         for f in self.pkg.files.keys():
2123             os.unlink(os.path.join(from_dir, f))
2124             if os.path.exists(os.path.join(h.holding_dir, f)):
2125                 os.unlink(os.path.join(h.holding_dir, f))
2126
2127         os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2128         if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2129             os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2130
2131     ###########################################################################
2132
2133     def move_to_queue (self, queue):
2134         """
2135         Move files to a destination queue using the permissions in the table
2136         """
2137         h = Holding()
2138         utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2139                    queue.path, perms=int(queue.change_perms, 8))
2140         for f in self.pkg.files.keys():
2141             utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
2142
2143     ###########################################################################
2144
    def force_reject(self, reject_files):
        """
        Forcefully move files from the current directory to the
        reject directory.  If any file already exists in the reject
        directory it will be moved to the morgue to make way for
        the new file.

        @type reject_files: dict
        @param reject_files: file dictionary (only iterated, so any
            iterable of filenames works -- callers pass a list)

        """

        cnf = Config()

        for file_entry in reject_files:
            # Skip any files which don't exist or which we don't have permission to copy.
            if os.access(file_entry, os.R_OK) == 0:
                continue

            dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)

            try:
                # O_EXCL: atomically claim the destination name, so a racing
                # process can't slip its own file in under the same name.
                dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
            except OSError, e:
                # File exists?  Let's find a new name by adding a number
                if e.errno == errno.EEXIST:
                    try:
                        dest_file = utils.find_next_free(dest_file, 255)
                    except NoFreeFilenameError:
                        # Something's either gone badly Pete Tong, or
                        # someone is trying to exploit us.
                        utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))
                        return

                    # Make sure we really got it
                    try:
                        dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
                    except OSError, e:
                        # Likewise
                        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
                        return
                else:
                    raise
            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file_entry, dest_file, 1, perms=0660)
            os.close(dest_fd)
2192
2193     ###########################################################################
    def do_reject (self, manual=0, reject_message="", notes=""):
        """
        Reject an upload. If called without a reject message or C{manual} is
        true, spawn an editor so the user can write one.

        @type manual: bool
        @param manual: manual or automated rejection

        @type reject_message: string
        @param reject_message: A reject message

        @type notes: list
        @param notes: note objects (with author/version/notedate/comment
            attributes) used to pre-fill the editor buffer; the default
            empty string simply yields no pre-fill

        @return: 0 on rejection; 1 if the operator abandoned the reject

        """
        # If we weren't given a manual rejection message, spawn an
        # editor so the user can add one in...
        if manual and not reject_message:
            (fd, temp_filename) = utils.temp_filename()
            temp_file = os.fdopen(fd, 'w')
            # Seed the editor with any existing notes about this upload
            if len(notes) > 0:
                for note in notes:
                    temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
                                    % (note.author, note.version, note.notedate, note.comment))
            temp_file.close()
            editor = os.environ.get("EDITOR","vi")
            answer = 'E'
            # Loop: edit, show the message, then prompt until the operator
            # picks Reject, Edit (again), Abandon or Quit.
            while answer == 'E':
                os.system("%s %s" % (editor, temp_filename))
                temp_fh = utils.open_file(temp_filename)
                reject_message = "".join(temp_fh.readlines())
                temp_fh.close()
                print "Reject message:"
                print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
                prompt = "[R]eject, Edit, Abandon, Quit ?"
                answer = "XXX"
                while prompt.find(answer) == -1:
                    answer = utils.our_raw_input(prompt)
                    m = re_default_answer.search(prompt)
                    if answer == "":
                        answer = m.group(1)
                    answer = answer[:1].upper()
            os.unlink(temp_filename)
            if answer == 'A':
                return 1
            elif answer == 'Q':
                sys.exit(0)

        print "Rejecting.\n"

        cnf = Config()

        # <foo>.changes -> <foo>.reason in the reject directory
        reason_filename = self.pkg.changes_file[:-8] + ".reason"
        reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)

        # Move all the files into the reject directory
        reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
        self.force_reject(reject_files)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reason_filename):
            os.unlink(reason_filename)
        reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)

        rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")

        self.update_subst()
        if not manual:
            # Automated reject: the reason file gets the raw message, the
            # mail is marked as automatic.
            self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
            self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
            self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
            os.write(reason_fd, reject_message)
            reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
        else:
            # Build up the rejection email
            user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
            self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
            self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
            self.Subst["__REJECT_MESSAGE__"] = ""
            self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
            reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
            # Write the rejection email out as the <foo>.reason file
            os.write(reason_fd, reject_mail_message)

        del self.Subst["__REJECTOR_ADDRESS__"]
        del self.Subst["__MANUAL_REJECT_MESSAGE__"]
        del self.Subst["__CC__"]

        os.close(reason_fd)

        # Send the rejection mail
        utils.send_mail(reject_mail_message)

        if self.logger:
            self.logger.log(["rejected", self.pkg.changes_file])

        return 0
2291
2292     ################################################################################
2293     def in_override_p(self, package, component, suite, binary_type, filename, session):
2294         """
2295         Check if a package already has override entries in the DB
2296
2297         @type package: string
2298         @param package: package name
2299
2300         @type component: string
2301         @param component: database id of the component
2302
2303         @type suite: int
2304         @param suite: database id of the suite
2305
2306         @type binary_type: string
2307         @param binary_type: type of the package
2308
2309         @type filename: string
2310         @param filename: filename we check
2311
2312         @return: the database result. But noone cares anyway.
2313
2314         """
2315
2316         cnf = Config()
2317
2318         if binary_type == "": # must be source
2319             file_type = "dsc"
2320         else:
2321             file_type = binary_type
2322
2323         # Override suite name; used for example with proposed-updates
2324         if cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
2325             suite = cnf["Suite::%s::OverrideSuite" % (suite)]
2326
2327         result = get_override(package, suite, component, file_type, session)
2328
2329         # If checking for a source package fall back on the binary override type
2330         if file_type == "dsc" and len(result) < 1:
2331             result = get_override(package, suite, component, ['deb', 'udeb'], session)
2332
2333         # Remember the section and priority so we can check them later if appropriate
2334         if len(result) > 0:
2335             result = result[0]
2336             self.pkg.files[filename]["override section"] = result.section.section
2337             self.pkg.files[filename]["override priority"] = result.priority.priority
2338             return result
2339
2340         return None
2341
2342     ################################################################################
2343     def get_anyversion(self, sv_list, suite):
2344         """
2345         @type sv_list: list
2346         @param sv_list: list of (suite, version) tuples to check
2347
2348         @type suite: string
2349         @param suite: suite name
2350
2351         Description: TODO
2352         """
2353         Cnf = Config()
2354         anyversion = None
2355         anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
2356         for (s, v) in sv_list:
2357             if s in [ x.lower() for x in anysuite ]:
2358                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2359                     anyversion = v
2360
2361         return anyversion
2362
2363     ################################################################################
2364
    def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
        """
        Ensure versions are newer than existing packages in target
        suites and that cross-suite version checking rules as
        set out in the conf file are satisfied.

        Appends to self.rejects / self.warnings, and may mark suites in
        self.pkg.changes["propdistribution"] when a conflict can be
        resolved by propagating this upload to another suite.

        @type sv_list: list
        @param sv_list: list of (suite, version) tuples to check

        @type filename: string
        @param filename: name of the file being checked; only used in
            reject/warning messages

        @type new_version: string
        @param new_version: version of the upload being checked

        @type sourceful: bool
        @param sourceful: whether the upload carries source; the
            MustBeNewerThan rejects are only applied to sourceful uploads
        """

        cnf = Config()

        # Check versions for each target suite
        for target_suite in self.pkg.changes["distribution"].keys():
            must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
            must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]

            # Enforce "must be newer than target suite" even if conffile omits it
            if target_suite not in must_be_newer_than:
                must_be_newer_than.append(target_suite)

            for (suite, existent_version) in sv_list:
                vercmp = apt_pkg.VersionCompare(new_version, existent_version)

                # vercmp < 1 means new version <= what the suite already has
                if suite in must_be_newer_than and sourceful and vercmp < 1:
                    self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))

                # vercmp > -1 means new version >= a suite that must stay ahead
                if suite in must_be_older_than and vercmp > -1:
                    cansave = 0

                    if self.pkg.changes.get('distribution-version', {}).has_key(suite):
                        # we really use the other suite, ignoring the conflicting one ...
                        addsuite = self.pkg.changes["distribution-version"][suite]

                        add_version = self.get_anyversion(sv_list, addsuite)
                        target_version = self.get_anyversion(sv_list, target_suite)

                        if not add_version:
                            # not add_version can only happen if we map to a suite
                            # that doesn't enhance the suite we're propup'ing from.
                            # so "propup-ver x a b c; map a d" is a problem only if
                            # d doesn't enhance a.
                            #
                            # i think we could always propagate in this case, rather
                            # than complaining. either way, this isn't a REJECT issue
                            #
                            # And - we really should complain to the dorks who configured dak
                            self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite))
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            cansave = 1
                        elif not target_version:
                            # not targets_version is true when the package is NEW
                            # we could just stick with the "...old version..." REJECT
                            # for this, I think.
                            self.rejects.append("Won't propogate NEW packages.")
                        elif apt_pkg.VersionCompare(new_version, add_version) < 0:
                            # propogation would be redundant. no need to reject though.
                            self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
                            cansave = 1
                        elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
                             apt_pkg.VersionCompare(add_version, target_version) >= 0:
                            # propogate!!
                            self.warnings.append("Propogating upload to %s" % (addsuite))
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            cansave = 1

                    if not cansave:
                        self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2441
2442     ################################################################################
2443     def check_binary_against_db(self, filename, session):
2444         # Ensure version is sane
2445         q = session.query(BinAssociation)
2446         q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[filename]["package"])
2447         q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[filename]["architecture"], 'all']))
2448
2449         self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
2450                                        filename, self.pkg.files[filename]["version"], sourceful=False)
2451
2452         # Check for any existing copies of the file
2453         q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2454         q = q.filter_by(version=self.pkg.files[filename]["version"])
2455         q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2456
2457         if q.count() > 0:
2458             self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
2459
2460     ################################################################################
2461
2462     def check_source_against_db(self, filename, session):
2463         source = self.pkg.dsc.get("source")
2464         version = self.pkg.dsc.get("version")
2465
2466         # Ensure version is sane
2467         q = session.query(SrcAssociation)
2468         q = q.join(DBSource).filter(DBSource.source==source)
2469
2470         self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
2471                                        filename, version, sourceful=True)
2472
2473     ################################################################################
2474     def check_dsc_against_db(self, filename, session):
2475         """
2476
2477         @warning: NB: this function can remove entries from the 'files' index [if
2478          the orig tarball is a duplicate of the one in the archive]; if
2479          you're iterating over 'files' and call this function as part of
2480          the loop, be sure to add a check to the top of the loop to
2481          ensure you haven't just tried to dereference the deleted entry.
2482
2483         """
2484
2485         Cnf = Config()
2486         self.pkg.orig_files = {} # XXX: do we need to clear it?
2487         orig_files = self.pkg.orig_files
2488
2489         # Try and find all files mentioned in the .dsc.  This has
2490         # to work harder to cope with the multiple possible
2491         # locations of an .orig.tar.gz.
2492         # The ordering on the select is needed to pick the newest orig
2493         # when it exists in multiple places.
2494         for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2495             found = None
2496             if self.pkg.files.has_key(dsc_name):
2497                 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2498                 actual_size = int(self.pkg.files[dsc_name]["size"])
2499                 found = "%s in incoming" % (dsc_name)
2500
2501                 # Check the file does not already exist in the archive
2502                 ql = get_poolfile_like_name(dsc_name, session)
2503
2504                 # Strip out anything that isn't '%s' or '/%s$'
2505                 for i in ql:
2506                     if not i.filename.endswith(dsc_name):
2507                         ql.remove(i)
2508
2509                 # "[dak] has not broken them.  [dak] has fixed a
2510                 # brokenness.  Your crappy hack exploited a bug in
2511                 # the old dinstall.
2512                 #
2513                 # "(Come on!  I thought it was always obvious that
2514                 # one just doesn't release different files with
2515                 # the same name and version.)"
2516                 #                        -- ajk@ on d-devel@l.d.o
2517
2518                 if len(ql) > 0:
2519                     # Ignore exact matches for .orig.tar.gz
2520                     match = 0
2521                     if re_is_orig_source.match(dsc_name):
2522                         for i in ql:
2523                             if self.pkg.files.has_key(dsc_name) and \
2524                                int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2525                                self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2526                                 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2527                                 # TODO: Don't delete the entry, just mark it as not needed
2528                                 # This would fix the stupidity of changing something we often iterate over
2529                                 # whilst we're doing it
2530                                 del self.pkg.files[dsc_name]
2531                                 dsc_entry["files id"] = i.file_id
2532                                 if not orig_files.has_key(dsc_name):
2533                                     orig_files[dsc_name] = {}
2534                                 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2535                                 match = 1
2536
2537                                 # Don't bitch that we couldn't find this file later
2538                                 try:
2539                                     self.later_check_files.remove(dsc_name)
2540                                 except ValueError:
2541                                     pass
2542
2543
2544                     if not match:
2545                         self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2546
2547             elif re_is_orig_source.match(dsc_name):
2548                 # Check in the pool
2549                 ql = get_poolfile_like_name(dsc_name, session)
2550
2551                 # Strip out anything that isn't '%s' or '/%s$'
2552                 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2553                 for i in ql:
2554                     if not i.filename.endswith(dsc_name):
2555                         ql.remove(i)
2556
2557                 if len(ql) > 0:
2558                     # Unfortunately, we may get more than one match here if,
2559                     # for example, the package was in potato but had an -sa
2560                     # upload in woody.  So we need to choose the right one.
2561
2562                     # default to something sane in case we don't match any or have only one
2563                     x = ql[0]
2564
2565                     if len(ql) > 1:
2566                         for i in ql:
2567                             old_file = os.path.join(i.location.path, i.filename)
2568                             old_file_fh = utils.open_file(old_file)
2569                             actual_md5 = apt_pkg.md5sum(old_file_fh)
2570                             old_file_fh.close()
2571                             actual_size = os.stat(old_file)[stat.ST_SIZE]
2572                             if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2573                                 x = i
2574
2575                     old_file = os.path.join(i.location.path, i.filename)
2576                     old_file_fh = utils.open_file(old_file)
2577                     actual_md5 = apt_pkg.md5sum(old_file_fh)
2578                     old_file_fh.close()
2579                     actual_size = os.stat(old_file)[stat.ST_SIZE]
2580                     found = old_file
2581                     suite_type = x.location.archive_type
2582                     # need this for updating dsc_files in install()
2583                     dsc_entry["files id"] = x.file_id
2584                     # See install() in process-accepted...
2585                     if not orig_files.has_key(dsc_name):
2586                         orig_files[dsc_name] = {}
2587                     orig_files[dsc_name]["id"] = x.file_id
2588                     orig_files[dsc_name]["path"] = old_file
2589                     orig_files[dsc_name]["location"] = x.location.location_id
2590                 else:
2591                     # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2592                     # Not there? Check the queue directories...
2593                     for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2594                         if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2595                             continue
2596                         in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2597                         if os.path.exists(in_otherdir):
2598                             in_otherdir_fh = utils.open_file(in_otherdir)
2599                             actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2600                             in_otherdir_fh.close()
2601                             actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2602                             found = in_otherdir
2603                             if not orig_files.has_key(dsc_name):
2604                                 orig_files[dsc_name] = {}
2605                             orig_files[dsc_name]["path"] = in_otherdir
2606
2607                     if not found:
2608                         self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2609                         continue
2610             else:
2611                 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2612                 continue
2613             if actual_md5 != dsc_entry["md5sum"]:
2614                 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2615             if actual_size != int(dsc_entry["size"]):
2616                 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2617
2618     ################################################################################
2619     # This is used by process-new and process-holding to recheck a changes file
2620     # at the time we're running.  It mainly wraps various other internal functions
2621     # and is similar to accepted_checks - these should probably be tidied up
2622     # and combined
2623     def recheck(self, session):
2624         cnf = Config()
2625         for f in self.pkg.files.keys():
2626             # The .orig.tar.gz can disappear out from under us is it's a
2627             # duplicate of one in the archive.
2628             if not self.pkg.files.has_key(f):
2629                 continue
2630
2631             entry = self.pkg.files[f]
2632
2633             # Check that the source still exists
2634             if entry["type"] == "deb":
2635                 source_version = entry["source version"]
2636                 source_package = entry["source package"]
2637                 if not self.pkg.changes["architecture"].has_key("source") \
2638                    and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
2639                     source_epochless_version = re_no_epoch.sub('', source_version)
2640                     dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2641                     found = False
2642                     for q in ["Embargoed", "Unembargoed", "Newstage"]:
2643                         if cnf.has_key("Dir::Queue::%s" % (q)):
2644                             if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
2645                                 found = True
2646                     if not found:
2647                         self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2648
2649             # Version and file overwrite checks
2650             if entry["type"] == "deb":
2651                 self.check_binary_against_db(f, session)
2652             elif entry["type"] == "dsc":
2653                 self.check_source_against_db(f, session)
2654                 self.check_dsc_against_db(f, session)
2655
2656     ################################################################################
2657     def accepted_checks(self, overwrite_checks, session):
2658         # Recheck anything that relies on the database; since that's not
2659         # frozen between accept and our run time when called from p-a.
2660
2661         # overwrite_checks is set to False when installing to stable/oldstable
2662
2663         propogate={}
2664         nopropogate={}
2665
2666         # Find the .dsc (again)
2667         dsc_filename = None
2668         for f in self.pkg.files.keys():
2669             if self.pkg.files[f]["type"] == "dsc":
2670                 dsc_filename = f
2671
2672         for checkfile in self.pkg.files.keys():
2673             # The .orig.tar.gz can disappear out from under us is it's a
2674             # duplicate of one in the archive.
2675             if not self.pkg.files.has_key(checkfile):
2676                 continue
2677
2678             entry = self.pkg.files[checkfile]
2679
2680             # Check that the source still exists
2681             if entry["type"] == "deb":
2682                 source_version = entry["source version"]
2683                 source_package = entry["source package"]
2684                 if not self.pkg.changes["architecture"].has_key("source") \
2685                    and not source_exists(source_package, source_version,  self.pkg.changes["distribution"].keys()):
2686                     self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2687
2688             # Version and file overwrite checks
2689             if overwrite_checks:
2690                 if entry["type"] == "deb":
2691                     self.check_binary_against_db(checkfile, session)
2692                 elif entry["type"] == "dsc":
2693                     self.check_source_against_db(checkfile, session)
2694                     self.check_dsc_against_db(dsc_filename, session)
2695
2696             # propogate in the case it is in the override tables:
2697             for suite in self.pkg.changes.get("propdistribution", {}).keys():
2698                 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2699                     propogate[suite] = 1
2700                 else:
2701                     nopropogate[suite] = 1
2702
2703         for suite in propogate.keys():
2704             if suite in nopropogate:
2705                 continue
2706             self.pkg.changes["distribution"][suite] = 1
2707
2708         for checkfile in self.pkg.files.keys():
2709             # Check the package is still in the override tables
2710             for suite in self.pkg.changes["distribution"].keys():
2711                 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2712                     self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2713
2714     ################################################################################
2715     # If any file of an upload has a recent mtime then chances are good
2716     # the file is still being uploaded.
2717
2718     def upload_too_new(self):
2719         cnf = Config()
2720         too_new = False
2721         # Move back to the original directory to get accurate time stamps
2722         cwd = os.getcwd()
2723         os.chdir(self.pkg.directory)
2724         file_list = self.pkg.files.keys()
2725         file_list.extend(self.pkg.dsc_files.keys())
2726         file_list.append(self.pkg.changes_file)
2727         for f in file_list:
2728             try:
2729                 last_modified = time.time()-os.path.getmtime(f)
2730                 if last_modified < int(cnf["Dinstall::SkipTime"]):
2731                     too_new = True
2732                     break
2733             except:
2734                 pass
2735
2736         os.chdir(cwd)
2737         return too_new
2738
2739     def store_changelog(self):
2740
2741         # Skip binary-only upload if it is not a bin-NMU
2742         if not self.pkg.changes['architecture'].has_key('source'):
2743             from daklib.regexes import re_bin_only_nmu
2744             if not re_bin_only_nmu.search(self.pkg.changes['version']):
2745                 return
2746
2747         session = DBConn().session()
2748
2749         # Check if upload already has a changelog entry
2750         query = """SELECT changelog_id FROM changes WHERE source = :source
2751                    AND version = :version AND architecture = :architecture AND changelog_id != 0"""
2752         if session.execute(query, {'source': self.pkg.changes['source'], \
2753                                    'version': self.pkg.changes['version'], \
2754                                    'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
2755             session.commit()
2756             return
2757
2758         # Add current changelog text into changelogs_text table, return created ID
2759         query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
2760         ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]
2761
2762         # Link ID to the upload available in changes table
2763         query = """UPDATE changes SET changelog_id = :id WHERE source = :source
2764                    AND version = :version AND architecture = :architecture"""
2765         session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
2766                                 'version': self.pkg.changes['version'], \
2767                                 'architecture': " ".join(self.pkg.changes['architecture'].keys())})
2768
2769         session.commit()