]> git.decadent.org.uk Git - dak.git/blob - daklib/queue.py
dec339af60c310940bb09febd921c8f503b048cd
[dak.git] / daklib / queue.py
1 #!/usr/bin/env python
2 # vim:set et sw=4:
3
4 """
5 Queue utility functions for dak
6
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009, 2010  Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
11 """
12
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
17
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
21 # GNU General Public License for more details.
22
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
26
27 ###############################################################################
28
29 import errno
30 import os
31 import stat
32 import sys
33 import time
34 import apt_inst
35 import apt_pkg
36 import utils
37 import commands
38 import shutil
39 import textwrap
40 from types import *
41 from sqlalchemy.sql.expression import desc
42 from sqlalchemy.orm.exc import NoResultFound
43
44 import yaml
45
46 from dak_exceptions import *
47 from changes import *
48 from regexes import *
49 from config import Config
50 from holding import Holding
51 from urgencylog import UrgencyLog
52 from dbconn import *
53 from summarystats import SummaryStats
54 from utils import parse_changes, check_dsc_files
55 from textutils import fix_maintainer
56 from binary import Binary
57 from lintian import parse_lintian_output, generate_reject_messages
58
59 ###############################################################################
60
61 def get_type(f, session):
62     """
63     Get the file type of C{f}
64
65     @type f: dict
66     @param f: file entry from Changes object
67
68     @type session: SQLA Session
69     @param session: SQL Alchemy session object
70
71     @rtype: string
72     @return: filetype
73
74     """
75     # Determine the type
76     if f.has_key("dbtype"):
77         file_type = f["dbtype"]
78     elif re_source_ext.match(f["type"]):
79         file_type = "dsc"
80     else:
81         file_type = f["type"]
82         utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (file_type))
83
84     # Validate the override type
85     type_id = get_override_type(file_type, session)
86     if type_id is None:
87         utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))
88
89     return file_type
90
91 ################################################################################
92
93 # Determine what parts in a .changes are NEW
94
95 def determine_new(filename, changes, files, warn=1, session = None):
96     """
97     Determine what parts in a C{changes} file are NEW.
98
99     @type filename: str
100     @param filename: changes filename
101
102     @type changes: Upload.Pkg.changes dict
103     @param changes: Changes dictionary
104
105     @type files: Upload.Pkg.files dict
106     @param files: Files dictionary
107
108     @type warn: bool
109     @param warn: Warn if overrides are added for (old)stable
110
111     @rtype: dict
112     @return: dictionary of NEW components.
113
114     """
115     # TODO: This should all use the database instead of parsing the changes
116     # file again
117     new = {}
118     byhand = {}
119
120     dbchg = get_dbchange(filename, session)
121     if dbchg is None:
122         print "Warning: cannot find changes file in database; won't check byhand"
123
124     # Build up a list of potentially new things
125     for name, f in files.items():
126         # Keep a record of byhand elements
127         if f["section"] == "byhand":
128             byhand[name] = 1
129             continue
130
131         pkg = f["package"]
132         priority = f["priority"]
133         section = f["section"]
134         file_type = get_type(f, session)
135         component = f["component"]
136
137         if file_type == "dsc":
138             priority = "source"
139
140         if not new.has_key(pkg):
141             new[pkg] = {}
142             new[pkg]["priority"] = priority
143             new[pkg]["section"] = section
144             new[pkg]["type"] = file_type
145             new[pkg]["component"] = component
146             new[pkg]["files"] = []
147         else:
148             old_type = new[pkg]["type"]
149             if old_type != file_type:
150                 # source gets trumped by deb or udeb
151                 if old_type == "dsc":
152                     new[pkg]["priority"] = priority
153                     new[pkg]["section"] = section
154                     new[pkg]["type"] = file_type
155                     new[pkg]["component"] = component
156
157         new[pkg]["files"].append(name)
158
159         if f.has_key("othercomponents"):
160             new[pkg]["othercomponents"] = f["othercomponents"]
161
162     # Fix up the list of target suites
163     cnf = Config()
164     for suite in changes["suite"].keys():
165         oldsuite = get_suite(suite, session)
166         if not oldsuite:
167             print "WARNING: Invalid suite %s found" % suite
168             continue
169
170         if oldsuite.overridesuite:
171             newsuite = get_suite(oldsuite.overridesuite, session)
172
173             if newsuite:
174                 print "INFORMATION: Using overrides from suite %s instead of suite %s" % (
175                     oldsuite.overridesuite, suite)
176                 del changes["suite"][suite]
177                 changes["suite"][oldsuite.overridesuite] = 1
178             else:
179                 print "WARNING: Told to use overridesuite %s for %s but it doesn't exist.  Bugger" % (
180                     oldsuite.overridesuite, suite)
181
182     # Check for unprocessed byhand files
183     if dbchg is not None:
184         for b in byhand.keys():
185             # Find the file entry in the database
186             found = False
187             for f in dbchg.files:
188                 if f.filename == b:
189                     found = True
190                     # If it's processed, we can ignore it
191                     if f.processed:
192                         del byhand[b]
193                     break
194
195             if not found:
196                 print "Warning: Couldn't find BYHAND item %s in the database; assuming unprocessed"
197
198     # Check for new stuff
199     for suite in changes["suite"].keys():
200         for pkg in new.keys():
201             ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
202             if len(ql) > 0:
203                 for file_entry in new[pkg]["files"]:
204                     if files[file_entry].has_key("new"):
205                         del files[file_entry]["new"]
206                 del new[pkg]
207
208     if warn:
209         for s in ['stable', 'oldstable']:
210             if changes["suite"].has_key(s):
211                 print "WARNING: overrides will be added for %s!" % s
212         for pkg in new.keys():
213             if new[pkg].has_key("othercomponents"):
214                 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
215
216     return new, byhand
217
218 ################################################################################
219
220 def check_valid(new, session = None):
221     """
222     Check if section and priority for NEW packages exist in database.
223     Additionally does sanity checks:
224       - debian-installer packages have to be udeb (or source)
225       - non debian-installer packages can not be udeb
226       - source priority can only be assigned to dsc file types
227
228     @type new: dict
229     @param new: Dict of new packages with their section, priority and type.
230
231     """
232     for pkg in new.keys():
233         section_name = new[pkg]["section"]
234         priority_name = new[pkg]["priority"]
235         file_type = new[pkg]["type"]
236
237         section = get_section(section_name, session)
238         if section is None:
239             new[pkg]["section id"] = -1
240         else:
241             new[pkg]["section id"] = section.section_id
242
243         priority = get_priority(priority_name, session)
244         if priority is None:
245             new[pkg]["priority id"] = -1
246         else:
247             new[pkg]["priority id"] = priority.priority_id
248
249         # Sanity checks
250         di = section_name.find("debian-installer") != -1
251
252         # If d-i, we must be udeb and vice-versa
253         if     (di and file_type not in ("udeb", "dsc")) or \
254            (not di and file_type == "udeb"):
255             new[pkg]["section id"] = -1
256
257         # If dsc we need to be source and vice-versa
258         if (priority == "source" and file_type != "dsc") or \
259            (priority != "source" and file_type == "dsc"):
260             new[pkg]["priority id"] = -1
261
262 ###############################################################################
263
264 # Used by Upload.check_timestamps
265 class TarTime(object):
266     def __init__(self, future_cutoff, past_cutoff):
267         self.reset()
268         self.future_cutoff = future_cutoff
269         self.past_cutoff = past_cutoff
270
271     def reset(self):
272         self.future_files = {}
273         self.ancient_files = {}
274
275     def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
276         if MTime > self.future_cutoff:
277             self.future_files[Name] = MTime
278         if MTime < self.past_cutoff:
279             self.ancient_files[Name] = MTime
280
281 ###############################################################################
282
283 def prod_maintainer(notes, upload):
284     cnf = Config()
285
286     # Here we prepare an editor and get them ready to prod...
287     (fd, temp_filename) = utils.temp_filename()
288     temp_file = os.fdopen(fd, 'w')
289     for note in notes:
290         temp_file.write(note.comment)
291     temp_file.close()
292     editor = os.environ.get("EDITOR","vi")
293     answer = 'E'
294     while answer == 'E':
295         os.system("%s %s" % (editor, temp_filename))
296         temp_fh = utils.open_file(temp_filename)
297         prod_message = "".join(temp_fh.readlines())
298         temp_fh.close()
299         print "Prod message:"
300         print utils.prefix_multi_line_string(prod_message,"  ",include_blank_lines=1)
301         prompt = "[P]rod, Edit, Abandon, Quit ?"
302         answer = "XXX"
303         while prompt.find(answer) == -1:
304             answer = utils.our_raw_input(prompt)
305             m = re_default_answer.search(prompt)
306             if answer == "":
307                 answer = m.group(1)
308             answer = answer[:1].upper()
309     os.unlink(temp_filename)
310     if answer == 'A':
311         return
312     elif answer == 'Q':
313         end()
314         sys.exit(0)
315     # Otherwise, do the proding...
316     user_email_address = utils.whoami() + " <%s>" % (
317         cnf["Dinstall::MyAdminAddress"])
318
319     Subst = upload.Subst
320
321     Subst["__FROM_ADDRESS__"] = user_email_address
322     Subst["__PROD_MESSAGE__"] = prod_message
323     Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
324
325     prod_mail_message = utils.TemplateSubst(
326         Subst,cnf["Dir::Templates"]+"/process-new.prod")
327
328     # Send the prod mail
329     utils.send_mail(prod_mail_message)
330
331     print "Sent prodding message"
332
333 ################################################################################
334
335 def edit_note(note, upload, session):
336     # Write the current data to a temporary file
337     (fd, temp_filename) = utils.temp_filename()
338     editor = os.environ.get("EDITOR","vi")
339     answer = 'E'
340     while answer == 'E':
341         os.system("%s %s" % (editor, temp_filename))
342         temp_file = utils.open_file(temp_filename)
343         newnote = temp_file.read().rstrip()
344         temp_file.close()
345         print "New Note:"
346         print utils.prefix_multi_line_string(newnote,"  ")
347         prompt = "[D]one, Edit, Abandon, Quit ?"
348         answer = "XXX"
349         while prompt.find(answer) == -1:
350             answer = utils.our_raw_input(prompt)
351             m = re_default_answer.search(prompt)
352             if answer == "":
353                 answer = m.group(1)
354             answer = answer[:1].upper()
355     os.unlink(temp_filename)
356     if answer == 'A':
357         return
358     elif answer == 'Q':
359         end()
360         sys.exit(0)
361
362     comment = NewComment()
363     comment.package = upload.pkg.changes["source"]
364     comment.version = upload.pkg.changes["version"]
365     comment.comment = newnote
366     comment.author  = utils.whoami()
367     comment.trainee = bool(Options["Trainee"])
368     session.add(comment)
369     session.commit()
370
371 ###############################################################################
372
373 class Upload(object):
374     """
375     Everything that has to do with an upload processed.
376
377     """
378     def __init__(self):
379         self.logger = None
380         self.pkg = Changes()
381         self.reset()
382
383     ###########################################################################
384
385     def reset (self):
386         """ Reset a number of internal variables."""
387
388         # Initialize the substitution template map
389         cnf = Config()
390         self.Subst = {}
391         self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
392         self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
393         self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
394         self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
395
396         self.rejects = []
397         self.warnings = []
398         self.notes = []
399
400         self.later_check_files = []
401
402         self.pkg.reset()
403
404     def package_info(self):
405         """
406         Format various messages from this Upload to send to the maintainer.
407         """
408
409         msgs = (
410             ('Reject Reasons', self.rejects),
411             ('Warnings', self.warnings),
412             ('Notes', self.notes),
413         )
414
415         msg = ''
416         for title, messages in msgs:
417             if messages:
418                 msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
419         msg += '\n\n'
420
421         return msg
422
423     ###########################################################################
424     def update_subst(self):
425         """ Set up the per-package template substitution mappings """
426
427         cnf = Config()
428
429         # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
430         if not self.pkg.changes.has_key("architecture") or not \
431            isinstance(self.pkg.changes["architecture"], dict):
432             self.pkg.changes["architecture"] = { "Unknown" : "" }
433
434         # and maintainer2047 may not exist.
435         if not self.pkg.changes.has_key("maintainer2047"):
436             self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]
437
438         self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
439         self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
440         self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")
441
442         # For source uploads the Changed-By field wins; otherwise Maintainer wins.
443         if self.pkg.changes["architecture"].has_key("source") and \
444            self.pkg.changes["changedby822"] != "" and \
445            (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
446
447             self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
448             self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
449             self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
450         else:
451             self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
452             self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
453             self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")
454
455         # Process policy doesn't set the fingerprint field and I don't want to make it
456         # do it for now as I don't want to have to deal with the case where we accepted
457         # the package into PU-NEW, but the fingerprint has gone away from the keyring in
458         # the meantime so the package will be remarked as rejectable.  Urgh.
459         # TODO: Fix this properly
460         if self.pkg.changes.has_key('fingerprint'):
461             session = DBConn().session()
462             fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
463             if fpr and self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
464                 if self.pkg.changes.has_key("sponsoremail"):
465                     self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
466             session.close()
467
468         if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
469             self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
470
471         # Apply any global override of the Maintainer field
472         if cnf.get("Dinstall::OverrideMaintainer"):
473             self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
474             self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]
475
476         self.Subst["__REJECT_MESSAGE__"] = self.package_info()
477         self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
478         self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
479         self.Subst["__SUITE__"] = ", ".join(self.pkg.changes["distribution"])
480
481     ###########################################################################
482     def load_changes(self, filename):
483         """
484         Load a changes file and setup a dictionary around it. Also checks for mandantory
485         fields  within.
486
487         @type filename: string
488         @param filename: Changes filename, full path.
489
490         @rtype: boolean
491         @return: whether the changes file was valid or not.  We may want to
492                  reject even if this is True (see what gets put in self.rejects).
493                  This is simply to prevent us even trying things later which will
494                  fail because we couldn't properly parse the file.
495         """
496         Cnf = Config()
497         self.pkg.changes_file = filename
498
499         # Parse the .changes field into a dictionary
500         try:
501             self.pkg.changes.update(parse_changes(filename))
502         except CantOpenError:
503             self.rejects.append("%s: can't read file." % (filename))
504             return False
505         except ParseChangesError, line:
506             self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
507             return False
508         except ChangesUnicodeError:
509             self.rejects.append("%s: changes file not proper utf-8" % (filename))
510             return False
511
512         # Parse the Files field from the .changes into another dictionary
513         try:
514             self.pkg.files.update(utils.build_file_list(self.pkg.changes))
515         except ParseChangesError, line:
516             self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
517             return False
518         except UnknownFormatError, format:
519             self.rejects.append("%s: unknown format '%s'." % (filename, format))
520             return False
521
522         # Check for mandatory fields
523         for i in ("distribution", "source", "binary", "architecture",
524                   "version", "maintainer", "files", "changes", "description"):
525             if not self.pkg.changes.has_key(i):
526                 # Avoid undefined errors later
527                 self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
528                 return False
529
530         # Strip a source version in brackets from the source field
531         if re_strip_srcver.search(self.pkg.changes["source"]):
532             self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])
533
534         # Ensure the source field is a valid package name.
535         if not re_valid_pkg_name.match(self.pkg.changes["source"]):
536             self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))
537
538         # Split multi-value fields into a lower-level dictionary
539         for i in ("architecture", "distribution", "binary", "closes"):
540             o = self.pkg.changes.get(i, "")
541             if o != "":
542                 del self.pkg.changes[i]
543
544             self.pkg.changes[i] = {}
545
546             for j in o.split():
547                 self.pkg.changes[i][j] = 1
548
549         # Fix the Maintainer: field to be RFC822/2047 compatible
550         try:
551             (self.pkg.changes["maintainer822"],
552              self.pkg.changes["maintainer2047"],
553              self.pkg.changes["maintainername"],
554              self.pkg.changes["maintaineremail"]) = \
555                    fix_maintainer (self.pkg.changes["maintainer"])
556         except ParseMaintError, msg:
557             self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
558                    % (filename, self.pkg.changes["maintainer"], msg))
559
560         # ...likewise for the Changed-By: field if it exists.
561         try:
562             (self.pkg.changes["changedby822"],
563              self.pkg.changes["changedby2047"],
564              self.pkg.changes["changedbyname"],
565              self.pkg.changes["changedbyemail"]) = \
566                    fix_maintainer (self.pkg.changes.get("changed-by", ""))
567         except ParseMaintError, msg:
568             self.pkg.changes["changedby822"] = ""
569             self.pkg.changes["changedby2047"] = ""
570             self.pkg.changes["changedbyname"] = ""
571             self.pkg.changes["changedbyemail"] = ""
572
573             self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
574                    % (filename, self.pkg.changes["changed-by"], msg))
575
576         # Ensure all the values in Closes: are numbers
577         if self.pkg.changes.has_key("closes"):
578             for i in self.pkg.changes["closes"].keys():
579                 if re_isanum.match (i) == None:
580                     self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))
581
582         # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
583         self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
584         self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])
585
586         # Check the .changes is non-empty
587         if not self.pkg.files:
588             self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
589             return False
590
591         # Changes was syntactically valid even if we'll reject
592         return True
593
594     ###########################################################################
595
596     def check_distributions(self):
597         "Check and map the Distribution field"
598
599         Cnf = Config()
600
601         # Handle suite mappings
602         for m in Cnf.ValueList("SuiteMappings"):
603             args = m.split()
604             mtype = args[0]
605             if mtype == "map" or mtype == "silent-map":
606                 (source, dest) = args[1:3]
607                 if self.pkg.changes["distribution"].has_key(source):
608                     del self.pkg.changes["distribution"][source]
609                     self.pkg.changes["distribution"][dest] = 1
610                     if mtype != "silent-map":
611                         self.notes.append("Mapping %s to %s." % (source, dest))
612                 if self.pkg.changes.has_key("distribution-version"):
613                     if self.pkg.changes["distribution-version"].has_key(source):
614                         self.pkg.changes["distribution-version"][source]=dest
615             elif mtype == "map-unreleased":
616                 (source, dest) = args[1:3]
617                 if self.pkg.changes["distribution"].has_key(source):
618                     for arch in self.pkg.changes["architecture"].keys():
619                         if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
620                             self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
621                             del self.pkg.changes["distribution"][source]
622                             self.pkg.changes["distribution"][dest] = 1
623                             break
624             elif mtype == "ignore":
625                 suite = args[1]
626                 if self.pkg.changes["distribution"].has_key(suite):
627                     del self.pkg.changes["distribution"][suite]
628                     self.warnings.append("Ignoring %s as a target suite." % (suite))
629             elif mtype == "reject":
630                 suite = args[1]
631                 if self.pkg.changes["distribution"].has_key(suite):
632                     self.rejects.append("Uploads to %s are not accepted." % (suite))
633             elif mtype == "propup-version":
634                 # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
635                 #
636                 # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
637                 if self.pkg.changes["distribution"].has_key(args[1]):
638                     self.pkg.changes.setdefault("distribution-version", {})
639                     for suite in args[2:]:
640                         self.pkg.changes["distribution-version"][suite] = suite
641
642         # Ensure there is (still) a target distribution
643         if len(self.pkg.changes["distribution"].keys()) < 1:
644             self.rejects.append("No valid distribution remaining.")
645
646         # Ensure target distributions exist
647         for suite in self.pkg.changes["distribution"].keys():
648             if not Cnf.has_key("Suite::%s" % (suite)):
649                 self.rejects.append("Unknown distribution `%s'." % (suite))
650
651     ###########################################################################
652
653     def binary_file_checks(self, f, session):
654         cnf = Config()
655         entry = self.pkg.files[f]
656
657         # Extract package control information
658         deb_file = utils.open_file(f)
659         try:
660             control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
661         except:
662             self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
663             deb_file.close()
664             # Can't continue, none of the checks on control would work.
665             return
666
667         # Check for mandantory "Description:"
668         deb_file.seek(0)
669         try:
670             apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
671         except:
672             self.rejects.append("%s: Missing Description in binary package" % (f))
673             return
674
675         deb_file.close()
676
677         # Check for mandatory fields
678         for field in [ "Package", "Architecture", "Version" ]:
679             if control.Find(field) == None:
680                 # Can't continue
681                 self.rejects.append("%s: No %s field in control." % (f, field))
682                 return
683
684         # Ensure the package name matches the one give in the .changes
685         if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
686             self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
687
688         # Validate the package field
689         package = control.Find("Package")
690         if not re_valid_pkg_name.match(package):
691             self.rejects.append("%s: invalid package name '%s'." % (f, package))
692
693         # Validate the version field
694         version = control.Find("Version")
695         if not re_valid_version.match(version):
696             self.rejects.append("%s: invalid version number '%s'." % (f, version))
697
698         # Ensure the architecture of the .deb is one we know about.
699         default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
700         architecture = control.Find("Architecture")
701         upload_suite = self.pkg.changes["distribution"].keys()[0]
702
703         if      architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session = session)] \
704             and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session = session)]:
705             self.rejects.append("Unknown architecture '%s'." % (architecture))
706
707         # Ensure the architecture of the .deb is one of the ones
708         # listed in the .changes.
709         if not self.pkg.changes["architecture"].has_key(architecture):
710             self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
711
712         # Sanity-check the Depends field
713         depends = control.Find("Depends")
714         if depends == '':
715             self.rejects.append("%s: Depends field is empty." % (f))
716
717         # Sanity-check the Provides field
718         provides = control.Find("Provides")
719         if provides:
720             provide = re_spacestrip.sub('', provides)
721             if provide == '':
722                 self.rejects.append("%s: Provides field is empty." % (f))
723             prov_list = provide.split(",")
724             for prov in prov_list:
725                 if not re_valid_pkg_name.match(prov):
726                     self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
727
728         # Check the section & priority match those given in the .changes (non-fatal)
729         if     control.Find("Section") and entry["section"] != "" \
730            and entry["section"] != control.Find("Section"):
731             self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
732                                 (f, control.Find("Section", ""), entry["section"]))
733         if control.Find("Priority") and entry["priority"] != "" \
734            and entry["priority"] != control.Find("Priority"):
735             self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
736                                 (f, control.Find("Priority", ""), entry["priority"]))
737
738         entry["package"] = package
739         entry["architecture"] = architecture
740         entry["version"] = version
741         entry["maintainer"] = control.Find("Maintainer", "")
742
743         if f.endswith(".udeb"):
744             self.pkg.files[f]["dbtype"] = "udeb"
745         elif f.endswith(".deb"):
746             self.pkg.files[f]["dbtype"] = "deb"
747         else:
748             self.rejects.append("%s is neither a .deb or a .udeb." % (f))
749
750         entry["source"] = control.Find("Source", entry["package"])
751
752         # Get the source version
753         source = entry["source"]
754         source_version = ""
755
756         if source.find("(") != -1:
757             m = re_extract_src_version.match(source)
758             source = m.group(1)
759             source_version = m.group(2)
760
761         if not source_version:
762             source_version = self.pkg.files[f]["version"]
763
764         entry["source package"] = source
765         entry["source version"] = source_version
766
767         # Ensure the filename matches the contents of the .deb
768         m = re_isadeb.match(f)
769
770         #  package name
771         file_package = m.group(1)
772         if entry["package"] != file_package:
773             self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
774                                 (f, file_package, entry["dbtype"], entry["package"]))
775         epochless_version = re_no_epoch.sub('', control.Find("Version"))
776
777         #  version
778         file_version = m.group(2)
779         if epochless_version != file_version:
780             self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
781                                 (f, file_version, entry["dbtype"], epochless_version))
782
783         #  architecture
784         file_architecture = m.group(3)
785         if entry["architecture"] != file_architecture:
786             self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
787                                 (f, file_architecture, entry["dbtype"], entry["architecture"]))
788
789         # Check for existent source
790         source_version = entry["source version"]
791         source_package = entry["source package"]
792         if self.pkg.changes["architecture"].has_key("source"):
793             if source_version != self.pkg.changes["version"]:
794                 self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
795                                     (source_version, f, self.pkg.changes["version"]))
796         else:
797             # Check in the SQL database
798             if not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
799                 # Check in one of the other directories
800                 source_epochless_version = re_no_epoch.sub('', source_version)
801                 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
802                 if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
803                     entry["byhand"] = 1
804                 elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
805                     entry["new"] = 1
806                 else:
807                     dsc_file_exists = False
808                     for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates", "Lenny-Volatile-Proposed-Updates"]:
809                         if cnf.has_key("Dir::Queue::%s" % (myq)):
810                             if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
811                                 dsc_file_exists = True
812                                 break
813
814                     if not dsc_file_exists:
815                         self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
816
817         # Check the version and for file overwrites
818         self.check_binary_against_db(f, session)
819
820         # Temporarily disable contents generation until we change the table storage layout
821         #b = Binary(f)
822         #b.scan_package()
823         #if len(b.rejects) > 0:
824         #    for j in b.rejects:
825         #        self.rejects.append(j)
826
827     def source_file_checks(self, f, session):
828         entry = self.pkg.files[f]
829
830         m = re_issource.match(f)
831         if not m:
832             return
833
834         entry["package"] = m.group(1)
835         entry["version"] = m.group(2)
836         entry["type"] = m.group(3)
837
838         # Ensure the source package name matches the Source filed in the .changes
839         if self.pkg.changes["source"] != entry["package"]:
840             self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
841
842         # Ensure the source version matches the version in the .changes file
843         if re_is_orig_source.match(f):
844             changes_version = self.pkg.changes["chopversion2"]
845         else:
846             changes_version = self.pkg.changes["chopversion"]
847
848         if changes_version != entry["version"]:
849             self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))
850
851         # Ensure the .changes lists source in the Architecture field
852         if not self.pkg.changes["architecture"].has_key("source"):
853             self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))
854
855         # Check the signature of a .dsc file
856         if entry["type"] == "dsc":
857             # check_signature returns either:
858             #  (None, [list, of, rejects]) or (signature, [])
859             (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
860             for j in rejects:
861                 self.rejects.append(j)
862
863         entry["architecture"] = "source"
864
865     def per_suite_file_checks(self, f, suite, session):
866         cnf = Config()
867         entry = self.pkg.files[f]
868
869         # Skip byhand
870         if entry.has_key("byhand"):
871             return
872
873         # Check we have fields we need to do these checks
874         oktogo = True
875         for m in ['component', 'package', 'priority', 'size', 'md5sum']:
876             if not entry.has_key(m):
877                 self.rejects.append("file '%s' does not have field %s set" % (f, m))
878                 oktogo = False
879
880         if not oktogo:
881             return
882
883         # Handle component mappings
884         for m in cnf.ValueList("ComponentMappings"):
885             (source, dest) = m.split()
886             if entry["component"] == source:
887                 entry["original component"] = source
888                 entry["component"] = dest
889
890         # Ensure the component is valid for the target suite
891         if cnf.has_key("Suite:%s::Components" % (suite)) and \
892            entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
893             self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
894             return
895
896         # Validate the component
897         if not get_component(entry["component"], session):
898             self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
899             return
900
901         # See if the package is NEW
902         if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
903             entry["new"] = 1
904
905         # Validate the priority
906         if entry["priority"].find('/') != -1:
907             self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))
908
909         # Determine the location
910         location = cnf["Dir::Pool"]
911         l = get_location(location, entry["component"], session=session)
912         if l is None:
913             self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
914             entry["location id"] = -1
915         else:
916             entry["location id"] = l.location_id
917
918         # Check the md5sum & size against existing files (if any)
919         entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])
920
921         found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
922                                          entry["size"], entry["md5sum"], entry["location id"])
923
924         if found is None:
925             self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
926         elif found is False and poolfile is not None:
927             self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
928         else:
929             if poolfile is None:
930                 entry["files id"] = None
931             else:
932                 entry["files id"] = poolfile.file_id
933
934         # Check for packages that have moved from one component to another
935         entry['suite'] = suite
936         res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
937         if res.rowcount > 0:
938             entry["othercomponents"] = res.fetchone()[0]
939
940     def check_files(self, action=True):
941         file_keys = self.pkg.files.keys()
942         holding = Holding()
943         cnf = Config()
944
945         if action:
946             cwd = os.getcwd()
947             os.chdir(self.pkg.directory)
948             for f in file_keys:
949                 ret = holding.copy_to_holding(f)
950                 if ret is not None:
951                     self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)
952
953             os.chdir(cwd)
954
955         # check we already know the changes file
956         # [NB: this check must be done post-suite mapping]
957         base_filename = os.path.basename(self.pkg.changes_file)
958
959         session = DBConn().session()
960
961         try:
962             dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
963             # if in the pool or in a queue other than unchecked, reject
964             if (dbc.in_queue is None) \
965                    or (dbc.in_queue is not None
966                        and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
967                 self.rejects.append("%s file already known to dak" % base_filename)
968         except NoResultFound, e:
969             # not known, good
970             pass
971
972         has_binaries = False
973         has_source = False
974
975         for f, entry in self.pkg.files.items():
976             # Ensure the file does not already exist in one of the accepted directories
977             for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
978                 if not cnf.has_key("Dir::Queue::%s" % (d)): continue
979                 if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
980                     self.rejects.append("%s file already exists in the %s directory." % (f, d))
981
982             if not re_taint_free.match(f):
983                 self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))
984
985             # Check the file is readable
986             if os.access(f, os.R_OK) == 0:
987                 # When running in -n, copy_to_holding() won't have
988                 # generated the reject_message, so we need to.
989                 if action:
990                     if os.path.exists(f):
991                         self.rejects.append("Can't read `%s'. [permission denied]" % (f))
992                     else:
993                         # Don't directly reject, mark to check later to deal with orig's
994                         # we can find in the pool
995                         self.later_check_files.append(f)
996                 entry["type"] = "unreadable"
997                 continue
998
999             # If it's byhand skip remaining checks
1000             if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
1001                 entry["byhand"] = 1
1002                 entry["type"] = "byhand"
1003
1004             # Checks for a binary package...
1005             elif re_isadeb.match(f):
1006                 has_binaries = True
1007                 entry["type"] = "deb"
1008
1009                 # This routine appends to self.rejects/warnings as appropriate
1010                 self.binary_file_checks(f, session)
1011
1012             # Checks for a source package...
1013             elif re_issource.match(f):
1014                 has_source = True
1015
1016                 # This routine appends to self.rejects/warnings as appropriate
1017                 self.source_file_checks(f, session)
1018
1019             # Not a binary or source package?  Assume byhand...
1020             else:
1021                 entry["byhand"] = 1
1022                 entry["type"] = "byhand"
1023
1024             # Per-suite file checks
1025             entry["oldfiles"] = {}
1026             for suite in self.pkg.changes["distribution"].keys():
1027                 self.per_suite_file_checks(f, suite, session)
1028
1029         session.close()
1030
1031         # If the .changes file says it has source, it must have source.
1032         if self.pkg.changes["architecture"].has_key("source"):
1033             if not has_source:
1034                 self.rejects.append("no source found and Architecture line in changes mention source.")
1035
1036             if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
1037                 self.rejects.append("source only uploads are not supported.")
1038
1039     ###########################################################################
1040     def check_dsc(self, action=True, session=None):
1041         """Returns bool indicating whether or not the source changes are valid"""
1042         # Ensure there is source to check
1043         if not self.pkg.changes["architecture"].has_key("source"):
1044             return True
1045
1046         # Find the .dsc
1047         dsc_filename = None
1048         for f, entry in self.pkg.files.items():
1049             if entry["type"] == "dsc":
1050                 if dsc_filename:
1051                     self.rejects.append("can not process a .changes file with multiple .dsc's.")
1052                     return False
1053                 else:
1054                     dsc_filename = f
1055
1056         # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1057         if not dsc_filename:
1058             self.rejects.append("source uploads must contain a dsc file")
1059             return False
1060
1061         # Parse the .dsc file
1062         try:
1063             self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1, dsc_file=1))
1064         except CantOpenError:
1065             # if not -n copy_to_holding() will have done this for us...
1066             if not action:
1067                 self.rejects.append("%s: can't read file." % (dsc_filename))
1068         except ParseChangesError, line:
1069             self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
1070         except InvalidDscError, line:
1071             self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
1072         except ChangesUnicodeError:
1073             self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))
1074
1075         # Build up the file list of files mentioned by the .dsc
1076         try:
1077             self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
1078         except NoFilesFieldError:
1079             self.rejects.append("%s: no Files: field." % (dsc_filename))
1080             return False
1081         except UnknownFormatError, format:
1082             self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
1083             return False
1084         except ParseChangesError, line:
1085             self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
1086             return False
1087
1088         # Enforce mandatory fields
1089         for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
1090             if not self.pkg.dsc.has_key(i):
1091                 self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
1092                 return False
1093
1094         # Validate the source and version fields
1095         if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
1096             self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
1097         if not re_valid_version.match(self.pkg.dsc["version"]):
1098             self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))
1099
1100         # Only a limited list of source formats are allowed in each suite
1101         for dist in self.pkg.changes["distribution"].keys():
1102             allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
1103             if self.pkg.dsc["format"] not in allowed:
1104                 self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))
1105
1106         # Validate the Maintainer field
1107         try:
1108             # We ignore the return value
1109             fix_maintainer(self.pkg.dsc["maintainer"])
1110         except ParseMaintError, msg:
1111             self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
1112                                  % (dsc_filename, self.pkg.dsc["maintainer"], msg))
1113
1114         # Validate the build-depends field(s)
1115         for field_name in [ "build-depends", "build-depends-indep" ]:
1116             field = self.pkg.dsc.get(field_name)
1117             if field:
1118                 # Have apt try to parse them...
1119                 try:
1120                     apt_pkg.ParseSrcDepends(field)
1121                 except:
1122                     self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))
1123
1124         # Ensure the version number in the .dsc matches the version number in the .changes
1125         epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1126         changes_version = self.pkg.files[dsc_filename]["version"]
1127
1128         if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
1129             self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
1130
1131         # Ensure the Files field contain only what's expected
1132         self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))
1133
1134         # Ensure source is newer than existing source in target suites
1135         session = DBConn().session()
1136         self.check_source_against_db(dsc_filename, session)
1137         self.check_dsc_against_db(dsc_filename, session)
1138
1139         dbchg = get_dbchange(self.pkg.changes_file, session)
1140
1141         # Finally, check if we're missing any files
1142         for f in self.later_check_files:
1143             print 'XXX: %s' % f
1144             # Check if we've already processed this file if we have a dbchg object
1145             ok = False
1146             if dbchg:
1147                 for pf in dbchg.files:
1148                     if pf.filename == f and pf.processed:
1149                         self.notes.append('%s was already processed so we can go ahead' % f)
1150                         ok = True
1151                         del self.pkg.files[f]
1152             if not ok:
1153                 self.rejects.append("Could not find file %s references in changes" % f)
1154
1155         session.close()
1156
1157         return True
1158
1159     ###########################################################################
1160
1161     def get_changelog_versions(self, source_dir):
1162         """Extracts a the source package and (optionally) grabs the
1163         version history out of debian/changelog for the BTS."""
1164
1165         cnf = Config()
1166
1167         # Find the .dsc (again)
1168         dsc_filename = None
1169         for f in self.pkg.files.keys():
1170             if self.pkg.files[f]["type"] == "dsc":
1171                 dsc_filename = f
1172
1173         # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1174         if not dsc_filename:
1175             return
1176
1177         # Create a symlink mirror of the source files in our temporary directory
1178         for f in self.pkg.files.keys():
1179             m = re_issource.match(f)
1180             if m:
1181                 src = os.path.join(source_dir, f)
1182                 # If a file is missing for whatever reason, give up.
1183                 if not os.path.exists(src):
1184                     return
1185                 ftype = m.group(3)
1186                 if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
1187                    self.pkg.orig_files[f].has_key("path"):
1188                     continue
1189                 dest = os.path.join(os.getcwd(), f)
1190                 os.symlink(src, dest)
1191
1192         # If the orig files are not a part of the upload, create symlinks to the
1193         # existing copies.
1194         for orig_file in self.pkg.orig_files.keys():
1195             if not self.pkg.orig_files[orig_file].has_key("path"):
1196                 continue
1197             dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
1198             os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
1199
1200         # Extract the source
1201         cmd = "dpkg-source -sn -x %s" % (dsc_filename)
1202         (result, output) = commands.getstatusoutput(cmd)
1203         if (result != 0):
1204             self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
1205             self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
1206             return
1207
1208         if not cnf.Find("Dir::Queue::BTSVersionTrack"):
1209             return
1210
1211         # Get the upstream version
1212         upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1213         if re_strip_revision.search(upstr_version):
1214             upstr_version = re_strip_revision.sub('', upstr_version)
1215
1216         # Ensure the changelog file exists
1217         changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
1218         if not os.path.exists(changelog_filename):
1219             self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
1220             return
1221
1222         # Parse the changelog
1223         self.pkg.dsc["bts changelog"] = ""
1224         changelog_file = utils.open_file(changelog_filename)
1225         for line in changelog_file.readlines():
1226             m = re_changelog_versions.match(line)
1227             if m:
1228                 self.pkg.dsc["bts changelog"] += line
1229         changelog_file.close()
1230
1231         # Check we found at least one revision in the changelog
1232         if not self.pkg.dsc["bts changelog"]:
1233             self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1234
1235     def check_source(self):
1236         # Bail out if:
1237         #    a) there's no source
1238         if not self.pkg.changes["architecture"].has_key("source"):
1239             return
1240
1241         tmpdir = utils.temp_dirname()
1242
1243         # Move into the temporary directory
1244         cwd = os.getcwd()
1245         os.chdir(tmpdir)
1246
1247         # Get the changelog version history
1248         self.get_changelog_versions(cwd)
1249
1250         # Move back and cleanup the temporary tree
1251         os.chdir(cwd)
1252
1253         try:
1254             shutil.rmtree(tmpdir)
1255         except OSError, e:
1256             if e.errno != errno.EACCES:
1257                 print "foobar"
1258                 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1259
1260             self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1261             # We probably have u-r or u-w directories so chmod everything
1262             # and try again.
1263             cmd = "chmod -R u+rwx %s" % (tmpdir)
1264             result = os.system(cmd)
1265             if result != 0:
1266                 utils.fubar("'%s' failed with result %s." % (cmd, result))
1267             shutil.rmtree(tmpdir)
1268         except Exception, e:
1269             print "foobar2 (%s)" % e
1270             utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1271
1272     ###########################################################################
1273     def ensure_hashes(self):
1274         # Make sure we recognise the format of the Files: field in the .changes
1275         format = self.pkg.changes.get("format", "0.0").split(".", 1)
1276         if len(format) == 2:
1277             format = int(format[0]), int(format[1])
1278         else:
1279             format = int(float(format[0])), 0
1280
1281         # We need to deal with the original changes blob, as the fields we need
1282         # might not be in the changes dict serialised into the .dak anymore.
1283         orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1284
1285         # Copy the checksums over to the current changes dict.  This will keep
1286         # the existing modifications to it intact.
1287         for field in orig_changes:
1288             if field.startswith('checksums-'):
1289                 self.pkg.changes[field] = orig_changes[field]
1290
1291         # Check for unsupported hashes
1292         for j in utils.check_hash_fields(".changes", self.pkg.changes):
1293             self.rejects.append(j)
1294
1295         for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1296             self.rejects.append(j)
1297
1298         # We have to calculate the hash if we have an earlier changes version than
1299         # the hash appears in rather than require it exist in the changes file
1300         for hashname, hashfunc, version in utils.known_hashes:
1301             # TODO: Move _ensure_changes_hash into this class
1302             for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1303                 self.rejects.append(j)
1304             if "source" in self.pkg.changes["architecture"]:
1305                 # TODO: Move _ensure_dsc_hash into this class
1306                 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1307                     self.rejects.append(j)
1308
1309     def check_hashes(self):
1310         for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1311             self.rejects.append(m)
1312
1313         for m in utils.check_size(".changes", self.pkg.files):
1314             self.rejects.append(m)
1315
1316         for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1317             self.rejects.append(m)
1318
1319         for m in utils.check_size(".dsc", self.pkg.dsc_files):
1320             self.rejects.append(m)
1321
1322         self.ensure_hashes()
1323
1324     ###########################################################################
1325
1326     def ensure_orig(self, target_dir='.', session=None):
1327         """
1328         Ensures that all orig files mentioned in the changes file are present
1329         in target_dir. If they do not exist, they are symlinked into place.
1330
1331         An list containing the symlinks that were created are returned (so they
1332         can be removed).
1333         """
1334
1335         symlinked = []
1336         cnf = Config()
1337
1338         for filename, entry in self.pkg.dsc_files.iteritems():
1339             if not re_is_orig_source.match(filename):
1340                 # File is not an orig; ignore
1341                 continue
1342
1343             if os.path.exists(filename):
1344                 # File exists, no need to continue
1345                 continue
1346
1347             def symlink_if_valid(path):
1348                 f = utils.open_file(path)
1349                 md5sum = apt_pkg.md5sum(f)
1350                 f.close()
1351
1352                 fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
1353                 expected = (int(entry['size']), entry['md5sum'])
1354
1355                 if fingerprint != expected:
1356                     return False
1357
1358                 dest = os.path.join(target_dir, filename)
1359
1360                 os.symlink(path, dest)
1361                 symlinked.append(dest)
1362
1363                 return True
1364
1365             session_ = session
1366             if session is None:
1367                 session_ = DBConn().session()
1368
1369             found = False
1370
1371             # Look in the pool
1372             for poolfile in get_poolfile_like_name('%s' % filename, session_):
1373                 poolfile_path = os.path.join(
1374                     poolfile.location.path, poolfile.filename
1375                 )
1376
1377                 if symlink_if_valid(poolfile_path):
1378                     found = True
1379                     break
1380
1381             if session is None:
1382                 session_.close()
1383
1384             if found:
1385                 continue
1386
1387             # Look in some other queues for the file
1388             queues = ('New', 'Byhand', 'ProposedUpdates',
1389                 'OldProposedUpdates', 'Embargoed', 'Unembargoed')
1390
1391             for queue in queues:
1392                 if not cnf.get('Dir::Queue::%s' % queue):
1393                     continue
1394
1395                 queuefile_path = os.path.join(
1396                     cnf['Dir::Queue::%s' % queue], filename
1397                 )
1398
1399                 if not os.path.exists(queuefile_path):
1400                     # Does not exist in this queue
1401                     continue
1402
1403                 if symlink_if_valid(queuefile_path):
1404                     break
1405
1406         return symlinked
1407
1408     ###########################################################################
1409
1410     def check_lintian(self):
1411         """
1412         Extends self.rejects by checking the output of lintian against tags
1413         specified in Dinstall::LintianTags.
1414         """
1415
1416         cnf = Config()
1417
1418         # Don't reject binary uploads
1419         if not self.pkg.changes['architecture'].has_key('source'):
1420             return
1421
1422         # Only check some distributions
1423         for dist in ('unstable', 'experimental'):
1424             if dist in self.pkg.changes['distribution']:
1425                 break
1426         else:
1427             return
1428
1429         # If we do not have a tagfile, don't do anything
1430         tagfile = cnf.get("Dinstall::LintianTags")
1431         if tagfile is None:
1432             return
1433
1434         # Parse the yaml file
1435         sourcefile = file(tagfile, 'r')
1436         sourcecontent = sourcefile.read()
1437         sourcefile.close()
1438
1439         try:
1440             lintiantags = yaml.load(sourcecontent)['lintian']
1441         except yaml.YAMLError, msg:
1442             utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
1443             return
1444
1445         # Try and find all orig mentioned in the .dsc
1446         symlinked = self.ensure_orig()
1447
1448         # Setup the input file for lintian
1449         fd, temp_filename = utils.temp_filename()
1450         temptagfile = os.fdopen(fd, 'w')
1451         for tags in lintiantags.values():
1452             temptagfile.writelines(['%s\n' % x for x in tags])
1453         temptagfile.close()
1454
1455         try:
1456             cmd = "lintian --show-overrides --tags-from-file %s %s" % \
1457                 (temp_filename, self.pkg.changes_file)
1458
1459             result, output = commands.getstatusoutput(cmd)
1460         finally:
1461             # Remove our tempfile and any symlinks we created
1462             os.unlink(temp_filename)
1463
1464             for symlink in symlinked:
1465                 os.unlink(symlink)
1466
1467         if result == 2:
1468             utils.warn("lintian failed for %s [return code: %s]." % \
1469                 (self.pkg.changes_file, result))
1470             utils.warn(utils.prefix_multi_line_string(output, \
1471                 " [possible output:] "))
1472
1473         def log(*txt):
1474             if self.logger:
1475                 self.logger.log(
1476                     [self.pkg.changes_file, "check_lintian"] + list(txt)
1477                 )
1478
1479         # Generate messages
1480         parsed_tags = parse_lintian_output(output)
1481         self.rejects.extend(
1482             generate_reject_messages(parsed_tags, lintiantags, log=log)
1483         )
1484
1485     ###########################################################################
1486     def check_urgency(self):
1487         cnf = Config()
1488         if self.pkg.changes["architecture"].has_key("source"):
1489             if not self.pkg.changes.has_key("urgency"):
1490                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1491             self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1492             if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1493                 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1494                                      (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1495                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1496
1497     ###########################################################################
1498
1499     # Sanity check the time stamps of files inside debs.
1500     # [Files in the near future cause ugly warnings and extreme time
1501     #  travel can cause errors on extraction]
1502
1503     def check_timestamps(self):
1504         Cnf = Config()
1505
1506         future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1507         past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1508         tar = TarTime(future_cutoff, past_cutoff)
1509
1510         for filename, entry in self.pkg.files.items():
1511             if entry["type"] == "deb":
1512                 tar.reset()
1513                 try:
1514                     deb_file = utils.open_file(filename)
1515                     apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
1516                     deb_file.seek(0)
1517                     try:
1518                         apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
1519                     except SystemError, e:
1520                         # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
1521                         if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
1522                             raise
1523                         deb_file.seek(0)
1524                         apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")
1525
1526                     deb_file.close()
1527
1528                     future_files = tar.future_files.keys()
1529                     if future_files:
1530                         num_future_files = len(future_files)
1531                         future_file = future_files[0]
1532                         future_date = tar.future_files[future_file]
1533                         self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1534                                % (filename, num_future_files, future_file, time.ctime(future_date)))
1535
1536                     ancient_files = tar.ancient_files.keys()
1537                     if ancient_files:
1538                         num_ancient_files = len(ancient_files)
1539                         ancient_file = ancient_files[0]
1540                         ancient_date = tar.ancient_files[ancient_file]
1541                         self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1542                                % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1543                 except:
1544                     self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1545
1546     def check_if_upload_is_sponsored(self, uid_email, uid_name):
1547         if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1548             sponsored = False
1549         elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1550             sponsored = False
1551             if uid_name == "":
1552                 sponsored = True
1553         else:
1554             sponsored = True
1555             if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1556                 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1557                 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1558                     self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1559                         self.pkg.changes["sponsoremail"] = uid_email
1560
1561         return sponsored
1562
1563
1564     ###########################################################################
1565     # check_signed_by_key checks
1566     ###########################################################################
1567
1568     def check_signed_by_key(self):
1569         """Ensure the .changes is signed by an authorized uploader."""
1570         session = DBConn().session()
1571
1572         # First of all we check that the person has proper upload permissions
1573         # and that this upload isn't blocked
1574         fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1575
1576         if fpr is None:
1577             self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1578             return
1579
1580         # TODO: Check that import-keyring adds UIDs properly
1581         if not fpr.uid:
1582             self.rejects.append("Cannot find uid for fingerprint %s.  Please contact ftpmaster@debian.org" % fpr.fingerprint)
1583             return
1584
1585         # Check that the fingerprint which uploaded has permission to do so
1586         self.check_upload_permissions(fpr, session)
1587
1588         # Check that this package is not in a transition
1589         self.check_transition(session)
1590
1591         session.close()
1592
1593
1594     def check_upload_permissions(self, fpr, session):
1595         # Check any one-off upload blocks
1596         self.check_upload_blocks(fpr, session)
1597
1598         # Start with DM as a special case
1599         # DM is a special case unfortunately, so we check it first
1600         # (keys with no source access get more access than DMs in one
1601         #  way; DMs can only upload for their packages whether source
1602         #  or binary, whereas keys with no access might be able to
1603         #  upload some binaries)
1604         if fpr.source_acl.access_level == 'dm':
1605             self.check_dm_upload(fpr, session)
1606         else:
1607             # Check source-based permissions for other types
1608             if self.pkg.changes["architecture"].has_key("source") and \
1609                 fpr.source_acl.access_level is None:
1610                 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1611                 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1612                 self.rejects.append(rej)
1613                 return
1614             # If not a DM, we allow full upload rights
1615             uid_email = "%s@debian.org" % (fpr.uid.uid)
1616             self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1617
1618
1619         # Check binary upload permissions
1620         # By this point we know that DMs can't have got here unless they
1621         # are allowed to deal with the package concerned so just apply
1622         # normal checks
1623         if fpr.binary_acl.access_level == 'full':
1624             return
1625
1626         # Otherwise we're in the map case
1627         tmparches = self.pkg.changes["architecture"].copy()
1628         tmparches.pop('source', None)
1629
1630         for bam in fpr.binary_acl_map:
1631             tmparches.pop(bam.architecture.arch_string, None)
1632
1633         if len(tmparches.keys()) > 0:
1634             if fpr.binary_reject:
1635                 rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1636                 rej += "\narchitectures involved are: ", ",".join(tmparches.keys())
1637                 self.rejects.append(rej)
1638             else:
1639                 # TODO: This is where we'll implement reject vs throw away binaries later
1640                 rej = "Uhm.  I'm meant to throw away the binaries now but that's not implemented yet"
1641                 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1642                 rej += "\nFingerprint: %s", (fpr.fingerprint)
1643                 self.rejects.append(rej)
1644
1645
1646     def check_upload_blocks(self, fpr, session):
1647         """Check whether any upload blocks apply to this source, source
1648            version, uid / fpr combination"""
1649
1650         def block_rej_template(fb):
1651             rej = 'Manual upload block in place for package %s' % fb.source
1652             if fb.version is not None:
1653                 rej += ', version %s' % fb.version
1654             return rej
1655
1656         for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1657             # version is None if the block applies to all versions
1658             if fb.version is None or fb.version == self.pkg.changes['version']:
1659                 # Check both fpr and uid - either is enough to cause a reject
1660                 if fb.fpr is not None:
1661                     if fb.fpr.fingerprint == fpr.fingerprint:
1662                         self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1663                 if fb.uid is not None:
1664                     if fb.uid == fpr.uid:
1665                         self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1666
1667
1668     def check_dm_upload(self, fpr, session):
1669         # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
1670         ## none of the uploaded packages are NEW
1671         rej = False
1672         for f in self.pkg.files.keys():
1673             if self.pkg.files[f].has_key("byhand"):
1674                 self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
1675                 rej = True
1676             if self.pkg.files[f].has_key("new"):
1677                 self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
1678                 rej = True
1679
1680         if rej:
1681             return
1682
1683         ## the most recent version of the package uploaded to unstable or
1684         ## experimental includes the field "DM-Upload-Allowed: yes" in the source
1685         ## section of its control file
1686         q = session.query(DBSource).filter_by(source=self.pkg.changes["source"])
1687         q = q.join(SrcAssociation)
1688         q = q.join(Suite).filter(Suite.suite_name.in_(['unstable', 'experimental']))
1689         q = q.order_by(desc('source.version')).limit(1)
1690
1691         r = q.all()
1692
1693         if len(r) != 1:
1694             rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
1695             self.rejects.append(rej)
1696             return
1697
1698         r = r[0]
1699         if not r.dm_upload_allowed:
1700             rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
1701             self.rejects.append(rej)
1702             return
1703
1704         ## the Maintainer: field of the uploaded .changes file corresponds with
1705         ## the owner of the key used (ie, non-developer maintainers may not sponsor
1706         ## uploads)
1707         if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
1708             self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
1709
1710         ## the most recent version of the package uploaded to unstable or
1711         ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
1712         ## non-developer maintainers cannot NMU or hijack packages)
1713
1714         # srcuploaders includes the maintainer
1715         accept = False
1716         for sup in r.srcuploaders:
1717             (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
1718             # Eww - I hope we never have two people with the same name in Debian
1719             if email == fpr.uid.uid or name == fpr.uid.name:
1720                 accept = True
1721                 break
1722
1723         if not accept:
1724             self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
1725             return
1726
1727         ## none of the packages are being taken over from other source packages
1728         for b in self.pkg.changes["binary"].keys():
1729             for suite in self.pkg.changes["distribution"].keys():
1730                 q = session.query(DBSource)
1731                 q = q.join(DBBinary).filter_by(package=b)
1732                 q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)
1733
1734                 for s in q.all():
1735                     if s.source != self.pkg.changes["source"]:
1736                         self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s, suite))
1737
1738
1739
1740     def check_transition(self, session):
1741         cnf = Config()
1742
1743         sourcepkg = self.pkg.changes["source"]
1744
1745         # No sourceful upload -> no need to do anything else, direct return
1746         # We also work with unstable uploads, not experimental or those going to some
1747         # proposed-updates queue
1748         if "source" not in self.pkg.changes["architecture"] or \
1749            "unstable" not in self.pkg.changes["distribution"]:
1750             return
1751
1752         # Also only check if there is a file defined (and existant) with
1753         # checks.
1754         transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1755         if transpath == "" or not os.path.exists(transpath):
1756             return
1757
1758         # Parse the yaml file
1759         sourcefile = file(transpath, 'r')
1760         sourcecontent = sourcefile.read()
1761         try:
1762             transitions = yaml.load(sourcecontent)
1763         except yaml.YAMLError, msg:
1764             # This shouldn't happen, there is a wrapper to edit the file which
1765             # checks it, but we prefer to be safe than ending up rejecting
1766             # everything.
1767             utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1768             return
1769
1770         # Now look through all defined transitions
1771         for trans in transitions:
1772             t = transitions[trans]
1773             source = t["source"]
1774             expected = t["new"]
1775
1776             # Will be None if nothing is in testing.
1777             current = get_source_in_suite(source, "testing", session)
1778             if current is not None:
1779                 compare = apt_pkg.VersionCompare(current.version, expected)
1780
1781             if current is None or compare < 0:
1782                 # This is still valid, the current version in testing is older than
1783                 # the new version we wait for, or there is none in testing yet
1784
1785                 # Check if the source we look at is affected by this.
1786                 if sourcepkg in t['packages']:
1787                     # The source is affected, lets reject it.
1788
1789                     rejectmsg = "%s: part of the %s transition.\n\n" % (
1790                         sourcepkg, trans)
1791
1792                     if current is not None:
1793                         currentlymsg = "at version %s" % (current.version)
1794                     else:
1795                         currentlymsg = "not present in testing"
1796
1797                     rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1798
1799                     rejectmsg += "\n".join(textwrap.wrap("""Your package
1800 is part of a testing transition designed to get %s migrated (it is
1801 currently %s, we need version %s).  This transition is managed by the
1802 Release Team, and %s is the Release-Team member responsible for it.
1803 Please mail debian-release@lists.debian.org or contact %s directly if you
1804 need further assistance.  You might want to upload to experimental until this
1805 transition is done."""
1806                             % (source, currentlymsg, expected,t["rm"], t["rm"])))
1807
1808                     self.rejects.append(rejectmsg)
1809                     return
1810
1811     ###########################################################################
1812     # End check_signed_by_key checks
1813     ###########################################################################
1814
1815     def build_summaries(self):
1816         """ Build a summary of changes the upload introduces. """
1817
1818         (byhand, new, summary, override_summary) = self.pkg.file_summary()
1819
1820         short_summary = summary
1821
1822         # This is for direport's benefit...
1823         f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1824
1825         if byhand or new:
1826             summary += "Changes: " + f
1827
1828         summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1829
1830         summary += self.announce(short_summary, 0)
1831
1832         return (summary, short_summary)
1833
1834     ###########################################################################
1835
1836     def close_bugs(self, summary, action):
1837         """
1838         Send mail to close bugs as instructed by the closes field in the changes file.
1839         Also add a line to summary if any work was done.
1840
1841         @type summary: string
1842         @param summary: summary text, as given by L{build_summaries}
1843
1844         @type action: bool
1845         @param action: Set to false no real action will be done.
1846
1847         @rtype: string
1848         @return: summary. If action was taken, extended by the list of closed bugs.
1849
1850         """
1851
1852         template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1853
1854         bugs = self.pkg.changes["closes"].keys()
1855
1856         if not bugs:
1857             return summary
1858
1859         bugs.sort()
1860         summary += "Closing bugs: "
1861         for bug in bugs:
1862             summary += "%s " % (bug)
1863             if action:
1864                 self.update_subst()
1865                 self.Subst["__BUG_NUMBER__"] = bug
1866                 if self.pkg.changes["distribution"].has_key("stable"):
1867                     self.Subst["__STABLE_WARNING__"] = """
1868 Note that this package is not part of the released stable Debian
1869 distribution.  It may have dependencies on other unreleased software,
1870 or other instabilities.  Please take care if you wish to install it.
1871 The update will eventually make its way into the next released Debian
1872 distribution."""
1873                 else:
1874                     self.Subst["__STABLE_WARNING__"] = ""
1875                 mail_message = utils.TemplateSubst(self.Subst, template)
1876                 utils.send_mail(mail_message)
1877
1878                 # Clear up after ourselves
1879                 del self.Subst["__BUG_NUMBER__"]
1880                 del self.Subst["__STABLE_WARNING__"]
1881
1882         if action and self.logger:
1883             self.logger.log(["closing bugs"] + bugs)
1884
1885         summary += "\n"
1886
1887         return summary
1888
1889     ###########################################################################
1890
1891     def announce(self, short_summary, action):
1892         """
1893         Send an announce mail about a new upload.
1894
1895         @type short_summary: string
1896         @param short_summary: Short summary text to include in the mail
1897
1898         @type action: bool
1899         @param action: Set to false no real action will be done.
1900
1901         @rtype: string
1902         @return: Textstring about action taken.
1903
1904         """
1905
1906         cnf = Config()
1907         announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
1908
1909         # Only do announcements for source uploads with a recent dpkg-dev installed
1910         if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
1911            self.pkg.changes["architecture"].has_key("source"):
1912             return ""
1913
1914         lists_done = {}
1915         summary = ""
1916
1917         self.Subst["__SHORT_SUMMARY__"] = short_summary
1918
1919         for dist in self.pkg.changes["distribution"].keys():
1920             suite = get_suite(dist)
1921             if suite is None: continue
1922             announce_list = suite.announce
1923             if announce_list == "" or lists_done.has_key(announce_list):
1924                 continue
1925
1926             lists_done[announce_list] = 1
1927             summary += "Announcing to %s\n" % (announce_list)
1928
1929             if action:
1930                 self.update_subst()
1931                 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
1932                 if cnf.get("Dinstall::TrackingServer") and \
1933                    self.pkg.changes["architecture"].has_key("source"):
1934                     trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
1935                     self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
1936
1937                 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
1938                 utils.send_mail(mail_message)
1939
1940                 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
1941
1942         if cnf.FindB("Dinstall::CloseBugs"):
1943             summary = self.close_bugs(summary, action)
1944
1945         del self.Subst["__SHORT_SUMMARY__"]
1946
1947         return summary
1948
1949     ###########################################################################
1950     @session_wrapper
1951     def accept (self, summary, short_summary, session=None):
1952         """
1953         Accept an upload.
1954
1955         This moves all files referenced from the .changes into the pool,
1956         sends the accepted mail, announces to lists, closes bugs and
1957         also checks for override disparities. If enabled it will write out
1958         the version history for the BTS Version Tracking and will finally call
1959         L{queue_build}.
1960
1961         @type summary: string
1962         @param summary: Summary text
1963
1964         @type short_summary: string
1965         @param short_summary: Short summary
1966         """
1967
1968         cnf = Config()
1969         stats = SummaryStats()
1970
1971         print "Installing."
1972         self.logger.log(["installing changes", self.pkg.changes_file])
1973
1974         poolfiles = []
1975
1976         # Add the .dsc file to the DB first
1977         for newfile, entry in self.pkg.files.items():
1978             if entry["type"] == "dsc":
1979                 source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
1980                 for j in pfs:
1981                     poolfiles.append(j)
1982
1983         # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
1984         for newfile, entry in self.pkg.files.items():
1985             if entry["type"] == "deb":
1986                 poolfiles.append(add_deb_to_db(self, newfile, session))
1987
1988         # If this is a sourceful diff only upload that is moving
1989         # cross-component we need to copy the .orig files into the new
1990         # component too for the same reasons as above.
1991         # XXX: mhy: I think this should be in add_dsc_to_db
1992         if self.pkg.changes["architecture"].has_key("source"):
1993             for orig_file in self.pkg.orig_files.keys():
1994                 if not self.pkg.orig_files[orig_file].has_key("id"):
1995                     continue # Skip if it's not in the pool
1996                 orig_file_id = self.pkg.orig_files[orig_file]["id"]
1997                 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
1998                     continue # Skip if the location didn't change
1999
2000                 # Do the move
2001                 oldf = get_poolfile_by_id(orig_file_id, session)
2002                 old_filename = os.path.join(oldf.location.path, oldf.filename)
2003                 old_dat = {'size': oldf.filesize,   'md5sum': oldf.md5sum,
2004                            'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
2005
2006                 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
2007
2008                 # TODO: Care about size/md5sum collisions etc
2009                 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
2010
2011                 # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
2012                 if newf is None:
2013                     utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
2014                     newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
2015
2016                     session.flush()
2017
2018                     # Don't reference the old file from this changes
2019                     for p in poolfiles:
2020                         if p.file_id == oldf.file_id:
2021                             poolfiles.remove(p)
2022
2023                     poolfiles.append(newf)
2024
2025                     # Fix up the DSC references
2026                     toremove = []
2027
2028                     for df in source.srcfiles:
2029                         if df.poolfile.file_id == oldf.file_id:
2030                             # Add a new DSC entry and mark the old one for deletion
2031                             # Don't do it in the loop so we don't change the thing we're iterating over
2032                             newdscf = DSCFile()
2033                             newdscf.source_id = source.source_id
2034                             newdscf.poolfile_id = newf.file_id
2035                             session.add(newdscf)
2036
2037                             toremove.append(df)
2038
2039                     for df in toremove:
2040                         session.delete(df)
2041
2042                     # Flush our changes
2043                     session.flush()
2044
2045                     # Make sure that our source object is up-to-date
2046                     session.expire(source)
2047
2048         # Add changelog information to the database
2049         self.store_changelog()
2050
2051         # Install the files into the pool
2052         for newfile, entry in self.pkg.files.items():
2053             destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
2054             utils.move(newfile, destination)
2055             self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
2056             stats.accept_bytes += float(entry["size"])
2057
2058         # Copy the .changes file across for suite which need it.
2059         copy_changes = dict([(x.copychanges, '')
2060                              for x in session.query(Suite).filter(Suite.suite_name.in_(self.pkg.changes["distribution"].keys())).all()
2061                              if x.copychanges is not None])
2062
2063         for dest in copy_changes.keys():
2064             utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
2065
2066         # We're done - commit the database changes
2067         session.commit()
2068         # Our SQL session will automatically start a new transaction after
2069         # the last commit
2070
2071         # Move the .changes into the 'done' directory
2072         utils.move(self.pkg.changes_file,
2073                    os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))
2074
2075         if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
2076             UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
2077
2078         self.update_subst()
2079         self.Subst["__SUMMARY__"] = summary
2080         mail_message = utils.TemplateSubst(self.Subst,
2081                                            os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
2082         utils.send_mail(mail_message)
2083         self.announce(short_summary, 1)
2084
2085         ## Helper stuff for DebBugs Version Tracking
2086         if cnf.Find("Dir::Queue::BTSVersionTrack"):
2087             if self.pkg.changes["architecture"].has_key("source"):
2088                 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2089                 version_history = os.fdopen(fd, 'w')
2090                 version_history.write(self.pkg.dsc["bts changelog"])
2091                 version_history.close()
2092                 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2093                                       self.pkg.changes_file[:-8]+".versions")
2094                 os.rename(temp_filename, filename)
2095                 os.chmod(filename, 0644)
2096
2097             # Write out the binary -> source mapping.
2098             (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2099             debinfo = os.fdopen(fd, 'w')
2100             for name, entry in sorted(self.pkg.files.items()):
2101                 if entry["type"] == "deb":
2102                     line = " ".join([entry["package"], entry["version"],
2103                                      entry["architecture"], entry["source package"],
2104                                      entry["source version"]])
2105                     debinfo.write(line+"\n")
2106             debinfo.close()
2107             filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2108                                   self.pkg.changes_file[:-8]+".debinfo")
2109             os.rename(temp_filename, filename)
2110             os.chmod(filename, 0644)
2111
2112         session.commit()
2113
2114         # Set up our copy queues (e.g. buildd queues)
2115         for suite_name in self.pkg.changes["distribution"].keys():
2116             suite = get_suite(suite_name, session)
2117             for q in suite.copy_queues:
2118                 for f in poolfiles:
2119                     q.add_file_from_pool(f)
2120
2121         session.commit()
2122
2123         # Finally...
2124         stats.accept_count += 1
2125
2126     def check_override(self):
2127         """
2128         Checks override entries for validity. Mails "Override disparity" warnings,
2129         if that feature is enabled.
2130
2131         Abandons the check if
2132           - override disparity checks are disabled
2133           - mail sending is disabled
2134         """
2135
2136         cnf = Config()
2137
2138         # Abandon the check if override disparity checks have been disabled
2139         if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
2140             return
2141
2142         summary = self.pkg.check_override()
2143
2144         if summary == "":
2145             return
2146
2147         overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2148
2149         self.update_subst()
2150         self.Subst["__SUMMARY__"] = summary
2151         mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2152         utils.send_mail(mail_message)
2153         del self.Subst["__SUMMARY__"]
2154
2155     ###########################################################################
2156
2157     def remove(self, from_dir=None):
2158         """
2159         Used (for instance) in p-u to remove the package from unchecked
2160
2161         Also removes the package from holding area.
2162         """
2163         if from_dir is None:
2164             from_dir = self.pkg.directory
2165         h = Holding()
2166
2167         for f in self.pkg.files.keys():
2168             os.unlink(os.path.join(from_dir, f))
2169             if os.path.exists(os.path.join(h.holding_dir, f)):
2170                 os.unlink(os.path.join(h.holding_dir, f))
2171
2172         os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2173         if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2174             os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2175
2176     ###########################################################################
2177
2178     def move_to_queue (self, queue):
2179         """
2180         Move files to a destination queue using the permissions in the table
2181         """
2182         h = Holding()
2183         utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2184                    queue.path, perms=int(queue.change_perms, 8))
2185         for f in self.pkg.files.keys():
2186             utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
2187
2188     ###########################################################################
2189
2190     def force_reject(self, reject_files):
2191         """
2192         Forcefully move files from the current directory to the
2193         reject directory.  If any file already exists in the reject
2194         directory it will be moved to the morgue to make way for
2195         the new file.
2196
2197         @type reject_files: dict
2198         @param reject_files: file dictionary
2199
2200         """
2201
2202         cnf = Config()
2203
2204         for file_entry in reject_files:
2205             # Skip any files which don't exist or which we don't have permission to copy.
2206             if os.access(file_entry, os.R_OK) == 0:
2207                 continue
2208
2209             dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
2210
2211             try:
2212                 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
2213             except OSError, e:
2214                 # File exists?  Let's find a new name by adding a number
2215                 if e.errno == errno.EEXIST:
2216                     try:
2217                         dest_file = utils.find_next_free(dest_file, 255)
2218                     except NoFreeFilenameError:
2219                         # Something's either gone badly Pete Tong, or
2220                         # someone is trying to exploit us.
2221                         utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))
2222                         return
2223
2224                     # Make sure we really got it
2225                     try:
2226                         dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2227                     except OSError, e:
2228                         # Likewise
2229                         utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
2230                         return
2231                 else:
2232                     raise
2233             # If we got here, we own the destination file, so we can
2234             # safely overwrite it.
2235             utils.move(file_entry, dest_file, 1, perms=0660)
2236             os.close(dest_fd)
2237
2238     ###########################################################################
2239     def do_reject (self, manual=0, reject_message="", notes=""):
2240         """
2241         Reject an upload. If called without a reject message or C{manual} is
2242         true, spawn an editor so the user can write one.
2243
2244         @type manual: bool
2245         @param manual: manual or automated rejection
2246
2247         @type reject_message: string
2248         @param reject_message: A reject message
2249
2250         @return: 0
2251
2252         """
2253         # If we weren't given a manual rejection message, spawn an
2254         # editor so the user can add one in...
2255         if manual and not reject_message:
2256             (fd, temp_filename) = utils.temp_filename()
2257             temp_file = os.fdopen(fd, 'w')
2258             if len(notes) > 0:
2259                 for note in notes:
2260                     temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
2261                                     % (note.author, note.version, note.notedate, note.comment))
2262             temp_file.close()
2263             editor = os.environ.get("EDITOR","vi")
2264             answer = 'E'
2265             while answer == 'E':
2266                 os.system("%s %s" % (editor, temp_filename))
2267                 temp_fh = utils.open_file(temp_filename)
2268                 reject_message = "".join(temp_fh.readlines())
2269                 temp_fh.close()
2270                 print "Reject message:"
2271                 print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
2272                 prompt = "[R]eject, Edit, Abandon, Quit ?"
2273                 answer = "XXX"
2274                 while prompt.find(answer) == -1:
2275                     answer = utils.our_raw_input(prompt)
2276                     m = re_default_answer.search(prompt)
2277                     if answer == "":
2278                         answer = m.group(1)
2279                     answer = answer[:1].upper()
2280             os.unlink(temp_filename)
2281             if answer == 'A':
2282                 return 1
2283             elif answer == 'Q':
2284                 sys.exit(0)
2285
2286         print "Rejecting.\n"
2287
2288         cnf = Config()
2289
2290         reason_filename = self.pkg.changes_file[:-8] + ".reason"
2291         reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2292
2293         # Move all the files into the reject directory
2294         reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2295         self.force_reject(reject_files)
2296
2297         # If we fail here someone is probably trying to exploit the race
2298         # so let's just raise an exception ...
2299         if os.path.exists(reason_filename):
2300             os.unlink(reason_filename)
2301         reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2302
2303         rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
2304
2305         self.update_subst()
2306         if not manual:
2307             self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2308             self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2309             self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2310             os.write(reason_fd, reject_message)
2311             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2312         else:
2313             # Build up the rejection email
2314             user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2315             self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2316             self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2317             self.Subst["__REJECT_MESSAGE__"] = ""
2318             self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2319             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2320             # Write the rejection email out as the <foo>.reason file
2321             os.write(reason_fd, reject_mail_message)
2322
2323         del self.Subst["__REJECTOR_ADDRESS__"]
2324         del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2325         del self.Subst["__CC__"]
2326
2327         os.close(reason_fd)
2328
2329         # Send the rejection mail
2330         utils.send_mail(reject_mail_message)
2331
2332         if self.logger:
2333             self.logger.log(["rejected", self.pkg.changes_file])
2334
2335         return 0
2336
2337     ################################################################################
2338     def in_override_p(self, package, component, suite, binary_type, filename, session):
2339         """
2340         Check if a package already has override entries in the DB
2341
2342         @type package: string
2343         @param package: package name
2344
2345         @type component: string
2346         @param component: database id of the component
2347
2348         @type suite: int
2349         @param suite: database id of the suite
2350
2351         @type binary_type: string
2352         @param binary_type: type of the package
2353
2354         @type filename: string
2355         @param filename: filename we check
2356
2357         @return: the database result. But noone cares anyway.
2358
2359         """
2360
2361         cnf = Config()
2362
2363         if binary_type == "": # must be source
2364             file_type = "dsc"
2365         else:
2366             file_type = binary_type
2367
2368         # Override suite name; used for example with proposed-updates
2369         oldsuite = get_suite(suite, session)
2370         if (not oldsuite is None) and oldsuite.overridesuite:
2371             suite = oldsuite.overridesuite
2372
2373         result = get_override(package, suite, component, file_type, session)
2374
2375         # If checking for a source package fall back on the binary override type
2376         if file_type == "dsc" and len(result) < 1:
2377             result = get_override(package, suite, component, ['deb', 'udeb'], session)
2378
2379         # Remember the section and priority so we can check them later if appropriate
2380         if len(result) > 0:
2381             result = result[0]
2382             self.pkg.files[filename]["override section"] = result.section.section
2383             self.pkg.files[filename]["override priority"] = result.priority.priority
2384             return result
2385
2386         return None
2387
2388     ################################################################################
2389     def get_anyversion(self, sv_list, suite):
2390         """
2391         @type sv_list: list
2392         @param sv_list: list of (suite, version) tuples to check
2393
2394         @type suite: string
2395         @param suite: suite name
2396
2397         Description: TODO
2398         """
2399         Cnf = Config()
2400         anyversion = None
2401         anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
2402         for (s, v) in sv_list:
2403             if s in [ x.lower() for x in anysuite ]:
2404                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2405                     anyversion = v
2406
2407         return anyversion
2408
2409     ################################################################################
2410
2411     def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2412         """
2413         @type sv_list: list
2414         @param sv_list: list of (suite, version) tuples to check
2415
2416         @type filename: string
2417         @param filename: XXX
2418
2419         @type new_version: string
2420         @param new_version: XXX
2421
2422         Ensure versions are newer than existing packages in target
2423         suites and that cross-suite version checking rules as
2424         set out in the conf file are satisfied.
2425         """
2426
2427         cnf = Config()
2428
2429         # Check versions for each target suite
2430         for target_suite in self.pkg.changes["distribution"].keys():
2431             must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
2432             must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
2433
2434             # Enforce "must be newer than target suite" even if conffile omits it
2435             if target_suite not in must_be_newer_than:
2436                 must_be_newer_than.append(target_suite)
2437
2438             for (suite, existent_version) in sv_list:
2439                 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
2440
2441                 if suite in must_be_newer_than and sourceful and vercmp < 1:
2442                     self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2443
2444                 if suite in must_be_older_than and vercmp > -1:
2445                     cansave = 0
2446
2447                     if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2448                         # we really use the other suite, ignoring the conflicting one ...
2449                         addsuite = self.pkg.changes["distribution-version"][suite]
2450
2451                         add_version = self.get_anyversion(sv_list, addsuite)
2452                         target_version = self.get_anyversion(sv_list, target_suite)
2453
2454                         if not add_version:
2455                             # not add_version can only happen if we map to a suite
2456                             # that doesn't enhance the suite we're propup'ing from.
2457                             # so "propup-ver x a b c; map a d" is a problem only if
2458                             # d doesn't enhance a.
2459                             #
2460                             # i think we could always propagate in this case, rather
2461                             # than complaining. either way, this isn't a REJECT issue
2462                             #
2463                             # And - we really should complain to the dorks who configured dak
2464                             self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite))
2465                             self.pkg.changes.setdefault("propdistribution", {})
2466                             self.pkg.changes["propdistribution"][addsuite] = 1
2467                             cansave = 1
2468                         elif not target_version:
2469                             # not targets_version is true when the package is NEW
2470                             # we could just stick with the "...old version..." REJECT
2471                             # for this, I think.
2472                             self.rejects.append("Won't propogate NEW packages.")
2473                         elif apt_pkg.VersionCompare(new_version, add_version) < 0:
2474                             # propogation would be redundant. no need to reject though.
2475                             self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2476                             cansave = 1
2477                         elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2478                              apt_pkg.VersionCompare(add_version, target_version) >= 0:
2479                             # propogate!!
2480                             self.warnings.append("Propogating upload to %s" % (addsuite))
2481                             self.pkg.changes.setdefault("propdistribution", {})
2482                             self.pkg.changes["propdistribution"][addsuite] = 1
2483                             cansave = 1
2484
2485                     if not cansave:
2486                         self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2487
2488     ################################################################################
2489     def check_binary_against_db(self, filename, session):
2490         # Ensure version is sane
2491         q = session.query(BinAssociation)
2492         q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[filename]["package"])
2493         q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[filename]["architecture"], 'all']))
2494
2495         self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
2496                                        filename, self.pkg.files[filename]["version"], sourceful=False)
2497
2498         # Check for any existing copies of the file
2499         q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2500         q = q.filter_by(version=self.pkg.files[filename]["version"])
2501         q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2502
2503         if q.count() > 0:
2504             self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
2505
2506     ################################################################################
2507
2508     def check_source_against_db(self, filename, session):
2509         source = self.pkg.dsc.get("source")
2510         version = self.pkg.dsc.get("version")
2511
2512         # Ensure version is sane
2513         q = session.query(SrcAssociation)
2514         q = q.join(DBSource).filter(DBSource.source==source)
2515
2516         self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
2517                                        filename, version, sourceful=True)
2518
2519     ################################################################################
2520     def check_dsc_against_db(self, filename, session):
2521         """
2522
2523         @warning: NB: this function can remove entries from the 'files' index [if
2524          the orig tarball is a duplicate of the one in the archive]; if
2525          you're iterating over 'files' and call this function as part of
2526          the loop, be sure to add a check to the top of the loop to
2527          ensure you haven't just tried to dereference the deleted entry.
2528
2529         """
2530
2531         Cnf = Config()
2532         self.pkg.orig_files = {} # XXX: do we need to clear it?
2533         orig_files = self.pkg.orig_files
2534
2535         # Try and find all files mentioned in the .dsc.  This has
2536         # to work harder to cope with the multiple possible
2537         # locations of an .orig.tar.gz.
2538         # The ordering on the select is needed to pick the newest orig
2539         # when it exists in multiple places.
2540         for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2541             found = None
2542             if self.pkg.files.has_key(dsc_name):
2543                 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2544                 actual_size = int(self.pkg.files[dsc_name]["size"])
2545                 found = "%s in incoming" % (dsc_name)
2546
2547                 # Check the file does not already exist in the archive
2548                 ql = get_poolfile_like_name(dsc_name, session)
2549
2550                 # Strip out anything that isn't '%s' or '/%s$'
2551                 for i in ql:
2552                     if not i.filename.endswith(dsc_name):
2553                         ql.remove(i)
2554
2555                 # "[dak] has not broken them.  [dak] has fixed a
2556                 # brokenness.  Your crappy hack exploited a bug in
2557                 # the old dinstall.
2558                 #
2559                 # "(Come on!  I thought it was always obvious that
2560                 # one just doesn't release different files with
2561                 # the same name and version.)"
2562                 #                        -- ajk@ on d-devel@l.d.o
2563
2564                 if len(ql) > 0:
2565                     # Ignore exact matches for .orig.tar.gz
2566                     match = 0
2567                     if re_is_orig_source.match(dsc_name):
2568                         for i in ql:
2569                             if self.pkg.files.has_key(dsc_name) and \
2570                                int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2571                                self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2572                                 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2573                                 # TODO: Don't delete the entry, just mark it as not needed
2574                                 # This would fix the stupidity of changing something we often iterate over
2575                                 # whilst we're doing it
2576                                 del self.pkg.files[dsc_name]
2577                                 dsc_entry["files id"] = i.file_id
2578                                 if not orig_files.has_key(dsc_name):
2579                                     orig_files[dsc_name] = {}
2580                                 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2581                                 match = 1
2582
2583                                 # Don't bitch that we couldn't find this file later
2584                                 try:
2585                                     self.later_check_files.remove(dsc_name)
2586                                 except ValueError:
2587                                     pass
2588
2589
2590                     if not match:
2591                         self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2592
2593             elif re_is_orig_source.match(dsc_name):
2594                 # Check in the pool
2595                 ql = get_poolfile_like_name(dsc_name, session)
2596
2597                 # Strip out anything that isn't '%s' or '/%s$'
2598                 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2599                 for i in ql:
2600                     if not i.filename.endswith(dsc_name):
2601                         ql.remove(i)
2602
2603                 if len(ql) > 0:
2604                     # Unfortunately, we may get more than one match here if,
2605                     # for example, the package was in potato but had an -sa
2606                     # upload in woody.  So we need to choose the right one.
2607
2608                     # default to something sane in case we don't match any or have only one
2609                     x = ql[0]
2610
2611                     if len(ql) > 1:
2612                         for i in ql:
2613                             old_file = os.path.join(i.location.path, i.filename)
2614                             old_file_fh = utils.open_file(old_file)
2615                             actual_md5 = apt_pkg.md5sum(old_file_fh)
2616                             old_file_fh.close()
2617                             actual_size = os.stat(old_file)[stat.ST_SIZE]
2618                             if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2619                                 x = i
2620
2621                     old_file = os.path.join(i.location.path, i.filename)
2622                     old_file_fh = utils.open_file(old_file)
2623                     actual_md5 = apt_pkg.md5sum(old_file_fh)
2624                     old_file_fh.close()
2625                     actual_size = os.stat(old_file)[stat.ST_SIZE]
2626                     found = old_file
2627                     suite_type = x.location.archive_type
2628                     # need this for updating dsc_files in install()
2629                     dsc_entry["files id"] = x.file_id
2630                     # See install() in process-accepted...
2631                     if not orig_files.has_key(dsc_name):
2632                         orig_files[dsc_name] = {}
2633                     orig_files[dsc_name]["id"] = x.file_id
2634                     orig_files[dsc_name]["path"] = old_file
2635                     orig_files[dsc_name]["location"] = x.location.location_id
2636                 else:
2637                     # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2638                     # Not there? Check the queue directories...
2639                     for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2640                         if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2641                             continue
2642                         in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2643                         if os.path.exists(in_otherdir):
2644                             in_otherdir_fh = utils.open_file(in_otherdir)
2645                             actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2646                             in_otherdir_fh.close()
2647                             actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2648                             found = in_otherdir
2649                             if not orig_files.has_key(dsc_name):
2650                                 orig_files[dsc_name] = {}
2651                             orig_files[dsc_name]["path"] = in_otherdir
2652
2653                     if not found:
2654                         self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2655                         continue
2656             else:
2657                 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2658                 continue
2659             if actual_md5 != dsc_entry["md5sum"]:
2660                 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2661             if actual_size != int(dsc_entry["size"]):
2662                 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2663
2664     ################################################################################
2665     # This is used by process-new and process-holding to recheck a changes file
2666     # at the time we're running.  It mainly wraps various other internal functions
2667     # and is similar to accepted_checks - these should probably be tidied up
2668     # and combined
2669     def recheck(self, session):
2670         cnf = Config()
2671         for f in self.pkg.files.keys():
2672             # The .orig.tar.gz can disappear out from under us is it's a
2673             # duplicate of one in the archive.
2674             if not self.pkg.files.has_key(f):
2675                 continue
2676
2677             entry = self.pkg.files[f]
2678
2679             # Check that the source still exists
2680             if entry["type"] == "deb":
2681                 source_version = entry["source version"]
2682                 source_package = entry["source package"]
2683                 if not self.pkg.changes["architecture"].has_key("source") \
2684                    and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
2685                     source_epochless_version = re_no_epoch.sub('', source_version)
2686                     dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2687                     found = False
2688                     for q in ["Embargoed", "Unembargoed", "Newstage"]:
2689                         if cnf.has_key("Dir::Queue::%s" % (q)):
2690                             if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
2691                                 found = True
2692                     if not found:
2693                         self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2694
2695             # Version and file overwrite checks
2696             if entry["type"] == "deb":
2697                 self.check_binary_against_db(f, session)
2698             elif entry["type"] == "dsc":
2699                 self.check_source_against_db(f, session)
2700                 self.check_dsc_against_db(f, session)
2701
2702     ################################################################################
2703     def accepted_checks(self, overwrite_checks, session):
2704         # Recheck anything that relies on the database; since that's not
2705         # frozen between accept and our run time when called from p-a.
2706
2707         # overwrite_checks is set to False when installing to stable/oldstable
2708
2709         propogate={}
2710         nopropogate={}
2711
2712         # Find the .dsc (again)
2713         dsc_filename = None
2714         for f in self.pkg.files.keys():
2715             if self.pkg.files[f]["type"] == "dsc":
2716                 dsc_filename = f
2717
2718         for checkfile in self.pkg.files.keys():
2719             # The .orig.tar.gz can disappear out from under us is it's a
2720             # duplicate of one in the archive.
2721             if not self.pkg.files.has_key(checkfile):
2722                 continue
2723
2724             entry = self.pkg.files[checkfile]
2725
2726             # Check that the source still exists
2727             if entry["type"] == "deb":
2728                 source_version = entry["source version"]
2729                 source_package = entry["source package"]
2730                 if not self.pkg.changes["architecture"].has_key("source") \
2731                    and not source_exists(source_package, source_version,  self.pkg.changes["distribution"].keys()):
2732                     self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2733
2734             # Version and file overwrite checks
2735             if overwrite_checks:
2736                 if entry["type"] == "deb":
2737                     self.check_binary_against_db(checkfile, session)
2738                 elif entry["type"] == "dsc":
2739                     self.check_source_against_db(checkfile, session)
2740                     self.check_dsc_against_db(dsc_filename, session)
2741
2742             # propogate in the case it is in the override tables:
2743             for suite in self.pkg.changes.get("propdistribution", {}).keys():
2744                 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2745                     propogate[suite] = 1
2746                 else:
2747                     nopropogate[suite] = 1
2748
2749         for suite in propogate.keys():
2750             if suite in nopropogate:
2751                 continue
2752             self.pkg.changes["distribution"][suite] = 1
2753
2754         for checkfile in self.pkg.files.keys():
2755             # Check the package is still in the override tables
2756             for suite in self.pkg.changes["distribution"].keys():
2757                 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2758                     self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2759
2760     ################################################################################
2761     # If any file of an upload has a recent mtime then chances are good
2762     # the file is still being uploaded.
2763
2764     def upload_too_new(self):
2765         cnf = Config()
2766         too_new = False
2767         # Move back to the original directory to get accurate time stamps
2768         cwd = os.getcwd()
2769         os.chdir(self.pkg.directory)
2770         file_list = self.pkg.files.keys()
2771         file_list.extend(self.pkg.dsc_files.keys())
2772         file_list.append(self.pkg.changes_file)
2773         for f in file_list:
2774             try:
2775                 last_modified = time.time()-os.path.getmtime(f)
2776                 if last_modified < int(cnf["Dinstall::SkipTime"]):
2777                     too_new = True
2778                     break
2779             except:
2780                 pass
2781
2782         os.chdir(cwd)
2783         return too_new
2784
2785     def store_changelog(self):
2786
2787         # Skip binary-only upload if it is not a bin-NMU
2788         if not self.pkg.changes['architecture'].has_key('source'):
2789             from daklib.regexes import re_bin_only_nmu
2790             if not re_bin_only_nmu.search(self.pkg.changes['version']):
2791                 return
2792
2793         session = DBConn().session()
2794
2795         # Check if upload already has a changelog entry
2796         query = """SELECT changelog_id FROM changes WHERE source = :source
2797                    AND version = :version AND architecture = :architecture AND changelog_id != 0"""
2798         if session.execute(query, {'source': self.pkg.changes['source'], \
2799                                    'version': self.pkg.changes['version'], \
2800                                    'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
2801             session.commit()
2802             return
2803
2804         # Add current changelog text into changelogs_text table, return created ID
2805         query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
2806         ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]
2807
2808         # Link ID to the upload available in changes table
2809         query = """UPDATE changes SET changelog_id = :id WHERE source = :source
2810                    AND version = :version AND architecture = :architecture"""
2811         session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
2812                                 'version': self.pkg.changes['version'], \
2813                                 'architecture': " ".join(self.pkg.changes['architecture'].keys())})
2814
2815         session.commit()