#!/usr/bin/env python
# vim:set et sw=4:

"""
Queue utility functions for dak

@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2001 - 2006 James Troup <james@nocrew.org>
@copyright: 2009, 2010  Joerg Jaspert <joerg@debian.org>
@license: GNU General Public License version 2 or later
"""

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

###############################################################################

import errno
import os
import stat
import sys
import time
import apt_inst
import apt_pkg
import utils
import commands
import shutil
import textwrap
from types import *
from sqlalchemy.sql.expression import desc
from sqlalchemy.orm.exc import NoResultFound

import yaml

from dak_exceptions import *
from changes import *
from regexes import *
from config import Config
from holding import Holding
from urgencylog import UrgencyLog
from dbconn import *
from summarystats import SummaryStats
from utils import parse_changes, check_dsc_files
from textutils import fix_maintainer
from binary import Binary
from lintian import parse_lintian_output, generate_reject_messages

###############################################################################

def get_type(f, session):
    """
    Get the file type of C{f}

    @type f: dict
    @param f: file entry from Changes object

    @type session: SQLA Session
    @param session: SQL Alchemy session object

    @rtype: string
    @return: filetype

    """
    # Determine the type
    if f.has_key("dbtype"):
        file_type = f["dbtype"]
    elif re_source_ext.match(f["type"]):
        file_type = "dsc"
    else:
        file_type = f["type"]
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (file_type))

    # Validate the override type
    type_id = get_override_type(file_type, session)
    if type_id is None:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))

    return file_type

################################################################################

# Determine what parts in a .changes are NEW

def determine_new(filename, changes, files, warn=1, session=None):
    """
    Determine what parts in a C{changes} file are NEW.

    @type filename: str
    @param filename: changes filename

    @type changes: Upload.Pkg.changes dict
    @param changes: Changes dictionary

    @type files: Upload.Pkg.files dict
    @param files: Files dictionary

    @type warn: bool
    @param warn: Warn if overrides are added for (old)stable

    @rtype: dict
    @return: dictionary of NEW components.

    """
    # TODO: This should all use the database instead of parsing the changes
    # file again
    new = {}
    byhand = {}

    dbchg = get_dbchange(filename, session)
    if dbchg is None:
        print "Warning: cannot find changes file in database; won't check byhand"

    # Build up a list of potentially new things
    for name, f in files.items():
        # Keep a record of byhand elements
        if f["section"] == "byhand":
            byhand[name] = 1
            continue

        pkg = f["package"]
        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f, session)
        component = f["component"]

        if file_type == "dsc":
            priority = "source"

        if not new.has_key(pkg):
            new[pkg] = {}
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        else:
            old_type = new[pkg]["type"]
            if old_type != file_type:
                # source gets trumped by deb or udeb
                if old_type == "dsc":
                    new[pkg]["priority"] = priority
                    new[pkg]["section"] = section
                    new[pkg]["type"] = file_type
                    new[pkg]["component"] = component

        new[pkg]["files"].append(name)

        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]

    # Fix up the list of target suites
    cnf = Config()
    for suite in changes["suite"].keys():
        oldsuite = get_suite(suite, session)
        if not oldsuite:
            print "WARNING: Invalid suite %s found" % suite
            continue

        if oldsuite.overridesuite:
            newsuite = get_suite(oldsuite.overridesuite, session)

            if newsuite:
                print "WARNING: overriding suite %s to suite %s" % (
                    suite, oldsuite.overridesuite)
                del changes["suite"][suite]
                changes["suite"][oldsuite.overridesuite] = 1
            else:
                print "WARNING: Told to use overridesuite %s for %s but it doesn't exist.  Bugger" % (
                    oldsuite.overridesuite, suite)

    # Check for unprocessed byhand files
    if dbchg is not None:
        for b in byhand.keys():
            # Find the file entry in the database
            found = False
            for f in dbchg.files:
                if f.filename == b:
                    found = True
                    # If it's processed, we can ignore it
                    if f.processed:
                        del byhand[b]
                    break

            if not found:
                print "Warning: Couldn't find BYHAND item %s in the database; assuming unprocessed" % (b)

    # Check for new stuff
    for suite in changes["suite"].keys():
        for pkg in new.keys():
            ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
            if len(ql) > 0:
                for file_entry in new[pkg]["files"]:
                    if files[file_entry].has_key("new"):
                        del files[file_entry]["new"]
                del new[pkg]

    if warn:
        for s in ['stable', 'oldstable']:
            if changes["suite"].has_key(s):
                print "WARNING: overrides will be added for %s!" % s
        for pkg in new.keys():
            if new[pkg].has_key("othercomponents"):
                print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])

    return new, byhand

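# An illustrative sketch (not from the dak test suite) of what determine_new()
# builds for a straightforward binary upload:
#
#   new["dak"] = {"priority": "optional", "section": "devel", "type": "deb",
#                 "component": "main", "files": ["dak_1.0_all.deb"]}
#
# Packages whose overrides already exist in every target suite are removed
# from this dictionary before it is returned.
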
################################################################################

def check_valid(new, session=None):
    """
    Check if section and priority for NEW packages exist in the database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non-debian-installer packages cannot be udeb
      - source priority can only be assigned to dsc file types

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.

    """
    for pkg in new.keys():
        section_name = new[pkg]["section"]
        priority_name = new[pkg]["priority"]
        file_type = new[pkg]["type"]

        section = get_section(section_name, session)
        if section is None:
            new[pkg]["section id"] = -1
        else:
            new[pkg]["section id"] = section.section_id

        priority = get_priority(priority_name, session)
        if priority is None:
            new[pkg]["priority id"] = -1
        else:
            new[pkg]["priority id"] = priority.priority_id

        # Sanity checks
        di = section_name.find("debian-installer") != -1

        # If d-i, we must be udeb and vice-versa
        if (di and file_type not in ("udeb", "dsc")) or \
           (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1

        # If dsc we need to be source and vice-versa.  Note: compare the
        # priority *name* here; ``priority`` is the database object.
        if (priority_name == "source" and file_type != "dsc") or \
           (priority_name != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1

###############################################################################

# Used by Upload.check_timestamps
class TarTime(object):
    def __init__(self, future_cutoff, past_cutoff):
        self.reset()
        self.future_cutoff = future_cutoff
        self.past_cutoff = past_cutoff

    def reset(self):
        self.future_files = {}
        self.ancient_files = {}

    def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
        if MTime > self.future_cutoff:
            self.future_files[Name] = MTime
        if MTime < self.past_cutoff:
            self.ancient_files[Name] = MTime

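# A minimal sketch (assuming the old-style apt_inst extraction API this
# callback signature is shaped for) of how check_timestamps can drive TarTime
# over a .deb's data tarball; cutoff values here are made up:
#
#   tar = TarTime(future_cutoff=time.time() + 24 * 3600,
#                 past_cutoff=time.mktime((1984, 1, 1, 0, 0, 0, 0, 0, 0)))
#   apt_inst.debExtract(utils.open_file(deb_filename),
#                       tar.callback, "data.tar.gz")
#   # tar.future_files / tar.ancient_files now map member names to mtimes.
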
###############################################################################

def prod_maintainer(notes, upload):
    cnf = Config()

    # Here we prepare an editor and get them ready to prod...
    (fd, temp_filename) = utils.temp_filename()
    temp_file = os.fdopen(fd, 'w')
    for note in notes:
        temp_file.write(note.comment)
    temp_file.close()
    editor = os.environ.get("EDITOR", "vi")
    answer = 'E'
    while answer == 'E':
        os.system("%s %s" % (editor, temp_filename))
        temp_fh = utils.open_file(temp_filename)
        prod_message = "".join(temp_fh.readlines())
        temp_fh.close()
        print "Prod message:"
        print utils.prefix_multi_line_string(prod_message, "  ", include_blank_lines=1)
        prompt = "[P]rod, Edit, Abandon, Quit ?"
        answer = "XXX"
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            m = re_default_answer.search(prompt)
            if answer == "":
                answer = m.group(1)
            answer = answer[:1].upper()
    os.unlink(temp_filename)
    if answer == 'A':
        return
    elif answer == 'Q':
        end()
        sys.exit(0)
    # Otherwise, do the prodding...
    user_email_address = utils.whoami() + " <%s>" % (
        cnf["Dinstall::MyAdminAddress"])

    Subst = upload.Subst

    Subst["__FROM_ADDRESS__"] = user_email_address
    Subst["__PROD_MESSAGE__"] = prod_message
    Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]

    prod_mail_message = utils.TemplateSubst(
        Subst, cnf["Dir::Templates"] + "/process-new.prod")

    # Send the prod mail
    utils.send_mail(prod_mail_message)

    print "Sent prodding message"

################################################################################

def edit_note(note, upload, session):
    # Write the current data to a temporary file
    (fd, temp_filename) = utils.temp_filename()
    editor = os.environ.get("EDITOR", "vi")
    answer = 'E'
    while answer == 'E':
        os.system("%s %s" % (editor, temp_filename))
        temp_file = utils.open_file(temp_filename)
        newnote = temp_file.read().rstrip()
        temp_file.close()
        print "New Note:"
        print utils.prefix_multi_line_string(newnote, "  ")
        prompt = "[D]one, Edit, Abandon, Quit ?"
        answer = "XXX"
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            m = re_default_answer.search(prompt)
            if answer == "":
                answer = m.group(1)
            answer = answer[:1].upper()
    os.unlink(temp_filename)
    if answer == 'A':
        return
    elif answer == 'Q':
        end()
        sys.exit(0)

    comment = NewComment()
    comment.package = upload.pkg.changes["source"]
    comment.version = upload.pkg.changes["version"]
    comment.comment = newnote
    comment.author  = utils.whoami()
    comment.trainee = bool(Options["Trainee"])
    session.add(comment)
    session.commit()

###############################################################################

class Upload(object):
    """
    Everything that has to do with processing an upload.

    """
    def __init__(self):
        self.logger = None
        self.pkg = Changes()
        self.reset()

    ###########################################################################

    def reset(self):
        """ Reset a number of internal variables."""

        # Initialize the substitution template map
        cnf = Config()
        self.Subst = {}
        self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
        self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
        self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
        self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]

        self.rejects = []
        self.warnings = []
        self.notes = []

        self.later_check_files = []

        self.pkg.reset()

    def package_info(self):
        """
        Format various messages from this Upload to send to the maintainer.
        """

        msgs = (
            ('Reject Reasons', self.rejects),
            ('Warnings', self.warnings),
            ('Notes', self.notes),
        )

        msg = ''
        for title, messages in msgs:
            if messages:
                msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
        msg += '\n\n'

        return msg

    ###########################################################################
    def update_subst(self):
        """ Set up the per-package template substitution mappings """

        cnf = Config()

        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not self.pkg.changes.has_key("architecture") or not \
           isinstance(self.pkg.changes["architecture"], dict):
            self.pkg.changes["architecture"] = { "Unknown" : "" }

        # and maintainer2047 may not exist.
        if not self.pkg.changes.has_key("maintainer2047"):
            self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]

        self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
        self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if self.pkg.changes["architecture"].has_key("source") and \
           self.pkg.changes["changedby822"] != "" and \
           (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):

            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
        else:
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")

        # Process policy doesn't set the fingerprint field and I don't want to make it
        # do it for now as I don't want to have to deal with the case where we accepted
        # the package into PU-NEW, but the fingerprint has gone away from the keyring in
        # the meantime so the package will be remarked as rejectable.  Urgh.
        # TODO: Fix this properly
        if self.pkg.changes.has_key('fingerprint'):
            session = DBConn().session()
            fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
            if fpr and self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
                if self.pkg.changes.has_key("sponsoremail"):
                    self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
            session.close()

        if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
            self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if cnf.get("Dinstall::OverrideMaintainer"):
            self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
            self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]

        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
        self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
        self.Subst["__SUITE__"] = ", ".join(self.pkg.changes["distribution"])

    ###########################################################################
    def load_changes(self, filename):
        """
        Load a changes file and set up a dictionary around it. Also checks for
        mandatory fields within.

        @type filename: string
        @param filename: Changes filename, full path.

        @rtype: boolean
        @return: whether the changes file was valid or not.  We may want to
                 reject even if this is True (see what gets put in self.rejects).
                 This is simply to prevent us even trying things later which will
                 fail because we couldn't properly parse the file.
        """
        Cnf = Config()
        self.pkg.changes_file = filename

        # Parse the .changes file into a dictionary
        try:
            self.pkg.changes.update(parse_changes(filename))
        except CantOpenError:
            self.rejects.append("%s: can't read file." % (filename))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except ChangesUnicodeError:
            self.rejects.append("%s: changes file not proper utf-8" % (filename))
            return False

        # Parse the Files field from the .changes into another dictionary
        try:
            self.pkg.files.update(utils.build_file_list(self.pkg.changes))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (filename, format))
            return False

        # Check for mandatory fields
        for i in ("distribution", "source", "binary", "architecture",
                  "version", "maintainer", "files", "changes", "description"):
            if not self.pkg.changes.has_key(i):
                # Avoid undefined errors later
                self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
                return False

        # Strip a source version in brackets from the source field
        if re_strip_srcver.search(self.pkg.changes["source"]):
            self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])
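        # (e.g. a Source field of "hello (1.0-1)" is reduced to "hello")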

        # Ensure the source field is a valid package name.
        if not re_valid_pkg_name.match(self.pkg.changes["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))

        # Split multi-value fields into a lower-level dictionary
        for i in ("architecture", "distribution", "binary", "closes"):
            o = self.pkg.changes.get(i, "")
            if o != "":
                del self.pkg.changes[i]

            self.pkg.changes[i] = {}

            for j in o.split():
                self.pkg.changes[i][j] = 1

        # Fix the Maintainer: field to be RFC822/2047 compatible
        try:
            (self.pkg.changes["maintainer822"],
             self.pkg.changes["maintainer2047"],
             self.pkg.changes["maintainername"],
             self.pkg.changes["maintaineremail"]) = \
                   fix_maintainer(self.pkg.changes["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["maintainer"], msg))

        # ...likewise for the Changed-By: field if it exists.
        try:
            (self.pkg.changes["changedby822"],
             self.pkg.changes["changedby2047"],
             self.pkg.changes["changedbyname"],
             self.pkg.changes["changedbyemail"]) = \
                   fix_maintainer(self.pkg.changes.get("changed-by", ""))
        except ParseMaintError, msg:
            self.pkg.changes["changedby822"] = ""
            self.pkg.changes["changedby2047"] = ""
            self.pkg.changes["changedbyname"] = ""
            self.pkg.changes["changedbyemail"] = ""

            self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["changed-by"], msg))

        # Ensure all the values in Closes: are numbers
        if self.pkg.changes.has_key("closes"):
            for i in self.pkg.changes["closes"].keys():
                if re_isanum.match(i) is None:
                    self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))

        # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
        self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
        self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])
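        # (e.g. Version "1:2.3-4" gives chopversion "2.3-4" and chopversion2 "2.3")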

        # Check the .changes is non-empty
        if not self.pkg.files:
            self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
            return False

        # Changes was syntactically valid even if we'll reject
        return True

    ###########################################################################

    def check_distributions(self):
        "Check and map the Distribution field"

        Cnf = Config()

        # Handle suite mappings
        for m in Cnf.ValueList("SuiteMappings"):
            args = m.split()
            mtype = args[0]
            if mtype == "map" or mtype == "silent-map":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    del self.pkg.changes["distribution"][source]
                    self.pkg.changes["distribution"][dest] = 1
                    if mtype != "silent-map":
                        self.notes.append("Mapping %s to %s." % (source, dest))
                if self.pkg.changes.has_key("distribution-version"):
                    if self.pkg.changes["distribution-version"].has_key(source):
                        self.pkg.changes["distribution-version"][source] = dest
            elif mtype == "map-unreleased":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    for arch in self.pkg.changes["architecture"].keys():
                        if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
                            self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
                            del self.pkg.changes["distribution"][source]
                            self.pkg.changes["distribution"][dest] = 1
                            break
            elif mtype == "ignore":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    del self.pkg.changes["distribution"][suite]
                    self.warnings.append("Ignoring %s as a target suite." % (suite))
            elif mtype == "reject":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    self.rejects.append("Uploads to %s are not accepted." % (suite))
            elif mtype == "propup-version":
                # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
                #
                # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
                if self.pkg.changes["distribution"].has_key(args[1]):
                    self.pkg.changes.setdefault("distribution-version", {})
                    for suite in args[2:]:
                        self.pkg.changes["distribution-version"][suite] = suite

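        # Illustrative SuiteMappings entries matching the syntax parsed above
        # (example values only; the real list lives in dak.conf):
        #   "map stable proposed-updates"
        #   "silent-map oldstable-security oldstable"
        #   "map-unreleased stable unstable"
        #   "propup-version testing-security testing"
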
        # Ensure there is (still) a target distribution
        if len(self.pkg.changes["distribution"].keys()) < 1:
            self.rejects.append("No valid distribution remaining.")

        # Ensure target distributions exist
        for suite in self.pkg.changes["distribution"].keys():
            if not Cnf.has_key("Suite::%s" % (suite)):
                self.rejects.append("Unknown distribution `%s'." % (suite))

    ###########################################################################

    def binary_file_checks(self, f, session):
        cnf = Config()
        entry = self.pkg.files[f]

        # Extract package control information
        deb_file = utils.open_file(f)
        try:
            control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
        except:
            self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
            deb_file.close()
            # Can't continue, none of the checks on control would work.
            return

        # Check for mandatory "Description:"
        deb_file.seek(0)
        try:
            apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
        except:
            self.rejects.append("%s: Missing Description in binary package" % (f))
            return

        deb_file.close()

        # Check for mandatory fields
        for field in [ "Package", "Architecture", "Version" ]:
            if control.Find(field) is None:
                # Can't continue
                self.rejects.append("%s: No %s field in control." % (f, field))
                return

        # Ensure the package name matches the one given in the .changes
        if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
            self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))

        # Validate the package field
        package = control.Find("Package")
        if not re_valid_pkg_name.match(package):
            self.rejects.append("%s: invalid package name '%s'." % (f, package))

        # Validate the version field
        version = control.Find("Version")
        if not re_valid_version.match(version):
            self.rejects.append("%s: invalid version number '%s'." % (f, version))

        # Ensure the architecture of the .deb is one we know about.
        default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
        architecture = control.Find("Architecture")
        upload_suite = self.pkg.changes["distribution"].keys()[0]

        if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session=session)] and \
           architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session=session)]:
            self.rejects.append("Unknown architecture '%s'." % (architecture))

        # Ensure the architecture of the .deb is one of the ones
        # listed in the .changes.
        if not self.pkg.changes["architecture"].has_key(architecture):
            self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))

        # Sanity-check the Depends field
        depends = control.Find("Depends")
        if depends == '':
            self.rejects.append("%s: Depends field is empty." % (f))

        # Sanity-check the Provides field
        provides = control.Find("Provides")
        if provides:
            provide = re_spacestrip.sub('', provides)
            if provide == '':
                self.rejects.append("%s: Provides field is empty." % (f))
            prov_list = provide.split(",")
            for prov in prov_list:
                if not re_valid_pkg_name.match(prov):
                    self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))

        # Check the section & priority match those given in the .changes (non-fatal)
        if control.Find("Section") and entry["section"] != "" \
           and entry["section"] != control.Find("Section"):
            self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
                                (f, control.Find("Section", ""), entry["section"]))
        if control.Find("Priority") and entry["priority"] != "" \
           and entry["priority"] != control.Find("Priority"):
            self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
                                (f, control.Find("Priority", ""), entry["priority"]))

        entry["package"] = package
        entry["architecture"] = architecture
        entry["version"] = version
        entry["maintainer"] = control.Find("Maintainer", "")

        if f.endswith(".udeb"):
            self.pkg.files[f]["dbtype"] = "udeb"
        elif f.endswith(".deb"):
            self.pkg.files[f]["dbtype"] = "deb"
        else:
            self.rejects.append("%s is neither a .deb nor a .udeb." % (f))

        entry["source"] = control.Find("Source", entry["package"])

        # Get the source version
        source = entry["source"]
        source_version = ""

        if source.find("(") != -1:
            m = re_extract_src_version.match(source)
            source = m.group(1)
            source_version = m.group(2)

        if not source_version:
            source_version = self.pkg.files[f]["version"]

        entry["source package"] = source
        entry["source version"] = source_version

        # Ensure the filename matches the contents of the .deb
        m = re_isadeb.match(f)

        #  package name
        file_package = m.group(1)
        if entry["package"] != file_package:
            self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
                                (f, file_package, entry["dbtype"], entry["package"]))
        epochless_version = re_no_epoch.sub('', control.Find("Version"))

        #  version
        file_version = m.group(2)
        if epochless_version != file_version:
            self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
                                (f, file_version, entry["dbtype"], epochless_version))

        #  architecture
        file_architecture = m.group(3)
        if entry["architecture"] != file_architecture:
            self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
                                (f, file_architecture, entry["dbtype"], entry["architecture"]))

        # Check for existent source
        source_version = entry["source version"]
        source_package = entry["source package"]
        if self.pkg.changes["architecture"].has_key("source"):
            if source_version != self.pkg.changes["version"]:
                self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
                                    (source_version, f, self.pkg.changes["version"]))
        else:
            # Check in the SQL database
            if not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
                # Check in one of the other directories
                source_epochless_version = re_no_epoch.sub('', source_version)
                dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
                if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
                    entry["byhand"] = 1
                elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
                    entry["new"] = 1
                else:
                    dsc_file_exists = False
                    for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates", "Lenny-Volatile-Proposed-Updates"]:
                        if cnf.has_key("Dir::Queue::%s" % (myq)):
                            if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
                                dsc_file_exists = True
                                break

                    if not dsc_file_exists:
                        self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))

        # Check the version and for file overwrites
        self.check_binary_against_db(f, session)

        # Temporarily disable contents generation until we change the table storage layout
        #b = Binary(f)
        #b.scan_package()
        #if len(b.rejects) > 0:
        #    for j in b.rejects:
        #        self.rejects.append(j)

    def source_file_checks(self, f, session):
        entry = self.pkg.files[f]

        m = re_issource.match(f)
        if not m:
            return

        entry["package"] = m.group(1)
        entry["version"] = m.group(2)
        entry["type"] = m.group(3)

        # Ensure the source package name matches the Source field in the .changes
        if self.pkg.changes["source"] != entry["package"]:
            self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))

        # Ensure the source version matches the version in the .changes file
        if re_is_orig_source.match(f):
            changes_version = self.pkg.changes["chopversion2"]
        else:
            changes_version = self.pkg.changes["chopversion"]

        if changes_version != entry["version"]:
            self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))

        # Ensure the .changes lists source in the Architecture field
        if not self.pkg.changes["architecture"].has_key("source"):
            self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))

        # Check the signature of a .dsc file
        if entry["type"] == "dsc":
            # check_signature returns either:
            #  (None, [list, of, rejects]) or (signature, [])
            (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
            for j in rejects:
                self.rejects.append(j)

        entry["architecture"] = "source"

    def per_suite_file_checks(self, f, suite, session):
        cnf = Config()
        entry = self.pkg.files[f]

        # Skip byhand
        if entry.has_key("byhand"):
            return

        # Check we have fields we need to do these checks
        oktogo = True
        for m in ['component', 'package', 'priority', 'size', 'md5sum']:
            if not entry.has_key(m):
                self.rejects.append("file '%s' does not have field %s set" % (f, m))
                oktogo = False

        if not oktogo:
            return

        # Handle component mappings
        for m in cnf.ValueList("ComponentMappings"):
            (source, dest) = m.split()
            if entry["component"] == source:
                entry["original component"] = source
                entry["component"] = dest

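        # (an illustrative ComponentMappings entry, not necessarily from any
        #  real dak.conf: "contrib/non-free non-free" would remap the first
        #  component to the second)
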
        # Ensure the component is valid for the target suite
        if cnf.has_key("Suite::%s::Components" % (suite)) and \
           entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
            self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
            return

        # Validate the component
        if not get_component(entry["component"], session):
            self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
            return

        # See if the package is NEW
        if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype", ""), f, session):
            entry["new"] = 1

        # Validate the priority
        if entry["priority"].find('/') != -1:
            self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))

        # Determine the location
        location = cnf["Dir::Pool"]
        l = get_location(location, entry["component"], session=session)
        if l is None:
            self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
            entry["location id"] = -1
        else:
            entry["location id"] = l.location_id

        # Check the md5sum & size against existing files (if any)
        entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])

        found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
                                         entry["size"], entry["md5sum"], entry["location id"])
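        # As the branches below assume, check_poolfile() returns a
        # (found, poolfile) pair: (None, _) for multiple database matches,
        # (False, <file>) when an existing pool copy differs in size or
        # md5sum, and otherwise poolfile is the matching pool file object
        # (or None if the file is not in the pool yet).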

        if found is None:
            self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
        elif found is False and poolfile is not None:
            self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
        else:
            if poolfile is None:
                entry["files id"] = None
            else:
                entry["files id"] = poolfile.file_id

        # Check for packages that have moved from one component to another
        entry['suite'] = suite
        res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
        if res.rowcount > 0:
            entry["othercomponents"] = res.fetchone()[0]

    def check_files(self, action=True):
        file_keys = self.pkg.files.keys()
        holding = Holding()
        cnf = Config()

        if action:
            cwd = os.getcwd()
            os.chdir(self.pkg.directory)
            for f in file_keys:
                ret = holding.copy_to_holding(f)
                if ret is not None:
                    self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)

            os.chdir(cwd)

        # Check whether we already know this changes file
        # [NB: this check must be done post-suite mapping]
        base_filename = os.path.basename(self.pkg.changes_file)

        session = DBConn().session()

        try:
            dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
            # if in the pool or in a queue other than unchecked, reject
            if (dbc.in_queue is None) \
                   or (dbc.in_queue is not None
                       and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
                self.rejects.append("%s file already known to dak" % base_filename)
        except NoResultFound, e:
            # not known, good
            pass

        has_binaries = False
        has_source = False

        for f, entry in self.pkg.files.items():
            # Ensure the file does not already exist in one of the accepted directories
            for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                if not cnf.has_key("Dir::Queue::%s" % (d)): continue
                if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d)], f)):
                    self.rejects.append("%s file already exists in the %s directory." % (f, d))

            if not re_taint_free.match(f):
                self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))

            # Check the file is readable
            if os.access(f, os.R_OK) == 0:
                # When running in -n, copy_to_holding() won't have
                # generated the reject_message, so we need to.
                if action:
                    if os.path.exists(f):
                        self.rejects.append("Can't read `%s'. [permission denied]" % (f))
                    else:
                        # Don't directly reject, mark to check later to deal with orig's
                        # we can find in the pool
                        self.later_check_files.append(f)
                entry["type"] = "unreadable"
                continue

            # If it's byhand skip remaining checks
            if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Checks for a binary package...
            elif re_isadeb.match(f):
                has_binaries = True
                entry["type"] = "deb"

                # This routine appends to self.rejects/warnings as appropriate
                self.binary_file_checks(f, session)

            # Checks for a source package...
            elif re_issource.match(f):
                has_source = True

                # This routine appends to self.rejects/warnings as appropriate
                self.source_file_checks(f, session)

            # Not a binary or source package?  Assume byhand...
            else:
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Per-suite file checks
            entry["oldfiles"] = {}
            for suite in self.pkg.changes["distribution"].keys():
                self.per_suite_file_checks(f, suite, session)

        session.close()

        # If the .changes file says it has source, it must have source.
        if self.pkg.changes["architecture"].has_key("source"):
            if not has_source:
                self.rejects.append("no source found and Architecture line in changes mentions source.")

            if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
                self.rejects.append("source only uploads are not supported.")

    ###########################################################################
    def check_dsc(self, action=True, session=None):
        """Returns bool indicating whether or not the source changes are valid"""
        # Ensure there is source to check
        if not self.pkg.changes["architecture"].has_key("source"):
            return True

        # Find the .dsc
        dsc_filename = None
        for f, entry in self.pkg.files.items():
            if entry["type"] == "dsc":
                if dsc_filename:
                    self.rejects.append("cannot process a .changes file with multiple .dsc's.")
                    return False
                else:
                    dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            self.rejects.append("source uploads must contain a dsc file")
            return False

        # Parse the .dsc file
        try:
            self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1, dsc_file=1))
        except CantOpenError:
            # if not -n copy_to_holding() will have done this for us...
            if not action:
                self.rejects.append("%s: can't read file." % (dsc_filename))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
        except InvalidDscError, line:
            self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
        except ChangesUnicodeError:
            self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))

        # Build up the file list of files mentioned by the .dsc
        try:
            self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
        except NoFilesFieldError:
            self.rejects.append("%s: no Files: field." % (dsc_filename))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
            return False

        # Enforce mandatory fields
        for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
            if not self.pkg.dsc.has_key(i):
                self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
                return False

        # Validate the source and version fields
        if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
        if not re_valid_version.match(self.pkg.dsc["version"]):
            self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))

        # Only a limited list of source formats are allowed in each suite
        for dist in self.pkg.changes["distribution"].keys():
            allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
            if self.pkg.dsc["format"] not in allowed:
                self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s)" % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))

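        # (format_name values here are the usual Debian source formats, e.g.
        #  "1.0", "3.0 (native)" and "3.0 (quilt)", as configured per suite)
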
        # Validate the Maintainer field
        try:
            # We ignore the return value
            fix_maintainer(self.pkg.dsc["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                                 % (dsc_filename, self.pkg.dsc["maintainer"], msg))

        # Validate the build-depends field(s)
        for field_name in [ "build-depends", "build-depends-indep" ]:
            field = self.pkg.dsc.get(field_name)
            if field:
                # Have apt try to parse them...
                try:
                    apt_pkg.ParseSrcDepends(field)
                except:
                    self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))

        # Ensure the version number in the .dsc matches the version number in the .changes
        epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        changes_version = self.pkg.files[dsc_filename]["version"]

        if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
            self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))

        # Ensure the Files field contain only what's expected
        self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))

        # Ensure source is newer than existing source in target suites
        session = DBConn().session()
        self.check_source_against_db(dsc_filename, session)
        self.check_dsc_against_db(dsc_filename, session)

        dbchg = get_dbchange(self.pkg.changes_file, session)

        # Finally, check if we're missing any files
        for f in self.later_check_files:
            print 'XXX: %s' % f
            # Check if we've already processed this file if we have a dbchg object
            ok = False
            if dbchg:
                for pf in dbchg.files:
                    if pf.filename == f and pf.processed:
                        self.notes.append('%s was already processed so we can go ahead' % f)
                        ok = True
                        del self.pkg.files[f]
            if not ok:
                self.rejects.append("Could not find file %s referenced in changes" % f)

        session.close()

        return True

    ###########################################################################

    def get_changelog_versions(self, source_dir):
        """Extracts the source package and (optionally) grabs the
        version history out of debian/changelog for the BTS."""

        cnf = Config()

        # Find the .dsc (again)
        dsc_filename = None
        for f in self.pkg.files.keys():
            if self.pkg.files[f]["type"] == "dsc":
                dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            return

        # Create a symlink mirror of the source files in our temporary directory
        for f in self.pkg.files.keys():
            m = re_issource.match(f)
            if m:
                src = os.path.join(source_dir, f)
                # If a file is missing for whatever reason, give up.
                if not os.path.exists(src):
                    return
                ftype = m.group(3)
                if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
                   self.pkg.orig_files[f].has_key("path"):
                    continue
                dest = os.path.join(os.getcwd(), f)
                os.symlink(src, dest)

        # If the orig files are not a part of the upload, create symlinks to the
        # existing copies.
        for orig_file in self.pkg.orig_files.keys():
            if not self.pkg.orig_files[orig_file].has_key("path"):
                continue
            dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
            os.symlink(self.pkg.orig_files[orig_file]["path"], dest)

        # Extract the source
        cmd = "dpkg-source -sn -x %s" % (dsc_filename)
        (result, output) = commands.getstatusoutput(cmd)
        if (result != 0):
            self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
            self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
            return

        if not cnf.Find("Dir::Queue::BTSVersionTrack"):
            return

        # Get the upstream version
        upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        if re_strip_revision.search(upstr_version):
            upstr_version = re_strip_revision.sub('', upstr_version)

        # Ensure the changelog file exists
        changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
        if not os.path.exists(changelog_filename):
            self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
            return

        # Parse the changelog
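        # (re_changelog_versions matches the header lines of changelog
        #  entries, e.g. "hello (1.0-1) unstable; urgency=low", so only the
        #  version tree is kept for the BTS)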
        self.pkg.dsc["bts changelog"] = ""
        changelog_file = utils.open_file(changelog_filename)
        for line in changelog_file.readlines():
            m = re_changelog_versions.match(line)
            if m:
                self.pkg.dsc["bts changelog"] += line
        changelog_file.close()

        # Check we found at least one revision in the changelog
        if not self.pkg.dsc["bts changelog"]:
            self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))

    def check_source(self):
        # Bail out if:
        #    a) there's no source
        if not self.pkg.changes["architecture"].has_key("source"):
            return

        tmpdir = utils.temp_dirname()

        # Move into the temporary directory
        cwd = os.getcwd()
        os.chdir(tmpdir)

        # Get the changelog version history
        self.get_changelog_versions(cwd)

        # Move back and cleanup the temporary tree
        os.chdir(cwd)

        try:
            shutil.rmtree(tmpdir)
        except OSError, e:
            if e.errno != errno.EACCES:
                print "foobar"
                utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))

            self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
            # We probably have u-r or u-w directories so chmod everything
            # and try again.
            cmd = "chmod -R u+rwx %s" % (tmpdir)
            result = os.system(cmd)
            if result != 0:
                utils.fubar("'%s' failed with result %s." % (cmd, result))
            shutil.rmtree(tmpdir)
        except Exception, e:
            print "foobar2 (%s)" % e
            utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))

    ###########################################################################
    def ensure_hashes(self):
        # Make sure we recognise the format of the Files: field in the .changes
        format = self.pkg.changes.get("format", "0.0").split(".", 1)
        if len(format) == 2:
            format = int(format[0]), int(format[1])
        else:
            format = int(float(format[0])), 0
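        # For example, a "Format: 1.8" field yields (1, 8), while a bare
        # "1" yields (1, 0).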

        # We need to deal with the original changes blob, as the fields we need
        # might not be in the changes dict serialised into the .dak anymore.
        orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])

        # Copy the checksums over to the current changes dict.  This will keep
        # the existing modifications to it intact.
        for field in orig_changes:
            if field.startswith('checksums-'):
                self.pkg.changes[field] = orig_changes[field]

        # Check for unsupported hashes
        for j in utils.check_hash_fields(".changes", self.pkg.changes):
            self.rejects.append(j)

        for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
            self.rejects.append(j)

        # For changes files predating the format version in which a given hash
        # was introduced, calculate the hash ourselves rather than requiring it
        # to be present in the changes file.
        for hashname, hashfunc, version in utils.known_hashes:
            # TODO: Move _ensure_changes_hash into this class
            for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
                self.rejects.append(j)
            if "source" in self.pkg.changes["architecture"]:
                # TODO: Move _ensure_dsc_hash into this class
                for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
                    self.rejects.append(j)
    def check_hashes(self):
        for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
            self.rejects.append(m)

        for m in utils.check_size(".changes", self.pkg.files):
            self.rejects.append(m)

        for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
            self.rejects.append(m)

        for m in utils.check_size(".dsc", self.pkg.dsc_files):
            self.rejects.append(m)

        self.ensure_hashes()

    ###########################################################################

    def ensure_orig(self, target_dir='.', session=None):
        """
        Ensures that all orig files mentioned in the changes file are present
        in target_dir. If they do not exist, they are symlinked into place.

        A list of the symlinks that were created is returned (so they can be
        removed later).
        """

        symlinked = []
        cnf = Config()

        for filename, entry in self.pkg.dsc_files.iteritems():
            if not re_is_orig_source.match(filename):
                # File is not an orig; ignore
                continue

            if os.path.exists(filename):
                # File exists, no need to continue
                continue

            def symlink_if_valid(path):
                f = utils.open_file(path)
                md5sum = apt_pkg.md5sum(f)
                f.close()

                fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
                expected = (int(entry['size']), entry['md5sum'])

                if fingerprint != expected:
                    return False

                dest = os.path.join(target_dir, filename)

                os.symlink(path, dest)
                symlinked.append(dest)

                return True

            session_ = session
            if session is None:
                session_ = DBConn().session()

            found = False

            # Look in the pool
            for poolfile in get_poolfile_like_name('%s' % filename, session_):
                poolfile_path = os.path.join(
                    poolfile.location.path, poolfile.filename
                )

                if symlink_if_valid(poolfile_path):
                    found = True
                    break

            if session is None:
                session_.close()

            if found:
                continue

            # Look in some other queues for the file
            queues = ('New', 'Byhand', 'ProposedUpdates',
                'OldProposedUpdates', 'Embargoed', 'Unembargoed')
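            # Each name maps to a Dir::Queue::<name> configuration key; for
            # instance, cnf['Dir::Queue::New'] might point at something like
            # "/srv/queue/new" (illustrative path only).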

            for queue in queues:
                if not cnf.get('Dir::Queue::%s' % queue):
                    continue

                queuefile_path = os.path.join(
                    cnf['Dir::Queue::%s' % queue], filename
                )

                if not os.path.exists(queuefile_path):
                    # Does not exist in this queue
                    continue

                if symlink_if_valid(queuefile_path):
                    break

        return symlinked

    ###########################################################################

    def check_lintian(self):
        """
        Extends self.rejects by checking the output of lintian against tags
        specified in Dinstall::LintianTags.
        """

        cnf = Config()

        # Don't reject binary uploads
        if not self.pkg.changes['architecture'].has_key('source'):
            return

        # Only check some distributions
        for dist in ('unstable', 'experimental'):
            if dist in self.pkg.changes['distribution']:
                break
        else:
            return

        # If we do not have a tagfile, don't do anything
        tagfile = cnf.get("Dinstall::LintianTags")
        if tagfile is None:
            return

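        # The tag file is YAML with a top-level "lintian" key mapping tag
        # categories to lists of tag names, e.g. (illustrative only):
        #
        #   lintian:
        #     warning:
        #       - some-lintian-tag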
        # Parse the yaml file
        sourcefile = file(tagfile, 'r')
        sourcecontent = sourcefile.read()
        sourcefile.close()

        try:
            lintiantags = yaml.load(sourcecontent)['lintian']
        except yaml.YAMLError, msg:
            utils.fubar("Cannot read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
            return

        # Try and find all orig files mentioned in the .dsc
        symlinked = self.ensure_orig()

        # Setup the input file for lintian
        fd, temp_filename = utils.temp_filename()
        temptagfile = os.fdopen(fd, 'w')
        for tags in lintiantags.values():
            temptagfile.writelines(['%s\n' % x for x in tags])
        temptagfile.close()

        try:
            cmd = "lintian --show-overrides --tags-from-file %s %s" % \
                (temp_filename, self.pkg.changes_file)

            result, output = commands.getstatusoutput(cmd)
        finally:
            # Remove our tempfile and any symlinks we created
            os.unlink(temp_filename)

            for symlink in symlinked:
                os.unlink(symlink)

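        # An exit status of 2 indicates the lintian run itself failed;
        # 0 and 1 merely reflect whether tags were emitted.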
        if result == 2:
            utils.warn("lintian failed for %s [return code: %s]." % \
                (self.pkg.changes_file, result))
            utils.warn(utils.prefix_multi_line_string(output, \
                " [possible output:] "))

        def log(*txt):
            if self.logger:
                self.logger.log(
                    [self.pkg.changes_file, "check_lintian"] + list(txt)
                )

        # Generate messages
        parsed_tags = parse_lintian_output(output)
        self.rejects.extend(
            generate_reject_messages(parsed_tags, lintiantags, log=log)
        )

    ###########################################################################
    def check_urgency(self):
        cnf = Config()
        if self.pkg.changes["architecture"].has_key("source"):
            if not self.pkg.changes.has_key("urgency"):
                self.pkg.changes["urgency"] = cnf["Urgency::Default"]
            self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
            if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
                self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
                                     (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
                self.pkg.changes["urgency"] = cnf["Urgency::Default"]

    ###########################################################################

    # Sanity check the time stamps of files inside debs.
    # [Files in the near future cause ugly warnings and extreme time
    #  travel can cause errors on extraction]

    def check_timestamps(self):
        Cnf = Config()

        future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
        past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
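        # For instance, with Dinstall::FutureTimeTravelGrace set to 86400 and
        # Dinstall::PastCutoffYear set to "1975" (both values illustrative),
        # anything more than a day in the future or older than 1975-01-01
        # would be flagged.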
        tar = TarTime(future_cutoff, past_cutoff)

        for filename, entry in self.pkg.files.items():
            if entry["type"] == "deb":
                tar.reset()
                try:
                    deb_file = utils.open_file(filename)
                    apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
                    deb_file.seek(0)
                    try:
                        apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
                    except SystemError, e:
                        # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
                        if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
                            raise
                        deb_file.seek(0)
                        apt_inst.debExtract(deb_file, tar.callback, "data.tar.bz2")

                    deb_file.close()

                    future_files = tar.future_files.keys()
                    if future_files:
                        num_future_files = len(future_files)
                        future_file = future_files[0]
                        future_date = tar.future_files[future_file]
                        self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
                               % (filename, num_future_files, future_file, time.ctime(future_date)))

                    ancient_files = tar.ancient_files.keys()
                    if ancient_files:
                        num_ancient_files = len(ancient_files)
                        ancient_file = ancient_files[0]
                        ancient_date = tar.ancient_files[ancient_file]
                        self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
                               % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
                except:
                    self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))

    def check_if_upload_is_sponsored(self, uid_email, uid_name):
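        # An upload counts as sponsored when the signing key belongs to
        # neither the Maintainer nor the Changed-By person of the upload.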
        if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
            sponsored = False
        elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
            sponsored = False
            if uid_name == "":
                sponsored = True
        else:
            sponsored = True
            if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
                sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
                if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
                    self.pkg.changes["changedbyemail"] not in sponsor_addresses):
                    self.pkg.changes["sponsoremail"] = uid_email

        return sponsored


    ###########################################################################
    # check_signed_by_key checks
    ###########################################################################

    def check_signed_by_key(self):
        """Ensure the .changes is signed by an authorized uploader."""
        session = DBConn().session()

        # First of all we check that the person has proper upload permissions
        # and that this upload isn't blocked
        fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)

        if fpr is None:
            self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
            return

        # TODO: Check that import-keyring adds UIDs properly
        if not fpr.uid:
            self.rejects.append("Cannot find uid for fingerprint %s.  Please contact ftpmaster@debian.org" % fpr.fingerprint)
            return

        # Check that the fingerprint which uploaded has permission to do so
        self.check_upload_permissions(fpr, session)

        # Check that this package is not in a transition
        self.check_transition(session)

        session.close()


    def check_upload_permissions(self, fpr, session):
        # Check any one-off upload blocks
        self.check_upload_blocks(fpr, session)

        # DM is unfortunately a special case, so we check it first
        # (keys with no source access get more access than DMs in one
        #  way; DMs can only upload for their packages whether source
        #  or binary, whereas keys with no access might be able to
        #  upload some binaries)
        if fpr.source_acl.access_level == 'dm':
            self.check_dm_upload(fpr, session)
        else:
            # Check source-based permissions for other types
            if self.pkg.changes["architecture"].has_key("source") and \
                fpr.source_acl.access_level is None:
                rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
                rej += '\nPlease contact ftpmaster if you think this is incorrect'
                self.rejects.append(rej)
                return
            # If not a DM, we allow full upload rights
            uid_email = "%s@debian.org" % (fpr.uid.uid)
            self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)


        # Check binary upload permissions
        # By this point we know that DMs can't have got here unless they
        # are allowed to deal with the package concerned so just apply
        # normal checks
        if fpr.binary_acl.access_level == 'full':
            return

        # Otherwise we're in the map case
        tmparches = self.pkg.changes["architecture"].copy()
        tmparches.pop('source', None)

        for bam in fpr.binary_acl_map:
            tmparches.pop(bam.architecture.arch_string, None)

        if len(tmparches.keys()) > 0:
            if fpr.binary_reject:
                rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
                rej += "\narchitectures involved are: %s" % ",".join(tmparches.keys())
                self.rejects.append(rej)
            else:
                # TODO: This is where we'll implement reject vs throw away binaries later
                rej = "Uhm.  I'm meant to throw away the binaries now but that's not implemented yet"
                rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
                rej += "\nFingerprint: %s" % (fpr.fingerprint)
                self.rejects.append(rej)


    def check_upload_blocks(self, fpr, session):
        """Check whether any upload blocks apply to this source, source
           version, uid / fpr combination"""

        def block_rej_template(fb):
            rej = 'Manual upload block in place for package %s' % fb.source
            if fb.version is not None:
                rej += ', version %s' % fb.version
            return rej

        for fb in session.query(UploadBlock).filter_by(source=self.pkg.changes['source']).all():
            # version is None if the block applies to all versions
            if fb.version is None or fb.version == self.pkg.changes['version']:
                # Check both fpr and uid - either is enough to cause a reject
                if fb.fpr is not None:
                    if fb.fpr.fingerprint == fpr.fingerprint:
                        self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
                if fb.uid is not None:
                    if fb.uid == fpr.uid:
                        self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))


    def check_dm_upload(self, fpr, session):
        # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
        ## none of the uploaded packages are NEW
        rej = False
        for f in self.pkg.files.keys():
            if self.pkg.files[f].has_key("byhand"):
                self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
                rej = True
            if self.pkg.files[f].has_key("new"):
                self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
                rej = True

        if rej:
            return

        ## the most recent version of the package uploaded to unstable or
        ## experimental includes the field "DM-Upload-Allowed: yes" in the source
        ## section of its control file
        q = session.query(DBSource).filter_by(source=self.pkg.changes["source"])
        q = q.join(SrcAssociation)
        q = q.join(Suite).filter(Suite.suite_name.in_(['unstable', 'experimental']))
        q = q.order_by(desc('source.version')).limit(1)

        r = q.all()

        if len(r) != 1:
            rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
            self.rejects.append(rej)
            return

        r = r[0]
        if not r.dm_upload_allowed:
            rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
            self.rejects.append(rej)
            return

        ## the Maintainer: field of the uploaded .changes file corresponds with
        ## the owner of the key used (ie, non-developer maintainers may not sponsor
        ## uploads)
        if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
            self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))

        ## the most recent version of the package uploaded to unstable or
        ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
        ## non-developer maintainers cannot NMU or hijack packages)

        # srcuploaders includes the maintainer
        accept = False
        for sup in r.srcuploaders:
            (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
            # Eww - I hope we never have two people with the same name in Debian
            if email == fpr.uid.uid or name == fpr.uid.name:
                accept = True
                break

        if not accept:
            self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
            return

        ## none of the packages are being taken over from other source packages
        for b in self.pkg.changes["binary"].keys():
            for suite in self.pkg.changes["distribution"].keys():
                q = session.query(DBSource)
                q = q.join(DBBinary).filter_by(package=b)
                q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)

                for s in q.all():
                    if s.source != self.pkg.changes["source"]:
                        self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s, suite))


    def check_transition(self, session):
        cnf = Config()

        sourcepkg = self.pkg.changes["source"]

        # Only sourceful uploads to unstable are subject to transition
        # checks; experimental and the proposed-updates queues are exempt.
        if "source" not in self.pkg.changes["architecture"] or \
           "unstable" not in self.pkg.changes["distribution"]:
            return

        # Also only check if a transitions file is defined (and exists).
        transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
        if transpath == "" or not os.path.exists(transpath):
            return

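        # The transitions file is YAML keyed by transition name; each entry
        # is consumed below via its "source", "new", "rm", "reason" and
        # "packages" fields, so a minimal entry might look like
        # (illustrative only):
        #
        #   libfoo2:
        #     source: libfoo
        #     new: 2.0-1
        #     rm: Some Releaser
        #     reason: library transition
        #     packages:
        #       - bar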
        # Parse the yaml file
        sourcefile = file(transpath, 'r')
        sourcecontent = sourcefile.read()
        try:
            transitions = yaml.load(sourcecontent)
        except yaml.YAMLError, msg:
            # This shouldn't happen, as there is a wrapper to edit the file
            # which checks it, but we'd rather be safe than end up rejecting
            # everything.
            utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
            return

        # Now look through all defined transitions
        for trans in transitions:
            t = transitions[trans]
            source = t["source"]
            expected = t["new"]

            # Will be None if nothing is in testing.
            current = get_source_in_suite(source, "testing", session)
            if current is not None:
                compare = apt_pkg.VersionCompare(current.version, expected)

            if current is None or compare < 0:
                # This is still valid, the current version in testing is older than
                # the new version we wait for, or there is none in testing yet

                # Check if the source we look at is affected by this.
                if sourcepkg in t['packages']:
                    # The source is affected, let's reject it.

                    rejectmsg = "%s: part of the %s transition.\n\n" % (
                        sourcepkg, trans)

                    if current is not None:
                        currentlymsg = "at version %s" % (current.version)
                    else:
                        currentlymsg = "not present in testing"

                    rejectmsg += "Transition description: %s\n\n" % (t["reason"])

                    rejectmsg += "\n".join(textwrap.wrap("""Your package
is part of a testing transition designed to get %s migrated (it is
currently %s, we need version %s).  This transition is managed by the
Release Team, and %s is the Release-Team member responsible for it.
Please mail debian-release@lists.debian.org or contact %s directly if you
need further assistance.  You might want to upload to experimental until this
transition is done."""
                            % (source, currentlymsg, expected, t["rm"], t["rm"])))

                    self.rejects.append(rejectmsg)
                    return

    ###########################################################################
    # End check_signed_by_key checks
    ###########################################################################

    def build_summaries(self):
        """ Build a summary of changes the upload introduces. """

        (byhand, new, summary, override_summary) = self.pkg.file_summary()

        short_summary = summary

        # This is for direport's benefit...
        f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))

        if byhand or new:
            summary += "Changes: " + f

        summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"

        summary += self.announce(short_summary, 0)

        return (summary, short_summary)

    ###########################################################################

    def close_bugs(self, summary, action):
        """
        Send mail to close bugs as instructed by the closes field in the changes file.
        Also add a line to summary if any work was done.

        @type summary: string
        @param summary: summary text, as given by L{build_summaries}

        @type action: bool
        @param action: If set to false, no real action will be done.

        @rtype: string
        @return: summary. If action was taken, extended by the list of closed bugs.

        """

        template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')

        bugs = self.pkg.changes["closes"].keys()

        if not bugs:
            return summary

        bugs.sort()
        summary += "Closing bugs: "
        for bug in bugs:
            summary += "%s " % (bug)
            if action:
                self.update_subst()
                self.Subst["__BUG_NUMBER__"] = bug
                if self.pkg.changes["distribution"].has_key("stable"):
                    self.Subst["__STABLE_WARNING__"] = """
Note that this package is not part of the released stable Debian
distribution.  It may have dependencies on other unreleased software,
or other instabilities.  Please take care if you wish to install it.
The update will eventually make its way into the next released Debian
distribution."""
                else:
                    self.Subst["__STABLE_WARNING__"] = ""
                mail_message = utils.TemplateSubst(self.Subst, template)
                utils.send_mail(mail_message)

                # Clean up after ourselves
                del self.Subst["__BUG_NUMBER__"]
                del self.Subst["__STABLE_WARNING__"]

        if action and self.logger:
            self.logger.log(["closing bugs"] + bugs)

        summary += "\n"

        return summary

    ###########################################################################

    def announce(self, short_summary, action):
        """
        Send an announce mail about a new upload.

        @type short_summary: string
        @param short_summary: Short summary text to include in the mail

        @type action: bool
        @param action: If set to false, no real action will be done.

        @rtype: string
        @return: Textstring about action taken.

        """

        cnf = Config()
        announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')

        # Only do announcements for source uploads with a recent dpkg-dev installed
        if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
           self.pkg.changes["architecture"].has_key("source"):
            return ""

        lists_done = {}
        summary = ""

        self.Subst["__SHORT_SUMMARY__"] = short_summary

        for dist in self.pkg.changes["distribution"].keys():
            suite = get_suite(dist)
            announce_list = suite.announce
            if announce_list == "" or lists_done.has_key(announce_list):
                continue

            lists_done[announce_list] = 1
            summary += "Announcing to %s\n" % (announce_list)

            if action:
                self.update_subst()
                self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
                if cnf.get("Dinstall::TrackingServer") and \
                   self.pkg.changes["architecture"].has_key("source"):
                    trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
                    self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto

                mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
                utils.send_mail(mail_message)

                del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]

        if cnf.FindB("Dinstall::CloseBugs"):
            summary = self.close_bugs(summary, action)

        del self.Subst["__SHORT_SUMMARY__"]

        return summary

    ###########################################################################
    @session_wrapper
    def accept (self, summary, short_summary, session=None):
        """
        Accept an upload.

        This moves all files referenced from the .changes into the pool,
        sends the accepted mail, announces to lists, closes bugs and
        also checks for override disparities. If enabled it will write out
        the version history for the BTS Version Tracking and will finally call
        L{queue_build}.

        @type summary: string
        @param summary: Summary text

        @type short_summary: string
        @param short_summary: Short summary
        """

        cnf = Config()
        stats = SummaryStats()

        print "Installing."
        self.logger.log(["installing changes", self.pkg.changes_file])

        poolfiles = []

        # Add the .dsc file to the DB first
        for newfile, entry in self.pkg.files.items():
            if entry["type"] == "dsc":
                source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
                for j in pfs:
                    poolfiles.append(j)

        # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
        for newfile, entry in self.pkg.files.items():
            if entry["type"] == "deb":
                poolfiles.append(add_deb_to_db(self, newfile, session))

        # If this is a sourceful diff only upload that is moving
        # cross-component we need to copy the .orig files into the new
        # component too for the same reasons as above.
        # XXX: mhy: I think this should be in add_dsc_to_db
        if self.pkg.changes["architecture"].has_key("source"):
            for orig_file in self.pkg.orig_files.keys():
                if not self.pkg.orig_files[orig_file].has_key("id"):
                    continue # Skip if it's not in the pool
                orig_file_id = self.pkg.orig_files[orig_file]["id"]
                if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
                    continue # Skip if the location didn't change

                # Do the move
                oldf = get_poolfile_by_id(orig_file_id, session)
                old_filename = os.path.join(oldf.location.path, oldf.filename)
                old_dat = {'size': oldf.filesize,   'md5sum': oldf.md5sum,
                           'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}

                new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))

                # TODO: Care about size/md5sum collisions etc
                (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)

                # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
                if newf is None:
                    utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
                    newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)

                    session.flush()

                    # Don't reference the old file from this changes
                    for p in poolfiles:
                        if p.file_id == oldf.file_id:
                            poolfiles.remove(p)

                    poolfiles.append(newf)

                    # Fix up the DSC references
                    toremove = []

                    for df in source.srcfiles:
                        if df.poolfile.file_id == oldf.file_id:
                            # Add a new DSC entry and mark the old one for deletion
                            # Don't do it in the loop so we don't change the thing we're iterating over
                            newdscf = DSCFile()
                            newdscf.source_id = source.source_id
                            newdscf.poolfile_id = newf.file_id
                            session.add(newdscf)

                            toremove.append(df)

                    for df in toremove:
                        session.delete(df)

                    # Flush our changes
                    session.flush()

                    # Make sure that our source object is up-to-date
                    session.expire(source)

        # Add changelog information to the database
        self.store_changelog()

        # Install the files into the pool
        for newfile, entry in self.pkg.files.items():
            destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
            utils.move(newfile, destination)
            self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
            stats.accept_bytes += float(entry["size"])

        # Copy the .changes file across for suites which need it.
        copy_changes = dict([(x.copychanges, '')
                             for x in session.query(Suite).filter(Suite.suite_name.in_(self.pkg.changes["distribution"].keys())).all()
                             if x.copychanges is not None])

        for dest in copy_changes.keys():
            utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))

        # We're done - commit the database changes
        session.commit()
        # Our SQL session will automatically start a new transaction after
        # the last commit

        # Move the .changes into the 'done' directory
        utils.move(self.pkg.changes_file,
                   os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))

        if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
            UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])

        self.update_subst()
        self.Subst["__SUMMARY__"] = summary
        mail_message = utils.TemplateSubst(self.Subst,
                                           os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
        utils.send_mail(mail_message)
        self.announce(short_summary, 1)

        ## Helper stuff for DebBugs Version Tracking
        if cnf.Find("Dir::Queue::BTSVersionTrack"):
            if self.pkg.changes["architecture"].has_key("source"):
                (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
                version_history = os.fdopen(fd, 'w')
                version_history.write(self.pkg.dsc["bts changelog"])
                version_history.close()
                filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
                                      self.pkg.changes_file[:-8] + ".versions")
                os.rename(temp_filename, filename)
                os.chmod(filename, 0644)

            # Write out the binary -> source mapping.
            (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
            debinfo = os.fdopen(fd, 'w')
            for name, entry in sorted(self.pkg.files.items()):
                if entry["type"] == "deb":
                    line = " ".join([entry["package"], entry["version"],
                                     entry["architecture"], entry["source package"],
                                     entry["source version"]])
                    debinfo.write(line + "\n")
            debinfo.close()
            filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
                                  self.pkg.changes_file[:-8] + ".debinfo")
            os.rename(temp_filename, filename)
            os.chmod(filename, 0644)

        session.commit()

        # Set up our copy queues (e.g. buildd queues)
        for suite_name in self.pkg.changes["distribution"].keys():
            suite = get_suite(suite_name, session)
            for q in suite.copy_queues:
                for f in poolfiles:
                    q.add_file_from_pool(f)

        session.commit()

        # Finally...
        stats.accept_count += 1

    def check_override(self):
        """
        Checks override entries for validity. Mails "Override disparity" warnings,
        if that feature is enabled.

        Abandons the check if
          - override disparity checks are disabled
          - mail sending is disabled
        """

        cnf = Config()

        # Abandon the check if override disparity checks have been disabled
        if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
            return

        summary = self.pkg.check_override()

        if summary == "":
            return

        overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')

        self.update_subst()
        self.Subst["__SUMMARY__"] = summary
        mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
        utils.send_mail(mail_message)
        del self.Subst["__SUMMARY__"]

    ###########################################################################

    def remove(self, from_dir=None):
        """
        Used (for instance) in p-u to remove the package from unchecked.

        Also removes the package from the holding area.
        """
        if from_dir is None:
            from_dir = self.pkg.directory
        h = Holding()

        for f in self.pkg.files.keys():
            os.unlink(os.path.join(from_dir, f))
            if os.path.exists(os.path.join(h.holding_dir, f)):
                os.unlink(os.path.join(h.holding_dir, f))

        os.unlink(os.path.join(from_dir, self.pkg.changes_file))
        if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
            os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))

    ###########################################################################

    def move_to_queue (self, queue):
        """
        Move files to a destination queue using the permissions in the table
        """
        h = Holding()
        utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
                   queue.path, perms=int(queue.change_perms, 8))
        for f in self.pkg.files.keys():
            utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))

    ###########################################################################

    def force_reject(self, reject_files):
        """
        Forcefully move files from the current directory to the
        reject directory.  If any file already exists in the reject
        directory it will be moved to the morgue to make way for
        the new file.

        @type reject_files: dict
        @param reject_files: file dictionary

        """

        cnf = Config()

        for file_entry in reject_files:
            # Skip any files which don't exist or which we don't have permission to copy.
            if os.access(file_entry, os.R_OK) == 0:
                continue

            dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)

            try:
                dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
            except OSError, e:
                # File exists?  Let's find a new name by adding a number
                if e.errno == errno.EEXIST:
                    try:
                        dest_file = utils.find_next_free(dest_file, 255)
                    except NoFreeFilenameError:
                        # Something's either gone badly Pete Tong, or
                        # someone is trying to exploit us.
                        utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))
                        return

                    # Make sure we really got it
                    try:
                        dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
                    except OSError, e:
                        # Likewise
                        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
                        return
                else:
                    raise
            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file_entry, dest_file, 1, perms=0660)
            os.close(dest_fd)

    ###########################################################################
    def do_reject (self, manual=0, reject_message="", notes=""):
        """
        Reject an upload. If C{manual} is true and no reject message was
        given, spawn an editor so the user can write one.

        @type manual: bool
        @param manual: manual or automated rejection

        @type reject_message: string
        @param reject_message: A reject message

        @return: 0 on success, 1 if the user abandoned the rejection

        """
        # If we weren't given a manual rejection message, spawn an
        # editor so the user can add one in...
        if manual and not reject_message:
            (fd, temp_filename) = utils.temp_filename()
            temp_file = os.fdopen(fd, 'w')
            if len(notes) > 0:
                for note in notes:
                    temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
                                    % (note.author, note.version, note.notedate, note.comment))
            temp_file.close()
            editor = os.environ.get("EDITOR", "vi")
            answer = 'E'
            while answer == 'E':
                os.system("%s %s" % (editor, temp_filename))
                temp_fh = utils.open_file(temp_filename)
                reject_message = "".join(temp_fh.readlines())
                temp_fh.close()
                print "Reject message:"
                print utils.prefix_multi_line_string(reject_message, "  ", include_blank_lines=1)
                prompt = "[R]eject, Edit, Abandon, Quit ?"
                answer = "XXX"
                while prompt.find(answer) == -1:
                    answer = utils.our_raw_input(prompt)
                    m = re_default_answer.search(prompt)
                    if answer == "":
                        answer = m.group(1)
                    answer = answer[:1].upper()
            os.unlink(temp_filename)
            if answer == 'A':
                return 1
            elif answer == 'Q':
                sys.exit(0)

        print "Rejecting.\n"

        cnf = Config()

        reason_filename = self.pkg.changes_file[:-8] + ".reason"
        reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
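        # The [:-8] slice drops the ".changes" suffix, so for example
        # "foo_1.0-1_amd64.changes" yields "foo_1.0-1_amd64.reason".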

        # Move all the files into the reject directory
        reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
        self.force_reject(reject_files)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reason_filename):
            os.unlink(reason_filename)
        reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)

        rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")

        self.update_subst()
        if not manual:
            self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
            self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
            self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
            os.write(reason_fd, reject_message)
            reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
        else:
            # Build up the rejection email
            user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
            self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
            self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
            self.Subst["__REJECT_MESSAGE__"] = ""
            self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
            reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
            # Write the rejection email out as the <foo>.reason file
            os.write(reason_fd, reject_mail_message)

        del self.Subst["__REJECTOR_ADDRESS__"]
        del self.Subst["__MANUAL_REJECT_MESSAGE__"]
        del self.Subst["__CC__"]

        os.close(reason_fd)

        # Send the rejection mail
        utils.send_mail(reject_mail_message)

        if self.logger:
            self.logger.log(["rejected", self.pkg.changes_file])

        return 0

    ################################################################################
    def in_override_p(self, package, component, suite, binary_type, filename, session):
        """
        Check if a package already has override entries in the DB

        @type package: string
        @param package: package name

        @type component: string
        @param component: component name

        @type suite: string
        @param suite: suite name

        @type binary_type: string
        @param binary_type: type of the package

        @type filename: string
        @param filename: filename we check

        @return: the database result. But no one cares anyway.

        """

        cnf = Config()

        if binary_type == "": # must be source
            file_type = "dsc"
        else:
            file_type = binary_type

        # Override suite name; used for example with proposed-updates
        oldsuite = get_suite(suite, session)
        if oldsuite.overridesuite:
            suite = oldsuite.overridesuite

        result = get_override(package, suite, component, file_type, session)

        # If checking for a source package fall back on the binary override type
        if file_type == "dsc" and len(result) < 1:
            result = get_override(package, suite, component, ['deb', 'udeb'], session)

        # Remember the section and priority so we can check them later if appropriate
        if len(result) > 0:
            result = result[0]
            self.pkg.files[filename]["override section"] = result.section.section
            self.pkg.files[filename]["override priority"] = result.priority.priority
            return result

        return None

    ################################################################################
    def get_anyversion(self, sv_list, suite):
        """
        @type sv_list: list
        @param sv_list: list of (suite, version) tuples to check

        @type suite: string
        @param suite: suite name

        Description: TODO
        """
        Cnf = Config()
        anyversion = None
        anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
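        # For example, if "Suite::testing::VersionChecks::Enhances" listed
        # "testing-proposed-updates" (illustrative), anysuite for suite
        # "testing" would cover both suites when picking the highest version.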
        for (s, v) in sv_list:
            if s in [ x.lower() for x in anysuite ]:
                if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
                    anyversion = v

        return anyversion

    ################################################################################

    def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
        """
        @type sv_list: list
        @param sv_list: list of (suite, version) tuples to check

        @type filename: string
        @param filename: XXX

        @type new_version: string
        @param new_version: XXX

        Ensure versions are newer than existing packages in target
        suites and that cross-suite version checking rules as
        set out in the conf file are satisfied.
        """

        cnf = Config()

        # Check versions for each target suite
        for target_suite in self.pkg.changes["distribution"].keys():
            must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
            must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]

            # Enforce "must be newer than target suite" even if conffile omits it
            if target_suite not in must_be_newer_than:
                must_be_newer_than.append(target_suite)

            for (suite, existent_version) in sv_list:
                vercmp = apt_pkg.VersionCompare(new_version, existent_version)
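                # VersionCompare returns > 0 if new_version is newer than
                # existent_version, 0 if equal and < 0 if older.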

                if suite in must_be_newer_than and sourceful and vercmp < 1:
                    self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))

                if suite in must_be_older_than and vercmp > -1:
                    cansave = 0

                    if self.pkg.changes.get('distribution-version', {}).has_key(suite):
                        # we really use the other suite, ignoring the conflicting one ...
                        addsuite = self.pkg.changes["distribution-version"][suite]

                        add_version = self.get_anyversion(sv_list, addsuite)
                        target_version = self.get_anyversion(sv_list, target_suite)

                        if not add_version:
                            # not add_version can only happen if we map to a suite
                            # that doesn't enhance the suite we're propup'ing from.
                            # so "propup-ver x a b c; map a d" is a problem only if
                            # d doesn't enhance a.
                            #
                            # i think we could always propagate in this case, rather
                            # than complaining. either way, this isn't a REJECT issue
                            #
                            # And - we really should complain to the dorks who configured dak
                            self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyway" % (suite, addsuite))
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            cansave = 1
                        elif not target_version:
                            # not target_version is true when the package is NEW;
                            # we could just stick with the "...old version..." REJECT
                            # for this, I think.
                            self.rejects.append("Won't propagate NEW packages.")
                        elif apt_pkg.VersionCompare(new_version, add_version) < 0:
                            # propagation would be redundant. no need to reject though.
                            self.warnings.append("ignoring version conflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
                            cansave = 1
                        elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
                             apt_pkg.VersionCompare(add_version, target_version) >= 0:
                            # propagate!!
                            self.warnings.append("Propagating upload to %s" % (addsuite))
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            cansave = 1

                    if not cansave:
                        self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))

    ################################################################################
    def check_binary_against_db(self, filename, session):
        # Ensure version is sane
        q = session.query(BinAssociation)
        q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[filename]["package"])
        q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[filename]["architecture"], 'all']))

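        # Feed the checker a (suite, version) pair for every suite that
        # already carries this binary on our architecture (or 'all')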
        self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
                                       filename, self.pkg.files[filename]["version"], sourceful=False)

        # Check for any existing copies of the file
        q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
        q = q.filter_by(version=self.pkg.files[filename]["version"])
        q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])

        if q.count() > 0:
            self.rejects.append("%s: cannot overwrite existing copy already in the archive." % filename)

    ################################################################################

    def check_source_against_db(self, filename, session):
        source = self.pkg.dsc.get("source")
        version = self.pkg.dsc.get("version")

        # Ensure version is sane
        q = session.query(SrcAssociation)
        q = q.join(DBSource).filter(DBSource.source==source)

        self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
                                       filename, version, sourceful=True)

    ################################################################################
    def check_dsc_against_db(self, filename, session):
        """
        Check the files listed in a .dsc against the archive database.

        @warning: NB: this function can remove entries from the 'files' index [if
         the orig tarball is a duplicate of the one in the archive]; if
         you're iterating over 'files' and call this function as part of
         the loop, be sure to add a check to the top of the loop to
         ensure you haven't just tried to dereference the deleted entry.

        """

        Cnf = Config()
        self.pkg.orig_files = {} # XXX: do we need to clear it?
        orig_files = self.pkg.orig_files

        # Try and find all files mentioned in the .dsc.  This has
        # to work harder to cope with the multiple possible
        # locations of an .orig.tar.gz.
        # The ordering on the select is needed to pick the newest orig
        # when it exists in multiple places.
        for dsc_name, dsc_entry in self.pkg.dsc_files.items():
            found = None
            if self.pkg.files.has_key(dsc_name):
                actual_md5 = self.pkg.files[dsc_name]["md5sum"]
                actual_size = int(self.pkg.files[dsc_name]["size"])
                found = "%s in incoming" % (dsc_name)

                # Check the file does not already exist in the archive
                ql = get_poolfile_like_name(dsc_name, session)

                # The query does a substring match, so only keep entries whose
                # filename ends in the exact .dsc-listed name.  (Don't remove
                # from ql while iterating over it - that skips elements.)
                ql = [ i for i in ql if i.filename.endswith(dsc_name) ]

                # "[dak] has not broken them.  [dak] has fixed a
                # brokenness.  Your crappy hack exploited a bug in
                # the old dinstall."
                #
                # "(Come on!  I thought it was always obvious that
                # one just doesn't release different files with
                # the same name and version.)"
                #                        -- ajk@ on d-devel@l.d.o

                if len(ql) > 0:
                    # Ignore exact matches for .orig.tar.gz
                    match = 0
                    if re_is_orig_source.match(dsc_name):
                        for i in ql:
                            if self.pkg.files.has_key(dsc_name) and \
                               int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
                               self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
                                self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
                                # TODO: Don't delete the entry, just mark it as not needed
                                # This would fix the stupidity of changing something we often iterate over
                                # whilst we're doing it
                                del self.pkg.files[dsc_name]
                                dsc_entry["files id"] = i.file_id
                                if not orig_files.has_key(dsc_name):
                                    orig_files[dsc_name] = {}
                                orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
                                match = 1

                                # Don't bitch that we couldn't find this file later
                                try:
                                    self.later_check_files.remove(dsc_name)
                                except ValueError:
                                    pass

                    if not match:
                        self.rejects.append("cannot overwrite existing copy of '%s' already in the archive." % (dsc_name))

            elif re_is_orig_source.match(dsc_name):
                # Check in the pool
                ql = get_poolfile_like_name(dsc_name, session)

                # Only keep exact basename matches.
                # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
                ql = [ i for i in ql if i.filename.endswith(dsc_name) ]
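                # (A hedged sketch of that TODO, assuming dbconn's PoolFile
                #  model: session.query(PoolFile)
                #             .filter(PoolFile.filename.like('%/' + dsc_name)))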

                if len(ql) > 0:
                    # Unfortunately, we may get more than one match here if,
                    # for example, the package was in potato but had an -sa
                    # upload in woody.  So we need to choose the right one.

                    # default to something sane in case we don't match any or have only one
                    x = ql[0]

                    if len(ql) > 1:
                        for i in ql:
                            old_file = os.path.join(i.location.path, i.filename)
                            old_file_fh = utils.open_file(old_file)
                            actual_md5 = apt_pkg.md5sum(old_file_fh)
                            old_file_fh.close()
                            actual_size = os.stat(old_file)[stat.ST_SIZE]
                            if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
                                x = i

                    # use the match we settled on (x), not the loop variable
                    old_file = os.path.join(x.location.path, x.filename)
                    old_file_fh = utils.open_file(old_file)
                    actual_md5 = apt_pkg.md5sum(old_file_fh)
                    old_file_fh.close()
                    actual_size = os.stat(old_file)[stat.ST_SIZE]
                    found = old_file
                    suite_type = x.location.archive_type
                    # need this for updating dsc_files in install()
                    dsc_entry["files id"] = x.file_id
                    # See install() in process-accepted...
                    if not orig_files.has_key(dsc_name):
                        orig_files[dsc_name] = {}
                    orig_files[dsc_name]["id"] = x.file_id
                    orig_files[dsc_name]["path"] = old_file
                    orig_files[dsc_name]["location"] = x.location.location_id
                else:
                    # TODO: Record the queues and info in the DB so we don't hardcode all this crap
                    # Not there? Check the queue directories...
                    for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                        if not Cnf.has_key("Dir::Queue::%s" % (directory)):
                            continue
                        in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
                        if os.path.exists(in_otherdir):
                            in_otherdir_fh = utils.open_file(in_otherdir)
                            actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
                            in_otherdir_fh.close()
                            actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
                            found = in_otherdir
                            if not orig_files.has_key(dsc_name):
                                orig_files[dsc_name] = {}
                            orig_files[dsc_name]["path"] = in_otherdir

                    if not found:
                        self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
                        continue
            else:
                self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
                continue
            if actual_md5 != dsc_entry["md5sum"]:
                self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
            if actual_size != int(dsc_entry["size"]):
                self.rejects.append("size for %s doesn't match %s." % (found, filename))

    ################################################################################
    # This is used by process-new and process-holding to recheck a changes file
    # at the time we're running.  It mainly wraps various other internal functions
    # and is similar to accepted_checks - these should probably be tidied up
    # and combined
    def recheck(self, session):
        cnf = Config()
        for f in self.pkg.files.keys():
            # The .orig.tar.gz can disappear out from under us if it's a
            # duplicate of one in the archive.
            if not self.pkg.files.has_key(f):
                continue

            entry = self.pkg.files[f]

            # Check that the source still exists
            if entry["type"] == "deb":
                source_version = entry["source version"]
                source_package = entry["source package"]
                if not self.pkg.changes["architecture"].has_key("source") \
                   and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
                    source_epochless_version = re_no_epoch.sub('', source_version)
                    dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
                    found = False
                    for q in ["Embargoed", "Unembargoed", "Newstage"]:
                        if cnf.has_key("Dir::Queue::%s" % (q)):
                            if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (q)], dsc_filename)):
                                found = True
                    if not found:
                        self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))

            # Version and file overwrite checks
            if entry["type"] == "deb":
                self.check_binary_against_db(f, session)
            elif entry["type"] == "dsc":
                self.check_source_against_db(f, session)
                self.check_dsc_against_db(f, session)

    ################################################################################
    def accepted_checks(self, overwrite_checks, session):
        # Recheck anything that relies on the database, since that's not
        # frozen between accept and our run time when called from p-a.

        # overwrite_checks is set to False when installing to stable/oldstable

        propagate = {}
        nopropagate = {}

        # Find the .dsc (again)
        dsc_filename = None
        for f in self.pkg.files.keys():
            if self.pkg.files[f]["type"] == "dsc":
                dsc_filename = f

        for checkfile in self.pkg.files.keys():
            # The .orig.tar.gz can disappear out from under us if it's a
            # duplicate of one in the archive.
            if not self.pkg.files.has_key(checkfile):
                continue

            entry = self.pkg.files[checkfile]

            # Check that the source still exists
            if entry["type"] == "deb":
                source_version = entry["source version"]
                source_package = entry["source package"]
                if not self.pkg.changes["architecture"].has_key("source") \
                   and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
                    self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))

            # Version and file overwrite checks
            if overwrite_checks:
                if entry["type"] == "deb":
                    self.check_binary_against_db(checkfile, session)
                elif entry["type"] == "dsc":
                    self.check_source_against_db(checkfile, session)
                    self.check_dsc_against_db(dsc_filename, session)

            # propagate in case it is in the override tables:
            for suite in self.pkg.changes.get("propdistribution", {}).keys():
                if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
                    propagate[suite] = 1
                else:
                    nopropagate[suite] = 1

        for suite in propagate.keys():
            if suite in nopropagate:
                continue
            self.pkg.changes["distribution"][suite] = 1

        for checkfile in self.pkg.files.keys():
            entry = self.pkg.files[checkfile]
            # Check the package is still in the override tables
            for suite in self.pkg.changes["distribution"].keys():
                if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
                    self.rejects.append("%s is NEW for %s." % (checkfile, suite))

    ################################################################################
    # If any file of an upload has a recent mtime then chances are good
    # the file is still being uploaded.
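    #
    # "Recent" is measured against Dinstall::SkipTime (in seconds) from
    # dak.conf; a hypothetical setting:
    #
    #   Dinstall::SkipTime "300";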

    def upload_too_new(self):
        cnf = Config()
        too_new = False
        # Move back to the original directory to get accurate time stamps
        cwd = os.getcwd()
        os.chdir(self.pkg.directory)
        file_list = self.pkg.files.keys()
        file_list.extend(self.pkg.dsc_files.keys())
        file_list.append(self.pkg.changes_file)
        for f in file_list:
            try:
                last_modified = time.time() - os.path.getmtime(f)
                if last_modified < int(cnf["Dinstall::SkipTime"]):
                    too_new = True
                    break
            except OSError:
                # the file may have disappeared; treat it as not-too-new
                pass

        os.chdir(cwd)
        return too_new

    def store_changelog(self):

        # Skip binary-only upload if it is not a bin-NMU
        if not self.pkg.changes['architecture'].has_key('source'):
            from daklib.regexes import re_bin_only_nmu
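            # binNMU versions carry a +bN suffix (e.g. "1.2-3+b1"), which is
            # what re_bin_only_nmu is expected to match here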
            if not re_bin_only_nmu.search(self.pkg.changes['version']):
                return

        session = DBConn().session()

        # Check if upload already has a changelog entry
        query = """SELECT changelog_id FROM changes WHERE source = :source
                   AND version = :version AND architecture = :architecture AND changelog_id != 0"""
        if session.execute(query, {'source': self.pkg.changes['source'], \
                                   'version': self.pkg.changes['version'], \
                                   'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
            session.commit()
            return

        # Add current changelog text into changelogs_text table, return created ID
        query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
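        # RETURNING is PostgreSQL-specific; since dak's database is postgres,
        # it lets us fetch the new row's id without a second query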
        ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]

        # Link ID to the upload available in changes table
        query = """UPDATE changes SET changelog_id = :id WHERE source = :source
                   AND version = :version AND architecture = :architecture"""
        session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
                                'version': self.pkg.changes['version'], \
                                'architecture': " ".join(self.pkg.changes['architecture'].keys())})

        session.commit()