#!/usr/bin/env python
# vim:set et sw=4:

"""
Queue utility functions for dak

@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2001 - 2006 James Troup <james@nocrew.org>
@copyright: 2009, 2010  Joerg Jaspert <joerg@debian.org>
@license: GNU General Public License version 2 or later
"""

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

###############################################################################

import errno
import os
import stat
import sys
import time
import apt_inst
import apt_pkg
import utils
import commands
import shutil
import textwrap
from types import *
from sqlalchemy.sql.expression import desc
from sqlalchemy.orm.exc import NoResultFound

import yaml

from dak_exceptions import *
from changes import *
from regexes import *
from config import Config
from holding import Holding
from urgencylog import UrgencyLog
from dbconn import *
from summarystats import SummaryStats
from utils import parse_changes, check_dsc_files
from textutils import fix_maintainer
from binary import Binary
from lintian import parse_lintian_output, generate_reject_messages

###############################################################################
def get_type(f, session):
    """
    Get the file type of C{f}

    @type f: dict
    @param f: file entry from Changes object

    @type session: SQLA Session
    @param session: SQL Alchemy session object

    @rtype: string
    @return: filetype

    """
    # Determine the type
    if f.has_key("dbtype"):
        file_type = f["dbtype"]
    elif re_source_ext.match(f["type"]):
        file_type = "dsc"
    else:
        file_type = f["type"]
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (file_type))

    # Validate the override type
    type_id = get_override_type(file_type, session)
    if type_id is None:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))

    return file_type

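# Illustrative sketch of how get_type() resolves a file entry (the dicts
# below are hypothetical, not real upload data):
#
#     get_type({"dbtype": "udeb"}, session)   # -> "udeb"; explicit dbtype wins
#     get_type({"type": "dsc"}, session)      # -> "dsc"; re_source_ext matched
#     get_type({"type": "elf"}, session)      # calls utils.fubar() and aborts
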
################################################################################

# Determine what parts in a .changes are NEW

def determine_new(filename, changes, files, warn=1, session = None):
    """
    Determine what parts in a C{changes} file are NEW.

    @type filename: str
    @param filename: changes filename

    @type changes: Upload.Pkg.changes dict
    @param changes: Changes dictionary

    @type files: Upload.Pkg.files dict
    @param files: Files dictionary

    @type warn: bool
    @param warn: Warn if overrides are added for (old)stable

    @rtype: dict
    @return: dictionary of NEW components.

    """
    # TODO: This should all use the database instead of parsing the changes
    # file again
    new = {}
    byhand = {}

    dbchg = get_dbchange(filename, session)
    if dbchg is None:
        print "Warning: cannot find changes file in database; won't check byhand"

    # Build up a list of potentially new things
    for name, f in files.items():
        # Keep a record of byhand elements
        if f["section"] == "byhand":
            byhand[name] = 1
            continue

        pkg = f["package"]
        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f, session)
        component = f["component"]

        if file_type == "dsc":
            priority = "source"

        if not new.has_key(pkg):
            new[pkg] = {}
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        else:
            old_type = new[pkg]["type"]
            if old_type != file_type:
                # source gets trumped by deb or udeb
                if old_type == "dsc":
                    new[pkg]["priority"] = priority
                    new[pkg]["section"] = section
                    new[pkg]["type"] = file_type
                    new[pkg]["component"] = component

        new[pkg]["files"].append(name)

        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]

    # Fix up the list of target suites
    cnf = Config()
    for suite in changes["suite"].keys():
        override = cnf.Find("Suite::%s::OverrideSuite" % (suite))
        if override:
            (olderr, newerr) = (get_suite(suite, session) == None,
                                get_suite(override, session) == None)
            if olderr or newerr:
                (oinv, ninv) = ("", "")
                if olderr: oinv = "invalid "
                if newerr: ninv = "invalid "
                print "warning: overriding %ssuite %s to %ssuite %s" % (
                        oinv, suite, ninv, override)
            del changes["suite"][suite]
            changes["suite"][override] = 1

    # Check for unprocessed byhand files
    if dbchg is not None:
        for b in byhand.keys():
            # Find the file entry in the database
            found = False
            for f in dbchg.files:
                if f.filename == b:
                    found = True
                    # If it's processed, we can ignore it
                    if f.processed:
                        del byhand[b]
                    break

            if not found:
                print "Warning: Couldn't find BYHAND item %s in the database; assuming unprocessed" % (b)

    # Check for new stuff
    for suite in changes["suite"].keys():
        for pkg in new.keys():
            ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
            if len(ql) > 0:
                for file_entry in new[pkg]["files"]:
                    if files[file_entry].has_key("new"):
                        del files[file_entry]["new"]
                del new[pkg]

    if warn:
        for s in ['stable', 'oldstable']:
            if changes["suite"].has_key(s):
                print "WARNING: overrides will be added for %s!" % s
        for pkg in new.keys():
            if new[pkg].has_key("othercomponents"):
                print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])

    return new, byhand

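# Hedged sketch of the shape determine_new() returns; the names and values
# are made up for illustration:
#
#     new == {"foo": {"priority": "optional", "section": "devel",
#                     "type": "deb", "component": "main",
#                     "files": ["foo_1.0-1_amd64.deb"]}}
#     byhand == {"foo_1.0-1_byhand.tar.gz": 1}
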
################################################################################

def check_valid(new, session = None):
    """
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages can not be udeb
      - source priority can only be assigned to dsc file types

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.

    """
    for pkg in new.keys():
        section_name = new[pkg]["section"]
        priority_name = new[pkg]["priority"]
        file_type = new[pkg]["type"]

        section = get_section(section_name, session)
        if section is None:
            new[pkg]["section id"] = -1
        else:
            new[pkg]["section id"] = section.section_id

        priority = get_priority(priority_name, session)
        if priority is None:
            new[pkg]["priority id"] = -1
        else:
            new[pkg]["priority id"] = priority.priority_id

        # Sanity checks
        di = section_name.find("debian-installer") != -1

        # If d-i, we must be udeb and vice-versa
        if     (di and file_type not in ("udeb", "dsc")) or \
           (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1

        # If dsc we need to be source and vice-versa
        if (priority_name == "source" and file_type != "dsc") or \
           (priority_name != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1

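# Worked example of the sanity rules above, using hypothetical entries:
#
#     new = {"foo-udeb": {"section": "debian-installer",
#                         "priority": "optional", "type": "udeb"}}
#     check_valid(new, session)
#     # foo-udeb keeps its "section id"; a non-d-i udeb, or a .dsc whose
#     # priority is not "source", would have been flagged with -1 instead.
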
###############################################################################

# Used by Upload.check_timestamps
class TarTime(object):
    def __init__(self, future_cutoff, past_cutoff):
        self.reset()
        self.future_cutoff = future_cutoff
        self.past_cutoff = past_cutoff

    def reset(self):
        self.future_files = {}
        self.ancient_files = {}

    def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
        if MTime > self.future_cutoff:
            self.future_files[Name] = MTime
        if MTime < self.past_cutoff:
            self.ancient_files[Name] = MTime

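# Minimal usage sketch: the callback signature matches the per-member hook
# that apt_inst's old extract interface invokes, so check_timestamps can
# collect files with implausible mtimes.  The cutoff values are hypothetical:
#
#     tar = TarTime(future_cutoff=int(time.time()) + 24 * 3600,  # 1 day ahead
#                   past_cutoff=315532800)                       # 1980-01-01
#     # apt_inst.debExtract(utils.open_file(deb), tar.callback, "data.tar.gz")
#     # tar.future_files / tar.ancient_files then map names to mtimes.
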
###############################################################################

def prod_maintainer(notes, upload):
    cnf = Config()

    # Here we prepare an editor and get them ready to prod...
    (fd, temp_filename) = utils.temp_filename()
    temp_file = os.fdopen(fd, 'w')
    for note in notes:
        temp_file.write(note.comment)
    temp_file.close()
    editor = os.environ.get("EDITOR","vi")
    answer = 'E'
    while answer == 'E':
        os.system("%s %s" % (editor, temp_filename))
        temp_fh = utils.open_file(temp_filename)
        prod_message = "".join(temp_fh.readlines())
        temp_fh.close()
        print "Prod message:"
        print utils.prefix_multi_line_string(prod_message,"  ",include_blank_lines=1)
        prompt = "[P]rod, Edit, Abandon, Quit ?"
        answer = "XXX"
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            m = re_default_answer.search(prompt)
            if answer == "":
                answer = m.group(1)
            answer = answer[:1].upper()
    os.unlink(temp_filename)
    if answer == 'A':
        return
    elif answer == 'Q':
        end()
        sys.exit(0)
    # Otherwise, do the prodding...
    user_email_address = utils.whoami() + " <%s>" % (
        cnf["Dinstall::MyAdminAddress"])

    Subst = upload.Subst

    Subst["__FROM_ADDRESS__"] = user_email_address
    Subst["__PROD_MESSAGE__"] = prod_message
    Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]

    prod_mail_message = utils.TemplateSubst(
        Subst,cnf["Dir::Templates"]+"/process-new.prod")

    # Send the prod mail
    utils.send_mail(prod_mail_message)

    print "Sent prodding message"

################################################################################

def edit_note(note, upload, session):
    # Write the current data to a temporary file
    (fd, temp_filename) = utils.temp_filename()
    editor = os.environ.get("EDITOR","vi")
    answer = 'E'
    while answer == 'E':
        os.system("%s %s" % (editor, temp_filename))
        temp_file = utils.open_file(temp_filename)
        newnote = temp_file.read().rstrip()
        temp_file.close()
        print "New Note:"
        print utils.prefix_multi_line_string(newnote,"  ")
        prompt = "[D]one, Edit, Abandon, Quit ?"
        answer = "XXX"
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            m = re_default_answer.search(prompt)
            if answer == "":
                answer = m.group(1)
            answer = answer[:1].upper()
    os.unlink(temp_filename)
    if answer == 'A':
        return
    elif answer == 'Q':
        end()
        sys.exit(0)

    comment = NewComment()
    comment.package = upload.pkg.changes["source"]
    comment.version = upload.pkg.changes["version"]
    comment.comment = newnote
    comment.author  = utils.whoami()
    comment.trainee = bool(Options["Trainee"])
    session.add(comment)
    session.commit()

###############################################################################

class Upload(object):
    """
    Everything that has to do with processing an upload.

    """
    def __init__(self):
        self.logger = None
        self.pkg = Changes()
        self.reset()

    ###########################################################################

    def reset (self):
        """ Reset a number of internal variables."""

        # Initialize the substitution template map
        cnf = Config()
        self.Subst = {}
        self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
        self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
        self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
        self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]

        self.rejects = []
        self.warnings = []
        self.notes = []

        self.later_check_files = []

        self.pkg.reset()

    def package_info(self):
        """
        Format various messages from this Upload to send to the maintainer.
        """

        msgs = (
            ('Reject Reasons', self.rejects),
            ('Warnings', self.warnings),
            ('Notes', self.notes),
        )

        msg = ''
        for title, messages in msgs:
            if messages:
                msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
        msg += '\n\n'

        return msg

    ###########################################################################
    def update_subst(self):
        """ Set up the per-package template substitution mappings """

        cnf = Config()

        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not self.pkg.changes.has_key("architecture") or not \
           isinstance(self.pkg.changes["architecture"], dict):
            self.pkg.changes["architecture"] = { "Unknown" : "" }

        # and maintainer2047 may not exist.
        if not self.pkg.changes.has_key("maintainer2047"):
            self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]

        self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
        self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if self.pkg.changes["architecture"].has_key("source") and \
           self.pkg.changes["changedby822"] != "" and \
           (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):

            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
        else:
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")

        # Process policy doesn't set the fingerprint field and I don't want to make it
        # do it for now as I don't want to have to deal with the case where we accepted
        # the package into PU-NEW, but the fingerprint has gone away from the keyring in
        # the meantime so the package will be remarked as rejectable.  Urgh.
        # TODO: Fix this properly
        if self.pkg.changes.has_key('fingerprint'):
            session = DBConn().session()
            fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
            if fpr and self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
                if self.pkg.changes.has_key("sponsoremail"):
                    self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
            session.close()

        if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
            self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if cnf.get("Dinstall::OverrideMaintainer"):
            self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
            self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]

        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
        self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
        self.Subst["__SUITE__"] = ", ".join(self.pkg.changes["distribution"])

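    # Hedged sketch of how the substitution map gets consumed elsewhere in
    # dak; the template name here is illustrative:
    #
    #     upload.update_subst()
    #     mail = utils.TemplateSubst(upload.Subst,
    #                                cnf["Dir::Templates"] + "/process-new.prod")
    #     # every __KEY__ token in the template is replaced with
    #     # upload.Subst["__KEY__"] before utils.send_mail() is called.
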
    ###########################################################################
    def load_changes(self, filename):
        """
        Load a changes file and set up a dictionary around it. Also checks for
        mandatory fields within.

        @type filename: string
        @param filename: Changes filename, full path.

        @rtype: boolean
        @return: whether the changes file was valid or not.  We may want to
                 reject even if this is True (see what gets put in self.rejects).
                 This is simply to prevent us even trying things later which will
                 fail because we couldn't properly parse the file.
        """
        Cnf = Config()
        self.pkg.changes_file = filename

        # Parse the .changes file into a dictionary
        try:
            self.pkg.changes.update(parse_changes(filename))
        except CantOpenError:
            self.rejects.append("%s: can't read file." % (filename))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except ChangesUnicodeError:
            self.rejects.append("%s: changes file not proper utf-8" % (filename))
            return False

        # Parse the Files field from the .changes into another dictionary
        try:
            self.pkg.files.update(utils.build_file_list(self.pkg.changes))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (filename, format))
            return False

        # Check for mandatory fields
        for i in ("distribution", "source", "binary", "architecture",
                  "version", "maintainer", "files", "changes", "description"):
            if not self.pkg.changes.has_key(i):
                # Avoid undefined errors later
                self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
                return False

        # Strip a source version in brackets from the source field
        if re_strip_srcver.search(self.pkg.changes["source"]):
            self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])

        # Ensure the source field is a valid package name.
        if not re_valid_pkg_name.match(self.pkg.changes["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))

        # Split multi-value fields into a lower-level dictionary
        for i in ("architecture", "distribution", "binary", "closes"):
            o = self.pkg.changes.get(i, "")
            if o != "":
                del self.pkg.changes[i]

            self.pkg.changes[i] = {}

            for j in o.split():
                self.pkg.changes[i][j] = 1

        # Fix the Maintainer: field to be RFC822/2047 compatible
        try:
            (self.pkg.changes["maintainer822"],
             self.pkg.changes["maintainer2047"],
             self.pkg.changes["maintainername"],
             self.pkg.changes["maintaineremail"]) = \
                   fix_maintainer (self.pkg.changes["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["maintainer"], msg))

        # ...likewise for the Changed-By: field if it exists.
        try:
            (self.pkg.changes["changedby822"],
             self.pkg.changes["changedby2047"],
             self.pkg.changes["changedbyname"],
             self.pkg.changes["changedbyemail"]) = \
                   fix_maintainer (self.pkg.changes.get("changed-by", ""))
        except ParseMaintError, msg:
            self.pkg.changes["changedby822"] = ""
            self.pkg.changes["changedby2047"] = ""
            self.pkg.changes["changedbyname"] = ""
            self.pkg.changes["changedbyemail"] = ""

            self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["changed-by"], msg))

        # Ensure all the values in Closes: are numbers
        if self.pkg.changes.has_key("closes"):
            for i in self.pkg.changes["closes"].keys():
                if re_isanum.match (i) == None:
                    self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))

        # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
        self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
        self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])

        # Check the .changes is non-empty
        if not self.pkg.files:
            self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
            return False

        # Changes was syntactically valid even if we'll reject
        return True

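    # Minimal usage sketch (the path is hypothetical); callers must inspect
    # self.rejects afterwards, because True only means "parsed cleanly":
    #
    #     u = Upload()
    #     if u.load_changes("/srv/queue/unchecked/foo_1.0-1_amd64.changes"):
    #         print u.pkg.changes["source"], u.pkg.changes["version"]
    #     else:
    #         print "\n".join(u.rejects)
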
    ###########################################################################

    def check_distributions(self):
        "Check and map the Distribution field"

        Cnf = Config()

        # Handle suite mappings
        for m in Cnf.ValueList("SuiteMappings"):
            args = m.split()
            mtype = args[0]
            if mtype == "map" or mtype == "silent-map":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    del self.pkg.changes["distribution"][source]
                    self.pkg.changes["distribution"][dest] = 1
                    if mtype != "silent-map":
                        self.notes.append("Mapping %s to %s." % (source, dest))
                if self.pkg.changes.has_key("distribution-version"):
                    if self.pkg.changes["distribution-version"].has_key(source):
                        self.pkg.changes["distribution-version"][source] = dest
            elif mtype == "map-unreleased":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    for arch in self.pkg.changes["architecture"].keys():
                        if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
                            self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
                            del self.pkg.changes["distribution"][source]
                            self.pkg.changes["distribution"][dest] = 1
                            break
            elif mtype == "ignore":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    del self.pkg.changes["distribution"][suite]
                    self.warnings.append("Ignoring %s as a target suite." % (suite))
            elif mtype == "reject":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    self.rejects.append("Uploads to %s are not accepted." % (suite))
            elif mtype == "propup-version":
                # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
                #
                # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
                if self.pkg.changes["distribution"].has_key(args[1]):
                    self.pkg.changes.setdefault("distribution-version", {})
                    for suite in args[2:]:
                        self.pkg.changes["distribution-version"][suite] = suite

        # Ensure there is (still) a target distribution
        if len(self.pkg.changes["distribution"].keys()) < 1:
            self.rejects.append("No valid distribution remaining.")

        # Ensure target distributions exist
        for suite in self.pkg.changes["distribution"].keys():
            if not Cnf.has_key("Suite::%s" % (suite)):
                self.rejects.append("Unknown distribution `%s'." % (suite))

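    # Illustrative SuiteMappings entries as they might appear in dak.conf;
    # the suite names are examples, not the archive's actual configuration:
    #
    #     SuiteMappings
    #     {
    #       "map stable proposed-updates";
    #       "silent-map testing-security testing";
    #       "ignore broken-suite";
    #       "propup-version testing-security testing testing-proposed-updates";
    #     };
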
    ###########################################################################

    def binary_file_checks(self, f, session):
        cnf = Config()
        entry = self.pkg.files[f]

        # Extract package control information
        deb_file = utils.open_file(f)
        try:
            control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
        except:
            self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
            deb_file.close()
            # Can't continue, none of the checks on control would work.
            return
        # Check for mandatory "Description:"
        deb_file.seek(0)
        try:
            apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
        except:
            self.rejects.append("%s: Missing Description in binary package" % (f))
            return

        deb_file.close()

        # Check for mandatory fields
        for field in [ "Package", "Architecture", "Version" ]:
            if control.Find(field) == None:
                # Can't continue
                self.rejects.append("%s: No %s field in control." % (f, field))
                return

        # Ensure the package name matches the one given in the .changes
        if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
            self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))

        # Validate the package field
        package = control.Find("Package")
        if not re_valid_pkg_name.match(package):
            self.rejects.append("%s: invalid package name '%s'." % (f, package))

        # Validate the version field
        version = control.Find("Version")
        if not re_valid_version.match(version):
            self.rejects.append("%s: invalid version number '%s'." % (f, version))

        # Ensure the architecture of the .deb is one we know about.
        default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
        architecture = control.Find("Architecture")
        upload_suite = self.pkg.changes["distribution"].keys()[0]

        if      architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session = session)] \
            and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session = session)]:
            self.rejects.append("Unknown architecture '%s'." % (architecture))

        # Ensure the architecture of the .deb is one of the ones
        # listed in the .changes.
        if not self.pkg.changes["architecture"].has_key(architecture):
            self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))

        # Sanity-check the Depends field
        depends = control.Find("Depends")
        if depends == '':
            self.rejects.append("%s: Depends field is empty." % (f))

        # Sanity-check the Provides field
        provides = control.Find("Provides")
        if provides:
            provide = re_spacestrip.sub('', provides)
            if provide == '':
                self.rejects.append("%s: Provides field is empty." % (f))
            prov_list = provide.split(",")
            for prov in prov_list:
                if not re_valid_pkg_name.match(prov):
                    self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))

        # Check the section & priority match those given in the .changes (non-fatal)
        if     control.Find("Section") and entry["section"] != "" \
           and entry["section"] != control.Find("Section"):
            self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
                                (f, control.Find("Section", ""), entry["section"]))
        if control.Find("Priority") and entry["priority"] != "" \
           and entry["priority"] != control.Find("Priority"):
            self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
                                (f, control.Find("Priority", ""), entry["priority"]))

        entry["package"] = package
        entry["architecture"] = architecture
        entry["version"] = version
        entry["maintainer"] = control.Find("Maintainer", "")

        if f.endswith(".udeb"):
            self.pkg.files[f]["dbtype"] = "udeb"
        elif f.endswith(".deb"):
            self.pkg.files[f]["dbtype"] = "deb"
        else:
            self.rejects.append("%s is neither a .deb nor a .udeb." % (f))

        entry["source"] = control.Find("Source", entry["package"])

        # Get the source version
        source = entry["source"]
        source_version = ""

        if source.find("(") != -1:
            m = re_extract_src_version.match(source)
            source = m.group(1)
            source_version = m.group(2)

        if not source_version:
            source_version = self.pkg.files[f]["version"]

        entry["source package"] = source
        entry["source version"] = source_version

        # Ensure the filename matches the contents of the .deb
        m = re_isadeb.match(f)

        #  package name
        file_package = m.group(1)
        if entry["package"] != file_package:
            self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
                                (f, file_package, entry["dbtype"], entry["package"]))
        epochless_version = re_no_epoch.sub('', control.Find("Version"))

        #  version
        file_version = m.group(2)
        if epochless_version != file_version:
            self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
                                (f, file_version, entry["dbtype"], epochless_version))

        #  architecture
        file_architecture = m.group(3)
        if entry["architecture"] != file_architecture:
            self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
                                (f, file_architecture, entry["dbtype"], entry["architecture"]))

        # Check for existing source
        source_version = entry["source version"]
        source_package = entry["source package"]
        if self.pkg.changes["architecture"].has_key("source"):
            if source_version != self.pkg.changes["version"]:
                self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
                                    (source_version, f, self.pkg.changes["version"]))
        else:
            # Check in the SQL database
            if not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
                # Check in one of the other directories
                source_epochless_version = re_no_epoch.sub('', source_version)
                dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
                if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
                    entry["byhand"] = 1
                elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
                    entry["new"] = 1
                else:
                    dsc_file_exists = False
                    for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates", "Lenny-Volatile-Proposed-Updates"]:
                        if cnf.has_key("Dir::Queue::%s" % (myq)):
                            if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
                                dsc_file_exists = True
                                break

                    if not dsc_file_exists:
                        self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))

        # Check the version and for file overwrites
        self.check_binary_against_db(f, session)

        # Temporarily disable contents generation until we change the table storage layout
        #b = Binary(f)
        #b.scan_package()
        #if len(b.rejects) > 0:
        #    for j in b.rejects:
        #        self.rejects.append(j)

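    # Worked example of the Source-field handling above (values hypothetical):
    # a binary built from a differently-versioned source carries both in one
    # field, which re_extract_src_version splits apart:
    #
    #     m = re_extract_src_version.match("foo (1.2-1)")
    #     m.group(1), m.group(2)   # -> ("foo", "1.2-1")
    #
    # A plain "Source: foo" leaves source_version empty, so it falls back to
    # the binary's own version.
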
    def source_file_checks(self, f, session):
        entry = self.pkg.files[f]

        m = re_issource.match(f)
        if not m:
            return

        entry["package"] = m.group(1)
        entry["version"] = m.group(2)
        entry["type"] = m.group(3)

        # Ensure the source package name matches the Source field in the .changes
        if self.pkg.changes["source"] != entry["package"]:
            self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))

        # Ensure the source version matches the version in the .changes file
        if re_is_orig_source.match(f):
            changes_version = self.pkg.changes["chopversion2"]
        else:
            changes_version = self.pkg.changes["chopversion"]

        if changes_version != entry["version"]:
            self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))

        # Ensure the .changes lists source in the Architecture field
        if not self.pkg.changes["architecture"].has_key("source"):
            self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))

        # Check the signature of a .dsc file
        if entry["type"] == "dsc":
            # check_signature returns either:
            #  (None, [list, of, rejects]) or (signature, [])
            (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
            for j in rejects:
                self.rejects.append(j)

        entry["architecture"] = "source"

    def per_suite_file_checks(self, f, suite, session):
        cnf = Config()
        entry = self.pkg.files[f]

        # Skip byhand
        if entry.has_key("byhand"):
            return

        # Check we have fields we need to do these checks
        oktogo = True
        for m in ['component', 'package', 'priority', 'size', 'md5sum']:
            if not entry.has_key(m):
                self.rejects.append("file '%s' does not have field %s set" % (f, m))
                oktogo = False

        if not oktogo:
            return

        # Handle component mappings
        for m in cnf.ValueList("ComponentMappings"):
            (source, dest) = m.split()
            if entry["component"] == source:
                entry["original component"] = source
                entry["component"] = dest

        # Ensure the component is valid for the target suite
        if cnf.has_key("Suite::%s::Components" % (suite)) and \
           entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
            self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
            return

        # Validate the component
        if not get_component(entry["component"], session):
            self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
            return

        # See if the package is NEW
        if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
            entry["new"] = 1

        # Validate the priority
        if entry["priority"].find('/') != -1:
            self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))

        # Determine the location
        location = cnf["Dir::Pool"]
        l = get_location(location, entry["component"], session=session)
        if l is None:
            self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
            entry["location id"] = -1
        else:
            entry["location id"] = l.location_id

        # Check the md5sum & size against existing files (if any)
        entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])

        found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
                                         entry["size"], entry["md5sum"], entry["location id"])

        if found is None:
            self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
        elif found is False and poolfile is not None:
            self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
        else:
            if poolfile is None:
                entry["files id"] = None
            else:
                entry["files id"] = poolfile.file_id

        # Check for packages that have moved from one component to another
        entry['suite'] = suite
        res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
        if res.rowcount > 0:
            entry["othercomponents"] = res.fetchone()[0]

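    # Illustrative ComponentMappings entries as they might appear in dak.conf
    # (the old non-US fold-in is shown as an example):
    #
    #     ComponentMappings
    #     {
    #       "non-US/main main";
    #       "non-US/contrib contrib";
    #       "non-US/non-free non-free";
    #     };
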
    def check_files(self, action=True):
        file_keys = self.pkg.files.keys()
        holding = Holding()
        cnf = Config()

        if action:
            cwd = os.getcwd()
            os.chdir(self.pkg.directory)
            for f in file_keys:
                ret = holding.copy_to_holding(f)
                if ret is not None:
                    self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)

            os.chdir(cwd)

        # check we already know the changes file
        # [NB: this check must be done post-suite mapping]
        base_filename = os.path.basename(self.pkg.changes_file)

        session = DBConn().session()

        try:
            dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
            # if in the pool or in a queue other than unchecked, reject
            if (dbc.in_queue is None) \
                   or (dbc.in_queue is not None
                       and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
                self.rejects.append("%s file already known to dak" % base_filename)
        except NoResultFound, e:
            # not known, good
            pass

        has_binaries = False
        has_source = False

        for f, entry in self.pkg.files.items():
            # Ensure the file does not already exist in one of the accepted directories
            for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                if not cnf.has_key("Dir::Queue::%s" % (d)): continue
                if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
                    self.rejects.append("%s file already exists in the %s directory." % (f, d))

            if not re_taint_free.match(f):
                self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))

            # Check the file is readable
            if os.access(f, os.R_OK) == 0:
                # When running in -n, copy_to_holding() won't have
                # generated the reject_message, so we need to.
                if action:
                    if os.path.exists(f):
                        self.rejects.append("Can't read `%s'. [permission denied]" % (f))
                    else:
                        # Don't directly reject, mark to check later to deal with orig's
                        # we can find in the pool
                        self.later_check_files.append(f)
                entry["type"] = "unreadable"
                continue

            # If it's byhand skip remaining checks
            if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Checks for a binary package...
            elif re_isadeb.match(f):
                has_binaries = True
                entry["type"] = "deb"

                # This routine appends to self.rejects/warnings as appropriate
                self.binary_file_checks(f, session)

            # Checks for a source package...
            elif re_issource.match(f):
                has_source = True

                # This routine appends to self.rejects/warnings as appropriate
                self.source_file_checks(f, session)

            # Not a binary or source package?  Assume byhand...
            else:
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Per-suite file checks
            entry["oldfiles"] = {}
            for suite in self.pkg.changes["distribution"].keys():
                self.per_suite_file_checks(f, suite, session)

        session.close()

        # If the .changes file says it has source, it must have source.
        if self.pkg.changes["architecture"].has_key("source"):
            if not has_source:
                self.rejects.append("no source found and Architecture line in changes mentions source.")

            if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
                self.rejects.append("source only uploads are not supported.")

    ###########################################################################
    def check_dsc(self, action=True, session=None):
        """Returns bool indicating whether or not the source changes are valid"""
        # Ensure there is source to check
        if not self.pkg.changes["architecture"].has_key("source"):
            return True

        # Find the .dsc
        dsc_filename = None
        for f, entry in self.pkg.files.items():
            if entry["type"] == "dsc":
                if dsc_filename:
                    self.rejects.append("can not process a .changes file with multiple .dsc's.")
                    return False
                else:
                    dsc_filename = f

        # If there isn't one, reject: source uploads must include a .dsc.
        if not dsc_filename:
            self.rejects.append("source uploads must contain a dsc file")
            return False

        # Parse the .dsc file
        try:
            self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1, dsc_file=1))
        except CantOpenError:
            # if not -n copy_to_holding() will have done this for us...
            if not action:
                self.rejects.append("%s: can't read file." % (dsc_filename))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
        except InvalidDscError, line:
            self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
        except ChangesUnicodeError:
            self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))

        # Build up the file list of files mentioned by the .dsc
        try:
            self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
        except NoFilesFieldError:
            self.rejects.append("%s: no Files: field." % (dsc_filename))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
            return False

        # Enforce mandatory fields
        for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
            if not self.pkg.dsc.has_key(i):
                self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
                return False

        # Validate the source and version fields
        if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
        if not re_valid_version.match(self.pkg.dsc["version"]):
            self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))

        # Only a limited list of source formats are allowed in each suite
        for dist in self.pkg.changes["distribution"].keys():
            allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
            if self.pkg.dsc["format"] not in allowed:
                self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))

        # Validate the Maintainer field
        try:
            # We ignore the return value
            fix_maintainer(self.pkg.dsc["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                                 % (dsc_filename, self.pkg.dsc["maintainer"], msg))

        # Validate the build-depends field(s)
        for field_name in [ "build-depends", "build-depends-indep" ]:
            field = self.pkg.dsc.get(field_name)
            if field:
                # Have apt try to parse them...
                try:
                    apt_pkg.ParseSrcDepends(field)
                except:
                    self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))

        # Ensure the version number in the .dsc matches the version number in the .changes
        epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        changes_version = self.pkg.files[dsc_filename]["version"]

        if epochless_dsc_version != changes_version:
            self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))

        # Ensure the Files field contains only what's expected
        self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))

        # Ensure source is newer than existing source in target suites
        session = DBConn().session()
        self.check_source_against_db(dsc_filename, session)
        self.check_dsc_against_db(dsc_filename, session)

        dbchg = get_dbchange(self.pkg.changes_file, session)

        # Finally, check if we're missing any files
        for f in self.later_check_files:
            print 'XXX: %s' % f
            # Check if we've already processed this file if we have a dbchg object
            ok = False
            if dbchg:
                for pf in dbchg.files:
                    if pf.filename == f and pf.processed:
                        self.notes.append('%s was already processed so we can go ahead' % f)
                        ok = True
                        del self.pkg.files[f]
            if not ok:
                self.rejects.append("Could not find file %s referenced in changes" % f)

        session.close()

        return True

    ###########################################################################

    def get_changelog_versions(self, source_dir):
        """Extracts the source package and (optionally) grabs the
        version history out of debian/changelog for the BTS."""

        cnf = Config()

        # Find the .dsc (again)
        dsc_filename = None
        for f in self.pkg.files.keys():
            if self.pkg.files[f]["type"] == "dsc":
                dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            return

        # Create a symlink mirror of the source files in our temporary directory
        for f in self.pkg.files.keys():
            m = re_issource.match(f)
            if m:
                src = os.path.join(source_dir, f)
                # If a file is missing for whatever reason, give up.
                if not os.path.exists(src):
                    return
                ftype = m.group(3)
                if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
                   self.pkg.orig_files[f].has_key("path"):
                    continue
                dest = os.path.join(os.getcwd(), f)
                os.symlink(src, dest)

        # If the orig files are not a part of the upload, create symlinks to the
        # existing copies.
        for orig_file in self.pkg.orig_files.keys():
            if not self.pkg.orig_files[orig_file].has_key("path"):
                continue
            dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
            os.symlink(self.pkg.orig_files[orig_file]["path"], dest)

        # Extract the source
        cmd = "dpkg-source -sn -x %s" % (dsc_filename)
        (result, output) = commands.getstatusoutput(cmd)
        if (result != 0):
            self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
            self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
            return

        if not cnf.Find("Dir::Queue::BTSVersionTrack"):
            return

        # Get the upstream version
        upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        if re_strip_revision.search(upstr_version):
            upstr_version = re_strip_revision.sub('', upstr_version)

        # Ensure the changelog file exists
        changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
        if not os.path.exists(changelog_filename):
            self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
            return

        # Parse the changelog
        self.pkg.dsc["bts changelog"] = ""
        changelog_file = utils.open_file(changelog_filename)
        for line in changelog_file.readlines():
            m = re_changelog_versions.match(line)
            if m:
                self.pkg.dsc["bts changelog"] += line
        changelog_file.close()

        # Check we found at least one revision in the changelog
        if not self.pkg.dsc["bts changelog"]:
            self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
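
    # Worked example of the version handling above (values hypothetical):
    # for a .dsc Version of "1:1.2-3", re_no_epoch yields "1.2-3" and
    # re_strip_revision then yields "1.2", so for source package "foo" the
    # changelog is expected at "foo-1.2/debian/changelog".
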
    def check_source(self):
        # Bail out if:
        #    a) there's no source
        if not self.pkg.changes["architecture"].has_key("source"):
            return

        tmpdir = utils.temp_dirname()

        # Move into the temporary directory
        cwd = os.getcwd()
        os.chdir(tmpdir)

        # Get the changelog version history
        self.get_changelog_versions(cwd)

        # Move back and cleanup the temporary tree
        os.chdir(cwd)

        try:
            shutil.rmtree(tmpdir)
        except OSError, e:
            if e.errno != errno.EACCES:
                utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1255
1256             self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1257             # We probably have directories lacking u+r or u+w permission, so
1258             # chmod everything and try again.
1259             cmd = "chmod -R u+rwx %s" % (tmpdir)
1260             result = os.system(cmd)
1261             if result != 0:
1262                 utils.fubar("'%s' failed with result %s." % (cmd, result))
1263             shutil.rmtree(tmpdir)
1264         except Exception, e:
1265             print "Unexpected error removing %s: %s" % (tmpdir, e)
1266             utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1267
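    # Editorial sketch (not part of the original code) of the cleanup strategy
    # above: if rmtree() fails with EACCES we assume unreadable/unwritable
    # directories, chmod the tree u+rwx and retry once.
    #
    #     import errno, os, shutil
    #     def remove_tree(path):
    #         try:
    #             shutil.rmtree(path)
    #         except OSError, e:
    #             if e.errno != errno.EACCES:
    #                 raise
    #             if os.system("chmod -R u+rwx %s" % path) != 0:
    #                 raise
    #             shutil.rmtree(path)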
1268     ###########################################################################
1269     def ensure_hashes(self):
1270         # Make sure we recognise the format of the Files: field in the .changes
1271         format = self.pkg.changes.get("format", "0.0").split(".", 1)
1272         if len(format) == 2:
1273             format = int(format[0]), int(format[1])
1274         else:
1275             format = int(float(format[0])), 0
1276
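        # Editorial worked example: "1.8".split(".", 1) gives ["1", "8"] and
        # hence the tuple (1, 8), while a bare "2" gives ["2"] and hence
        # (int(float("2")), 0) == (2, 0).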
1277         # We need to deal with the original changes blob, as the fields we need
1278         # might not be in the changes dict serialised into the .dak anymore.
1279         orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1280
1281         # Copy the checksums over to the current changes dict.  This will keep
1282         # the existing modifications to it intact.
1283         for field in orig_changes:
1284             if field.startswith('checksums-'):
1285                 self.pkg.changes[field] = orig_changes[field]
1286
1287         # Check for unsupported hashes
1288         for j in utils.check_hash_fields(".changes", self.pkg.changes):
1289             self.rejects.append(j)
1290
1291         for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1292             self.rejects.append(j)
1293
1294         # If the changes format predates the version in which a hash appeared,
1295         # we calculate it ourselves rather than require it in the changes file
1296         for hashname, hashfunc, version in utils.known_hashes:
1297             # TODO: Move _ensure_changes_hash into this class
1298             for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1299                 self.rejects.append(j)
1300             if "source" in self.pkg.changes["architecture"]:
1301                 # TODO: Move _ensure_dsc_hash into this class
1302                 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1303                     self.rejects.append(j)
1304
1305     def check_hashes(self):
1306         for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1307             self.rejects.append(m)
1308
1309         for m in utils.check_size(".changes", self.pkg.files):
1310             self.rejects.append(m)
1311
1312         for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1313             self.rejects.append(m)
1314
1315         for m in utils.check_size(".dsc", self.pkg.dsc_files):
1316             self.rejects.append(m)
1317
1318         self.ensure_hashes()
1319
1320     ###########################################################################
1321
1322     def ensure_orig(self, target_dir='.', session=None):
1323         """
1324         Ensures that all orig files mentioned in the changes file are present
1325         in target_dir. If they do not exist, they are symlinked into place.
1326
1327         A list containing the symlinks that were created is returned (so they
1328         can be removed later).
1329         """
1330
1331         symlinked = []
1332         cnf = Config()
1333
1334         for filename, entry in self.pkg.dsc_files.iteritems():
1335             if not re_is_orig_source.match(filename):
1336                 # File is not an orig; ignore
1337                 continue
1338
1339             if os.path.exists(filename):
1340                 # File exists, no need to continue
1341                 continue
1342
1343             def symlink_if_valid(path):
1344                 f = utils.open_file(path)
1345                 md5sum = apt_pkg.md5sum(f)
1346                 f.close()
1347
1348                 fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
1349                 expected = (int(entry['size']), entry['md5sum'])
1350
1351                 if fingerprint != expected:
1352                     return False
1353
1354                 dest = os.path.join(target_dir, filename)
1355
1356                 os.symlink(path, dest)
1357                 symlinked.append(dest)
1358
1359                 return True
1360
1361             session_ = session
1362             if session is None:
1363                 session_ = DBConn().session()
1364
1365             found = False
1366
1367             # Look in the pool
1368             for poolfile in get_poolfile_like_name(filename, session_):
1369                 poolfile_path = os.path.join(
1370                     poolfile.location.path, poolfile.filename
1371                 )
1372
1373                 if symlink_if_valid(poolfile_path):
1374                     found = True
1375                     break
1376
1377             if session is None:
1378                 session_.close()
1379
1380             if found:
1381                 continue
1382
1383             # Look in some other queues for the file
1384             queues = ('New', 'Byhand', 'ProposedUpdates',
1385                 'OldProposedUpdates', 'Embargoed', 'Unembargoed')
1386
1387             for queue in queues:
1388                 if not cnf.get('Dir::Queue::%s' % queue):
1389                     continue
1390
1391                 queuefile_path = os.path.join(
1392                     cnf['Dir::Queue::%s' % queue], filename
1393                 )
1394
1395                 if not os.path.exists(queuefile_path):
1396                     # Does not exist in this queue
1397                     continue
1398
1399                 if symlink_if_valid(queuefile_path):
1400                     break
1401
1402         return symlinked
1403
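    # Editorial usage sketch (mirroring what check_lintian() below actually
    # does): callers are expected to remove the returned symlinks once they
    # are done with the orig files.  'upload' here is a hypothetical instance:
    #
    #     symlinked = upload.ensure_orig()
    #     try:
    #         pass  # ... run whatever needs the orig files in place ...
    #     finally:
    #         for link in symlinked:
    #             os.unlink(link)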
1404     ###########################################################################
1405
1406     def check_lintian(self):
1407         """
1408         Extends self.rejects by checking the output of lintian against tags
1409         specified in Dinstall::LintianTags.
1410         """
1411
1412         cnf = Config()
1413
1414         # Don't reject binary uploads
1415         if not self.pkg.changes['architecture'].has_key('source'):
1416             return
1417
1418         # Only check some distributions
1419         for dist in ('unstable', 'experimental'):
1420             if dist in self.pkg.changes['distribution']:
1421                 break
1422         else:
1423             return
1424
1425         # If we do not have a tagfile, don't do anything
1426         tagfile = cnf.get("Dinstall::LintianTags")
1427         if tagfile is None:
1428             return
1429
1430         # Parse the yaml file
1431         sourcefile = file(tagfile, 'r')
1432         sourcecontent = sourcefile.read()
1433         sourcefile.close()
1434
1435         try:
1436             lintiantags = yaml.load(sourcecontent)['lintian']
1437         except yaml.YAMLError, msg:
1438             utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
1439             return
1440
1441         # Try and find all orig files mentioned in the .dsc
1442         symlinked = self.ensure_orig()
1443
1444         # Setup the input file for lintian
1445         fd, temp_filename = utils.temp_filename()
1446         temptagfile = os.fdopen(fd, 'w')
1447         for tags in lintiantags.values():
1448             temptagfile.writelines(['%s\n' % x for x in tags])
1449         temptagfile.close()
1450
1451         try:
1452             cmd = "lintian --show-overrides --tags-from-file %s %s" % \
1453                 (temp_filename, self.pkg.changes_file)
1454
1455             result, output = commands.getstatusoutput(cmd)
1456         finally:
1457             # Remove our tempfile and any symlinks we created
1458             os.unlink(temp_filename)
1459
1460             for symlink in symlinked:
1461                 os.unlink(symlink)
1462
1463         if result == 2:
1464             utils.warn("lintian failed for %s [return code: %s]." % \
1465                 (self.pkg.changes_file, result))
1466             utils.warn(utils.prefix_multi_line_string(output, \
1467                 " [possible output:] "))
1468
1469         def log(*txt):
1470             if self.logger:
1471                 self.logger.log(
1472                     [self.pkg.changes_file, "check_lintian"] + list(txt)
1473                 )
1474
1475         # Generate messages
1476         parsed_tags = parse_lintian_output(output)
1477         self.rejects.extend(
1478             generate_reject_messages(parsed_tags, lintiantags, log=log)
1479         )
1480
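    # Editorial note: the file named by Dinstall::LintianTags is YAML with a
    # top-level 'lintian' key mapping tag groups to lists of lintian tag
    # names; the group and tag names below are purely illustrative:
    #
    #     lintian:
    #       fatal:
    #         - binary-in-etc
    #       nonfatal:
    #         - ancient-standards-version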
1481     ###########################################################################
1482     def check_urgency(self):
1483         cnf = Config()
1484         if self.pkg.changes["architecture"].has_key("source"):
1485             if not self.pkg.changes.has_key("urgency"):
1486                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1487             self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1488             if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1489                 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1490                                      (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1491                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1492
1493     ###########################################################################
1494
1495     # Sanity check the time stamps of files inside debs.
1496     # [Files in the near future cause ugly warnings and extreme time
1497     #  travel can cause errors on extraction]
1498
1499     def check_timestamps(self):
1500         Cnf = Config()
1501
1502         future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1503         past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1504         tar = TarTime(future_cutoff, past_cutoff)
1505
1506         for filename, entry in self.pkg.files.items():
1507             if entry["type"] == "deb":
1508                 tar.reset()
1509                 try:
1510                     deb_file = utils.open_file(filename)
1511                     apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
1512                     deb_file.seek(0)
1513                     try:
1514                         apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
1515                     except SystemError, e:
1516                         # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
1517                         if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
1518                             raise
1519                         deb_file.seek(0)
1520                         apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")
1521
1522                     deb_file.close()
1523
1524                     future_files = tar.future_files.keys()
1525                     if future_files:
1526                         num_future_files = len(future_files)
1527                         future_file = future_files[0]
1528                         future_date = tar.future_files[future_file]
1529                         self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1530                                % (filename, num_future_files, future_file, time.ctime(future_date)))
1531
1532                     ancient_files = tar.ancient_files.keys()
1533                     if ancient_files:
1534                         num_ancient_files = len(ancient_files)
1535                         ancient_file = ancient_files[0]
1536                         ancient_date = tar.ancient_files[ancient_file]
1537                         self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1538                                % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1539                 except:
1540                     self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1541
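    # Editorial worked example: with Dinstall::FutureTimeTravelGrace set to
    # 86400 (one day) and Dinstall::PastCutoffYear set to "1975", any member
    # timestamped more than a day ahead of the wall clock, or before
    # 1975-01-01 00:00 local time, triggers a reject:
    #
    #     future_cutoff = time.time() + 86400
    #     past_cutoff = time.mktime(time.strptime("1975", "%Y"))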
1542     def check_if_upload_is_sponsored(self, uid_email, uid_name):
1543         if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1544             sponsored = False
1545         elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1546             sponsored = False
1547             if uid_name == "":
1548                 sponsored = True
1549         else:
1550             sponsored = True
1551             if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1552                 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1553                 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1554                     self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1555                     self.pkg.changes["sponsoremail"] = uid_email
1556
1557         return sponsored
1558
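    # Editorial summary of the decision above (not in the original source):
    #
    #     uid email matches Maintainer or Changed-By  -> not sponsored
    #     uid name matches Maintainer or Changed-By   -> not sponsored
    #         (but an empty uid name never counts)    -> sponsored
    #     no match at all                             -> sponsored
    #
    # For sponsored sourceful uploads signed with an email alias, the
    # sponsor's address is recorded in changes["sponsoremail"] when it is
    # neither the Maintainer nor the Changed-By address.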
1559
1560     ###########################################################################
1561     # check_signed_by_key checks
1562     ###########################################################################
1563
1564     def check_signed_by_key(self):
1565         """Ensure the .changes is signed by an authorized uploader."""
1566         session = DBConn().session()
1567
1568         # First of all we check that the person has proper upload permissions
1569         # and that this upload isn't blocked
1570         fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1571
1572         if fpr is None:
1573             self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1574             return
1575
1576         # TODO: Check that import-keyring adds UIDs properly
1577         if not fpr.uid:
1578             self.rejects.append("Cannot find uid for fingerprint %s.  Please contact ftpmaster@debian.org" % fpr.fingerprint)
1579             return
1580
1581         # Check that the fingerprint which uploaded has permission to do so
1582         self.check_upload_permissions(fpr, session)
1583
1584         # Check that this package is not in a transition
1585         self.check_transition(session)
1586
1587         session.close()
1588
1589
1590     def check_upload_permissions(self, fpr, session):
1591         # Check any one-off upload blocks
1592         self.check_upload_blocks(fpr, session)
1593
1594         # Start with DM, which is unfortunately a special case and has to
1595         # be checked first
1596         # (keys with no source access get more access than DMs in one
1597         #  way; DMs can only upload for their packages whether source
1598         #  or binary, whereas keys with no access might be able to
1599         #  upload some binaries)
1600         if fpr.source_acl.access_level == 'dm':
1601             self.check_dm_upload(fpr, session)
1602         else:
1603             # Check source-based permissions for other types
1604             if self.pkg.changes["architecture"].has_key("source") and \
1605                 fpr.source_acl.access_level is None:
1606                 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1607                 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1608                 self.rejects.append(rej)
1609                 return
1610             # If not a DM, we allow full upload rights
1611             uid_email = "%s@debian.org" % (fpr.uid.uid)
1612             self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1613
1614
1615         # Check binary upload permissions
1616         # By this point we know that DMs can't have got here unless they
1617         # are allowed to deal with the package concerned so just apply
1618         # normal checks
1619         if fpr.binary_acl.access_level == 'full':
1620             return
1621
1622         # Otherwise we're in the map case
1623         tmparches = self.pkg.changes["architecture"].copy()
1624         tmparches.pop('source', None)
1625
1626         for bam in fpr.binary_acl_map:
1627             tmparches.pop(bam.architecture.arch_string, None)
1628
1629         if len(tmparches.keys()) > 0:
1630             if fpr.binary_reject:
1631                 rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1632                 rej += "\narchitectures involved are: %s" % ",".join(tmparches.keys())
1633                 self.rejects.append(rej)
1634             else:
1635                 # TODO: This is where we'll implement reject vs throw away binaries later
1636                 rej = "Uhm.  I'm meant to throw away the binaries now but that's not implemented yet"
1637                 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1638                 rej += "\nFingerprint: %s" % (fpr.fingerprint)
1639                 self.rejects.append(rej)
1640
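    # Editorial worked example for the binary ACL map above: if the .changes
    # covers {source, amd64, armel} and the fingerprint's binary_acl_map only
    # lists amd64, then after popping 'source' and the mapped architectures
    # tmparches still holds 'armel', so the upload is rejected (or, once
    # implemented, the stray binaries would be thrown away).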
1641
1642     def check_upload_blocks(self, fpr, session):
1643         """Check whether any upload blocks apply to this source, source
1644            version, uid / fpr combination"""
1645
1646         def block_rej_template(fb):
1647             rej = 'Manual upload block in place for package %s' % fb.source
1648             if fb.version is not None:
1649                 rej += ', version %s' % fb.version
1650             return rej
1651
1652         for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1653             # version is None if the block applies to all versions
1654             if fb.version is None or fb.version == self.pkg.changes['version']:
1655                 # Check both fpr and uid - either is enough to cause a reject
1656                 if fb.fpr is not None:
1657                     if fb.fpr.fingerprint == fpr.fingerprint:
1658                         self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1659                 if fb.uid is not None:
1660                     if fb.uid == fpr.uid:
1661                         self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1662
1663
1664     def check_dm_upload(self, fpr, session):
1665         # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
1666         ## none of the uploaded packages are NEW
1667         rej = False
1668         for f in self.pkg.files.keys():
1669             if self.pkg.files[f].has_key("byhand"):
1670                 self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
1671                 rej = True
1672             if self.pkg.files[f].has_key("new"):
1673                 self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
1674                 rej = True
1675
1676         if rej:
1677             return
1678
1679         ## the most recent version of the package uploaded to unstable or
1680         ## experimental includes the field "DM-Upload-Allowed: yes" in the source
1681         ## section of its control file
1682         q = session.query(DBSource).filter_by(source=self.pkg.changes["source"])
1683         q = q.join(SrcAssociation)
1684         q = q.join(Suite).filter(Suite.suite_name.in_(['unstable', 'experimental']))
1685         q = q.order_by(desc('source.version')).limit(1)
1686
1687         r = q.all()
1688
1689         if len(r) != 1:
1690             rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
1691             self.rejects.append(rej)
1692             return
1693
1694         r = r[0]
1695         if not r.dm_upload_allowed:
1696             rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
1697             self.rejects.append(rej)
1698             return
1699
1700         ## the Maintainer: field of the uploaded .changes file corresponds with
1701         ## the owner of the key used (ie, non-developer maintainers may not sponsor
1702         ## uploads)
1703         if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
1704             self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
1705
1706         ## the most recent version of the package uploaded to unstable or
1707         ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
1708         ## non-developer maintainers cannot NMU or hijack packages)
1709
1710         # srcuploaders includes the maintainer
1711         accept = False
1712         for sup in r.srcuploaders:
1713             (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
1714             # Eww - I hope we never have two people with the same name in Debian
1715             if email == fpr.uid.uid or name == fpr.uid.name:
1716                 accept = True
1717                 break
1718
1719         if not accept:
1720             self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
1721             return
1722
1723         ## none of the packages are being taken over from other source packages
1724         for b in self.pkg.changes["binary"].keys():
1725             for suite in self.pkg.changes["distribution"].keys():
1726                 q = session.query(DBSource)
1727                 q = q.join(DBBinary).filter_by(package=b)
1728                 q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)
1729
1730                 for s in q.all():
1731                     if s.source != self.pkg.changes["source"]:
1732                         self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s, suite))
1733
1734
1735
1736     def check_transition(self, session):
1737         cnf = Config()
1738
1739         sourcepkg = self.pkg.changes["source"]
1740
1741         # No sourceful upload -> no need to do anything else, direct return
1742         # We also only care about unstable uploads, not experimental ones or
1743         # those going to some proposed-updates queue
1744         if "source" not in self.pkg.changes["architecture"] or \
1745            "unstable" not in self.pkg.changes["distribution"]:
1746             return
1747
1748         # Also, only check if a transitions file is defined (and actually
1749         # exists)
1750         transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1751         if transpath == "" or not os.path.exists(transpath):
1752             return
1753
1754         # Parse the yaml file
1755         sourcefile = file(transpath, 'r')
1756         sourcecontent = sourcefile.read()
1757         try:
1758             transitions = yaml.load(sourcecontent)
1759         except yaml.YAMLError, msg:
1760             # This shouldn't happen, there is a wrapper to edit the file which
1761             # checks it, but we prefer to be safe rather than end up rejecting
1762             # everything.
1763             utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1764             return
1765
1766         # Now look through all defined transitions
1767         for trans in transitions:
1768             t = transitions[trans]
1769             source = t["source"]
1770             expected = t["new"]
1771
1772             # Will be None if nothing is in testing.
1773             current = get_source_in_suite(source, "testing", session)
1774             if current is not None:
1775                 compare = apt_pkg.VersionCompare(current.version, expected)
1776
1777             if current is None or compare < 0:
1778                 # This is still valid, the current version in testing is older than
1779                 # the new version we wait for, or there is none in testing yet
1780
1781                 # Check if the source we look at is affected by this.
1782                 if sourcepkg in t['packages']:
1783                     # The source is affected, let's reject it.
1784
1785                     rejectmsg = "%s: part of the %s transition.\n\n" % (
1786                         sourcepkg, trans)
1787
1788                     if current is not None:
1789                         currentlymsg = "at version %s" % (current.version)
1790                     else:
1791                         currentlymsg = "not present in testing"
1792
1793                     rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1794
1795                     rejectmsg += "\n".join(textwrap.wrap("""Your package
1796 is part of a testing transition designed to get %s migrated (it is
1797 currently %s, we need version %s).  This transition is managed by the
1798 Release Team, and %s is the Release-Team member responsible for it.
1799 Please mail debian-release@lists.debian.org or contact %s directly if you
1800 need further assistance.  You might want to upload to experimental until this
1801 transition is done."""
1802                             % (source, currentlymsg, expected, t["rm"], t["rm"])))
1803
1804                     self.rejects.append(rejectmsg)
1805                     return
1806
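    # Editorial note: the ReleaseTransitions file parsed above is YAML keyed
    # by transition name; the values below are invented for illustration:
    #
    #     ocaml_transition:
    #       reason: "ocaml 3.11 needs all reverse dependencies rebuilt"
    #       source: ocaml
    #       new: 3.11.1-1
    #       rm: "Some Release-Team Member"
    #       packages:
    #         - ocaml-foo
    #         - ocaml-bar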
1807     ###########################################################################
1808     # End check_signed_by_key checks
1809     ###########################################################################
1810
1811     def build_summaries(self):
1812         """ Build a summary of changes the upload introduces. """
1813
1814         (byhand, new, summary, override_summary) = self.pkg.file_summary()
1815
1816         short_summary = summary
1817
1818         # This is for direport's benefit...
1819         f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1820
1821         if byhand or new:
1822             summary += "Changes: " + f
1823
1824         summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1825
1826         summary += self.announce(short_summary, 0)
1827
1828         return (summary, short_summary)
1829
1830     ###########################################################################
1831
1832     def close_bugs(self, summary, action):
1833         """
1834         Send mail to close bugs as instructed by the closes field in the changes file.
1835         Also add a line to summary if any work was done.
1836
1837         @type summary: string
1838         @param summary: summary text, as given by L{build_summaries}
1839
1840         @type action: bool
1841         @param action: If set to false, no real action will be done.
1842
1843         @rtype: string
1844         @return: summary. If action was taken, extended by the list of closed bugs.
1845
1846         """
1847
1848         template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1849
1850         bugs = self.pkg.changes["closes"].keys()
1851
1852         if not bugs:
1853             return summary
1854
1855         bugs.sort()
1856         summary += "Closing bugs: "
1857         for bug in bugs:
1858             summary += "%s " % (bug)
1859             if action:
1860                 self.update_subst()
1861                 self.Subst["__BUG_NUMBER__"] = bug
1862                 if self.pkg.changes["distribution"].has_key("stable"):
1863                     self.Subst["__STABLE_WARNING__"] = """
1864 Note that this package is not part of the released stable Debian
1865 distribution.  It may have dependencies on other unreleased software,
1866 or other instabilities.  Please take care if you wish to install it.
1867 The update will eventually make its way into the next released Debian
1868 distribution."""
1869                 else:
1870                     self.Subst["__STABLE_WARNING__"] = ""
1871                 mail_message = utils.TemplateSubst(self.Subst, template)
1872                 utils.send_mail(mail_message)
1873
1874                 # Clear up after ourselves
1875                 del self.Subst["__BUG_NUMBER__"]
1876                 del self.Subst["__STABLE_WARNING__"]
1877
1878         if action and self.logger:
1879             self.logger.log(["closing bugs"] + bugs)
1880
1881         summary += "\n"
1882
1883         return summary
1884
1885     ###########################################################################
1886
1887     def announce(self, short_summary, action):
1888         """
1889         Send an announce mail about a new upload.
1890
1891         @type short_summary: string
1892         @param short_summary: Short summary text to include in the mail
1893
1894         @type action: bool
1895         @param action: If set to false, no real action will be done.
1896
1897         @rtype: string
1898         @return: Text string describing the action taken.
1899
1900         """
1901
1902         cnf = Config()
1903         announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
1904
1905         # Only do announcements for source uploads with a recent dpkg-dev installed
1906         if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
1907            self.pkg.changes["architecture"].has_key("source"):
1908             return ""
1909
1910         lists_done = {}
1911         summary = ""
1912
1913         self.Subst["__SHORT_SUMMARY__"] = short_summary
1914
1915         for dist in self.pkg.changes["distribution"].keys():
1916             suite = get_suite(dist)
1917             announce_list = suite.announce
1918             if not announce_list or lists_done.has_key(announce_list):
1919                 continue
1920
1921             lists_done[announce_list] = 1
1922             summary += "Announcing to %s\n" % (announce_list)
1923
1924             if action:
1925                 self.update_subst()
1926                 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
1927                 if cnf.get("Dinstall::TrackingServer") and \
1928                    self.pkg.changes["architecture"].has_key("source"):
1929                     trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
1930                     self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
1931
1932                 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
1933                 utils.send_mail(mail_message)
1934
1935                 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
1936
1937         if cnf.FindB("Dinstall::CloseBugs"):
1938             summary = self.close_bugs(summary, action)
1939
1940         del self.Subst["__SHORT_SUMMARY__"]
1941
1942         return summary
1943
1944     ###########################################################################
1945     @session_wrapper
1946     def accept (self, summary, short_summary, session=None):
1947         """
1948         Accept an upload.
1949
1950         This moves all files referenced from the .changes into the pool,
1951         sends the accepted mail, announces to lists, closes bugs and
1952         also checks for override disparities. If enabled it will write out
1953         the version history for the BTS Version Tracking and will finally call
1954         L{queue_build}.
1955
1956         @type summary: string
1957         @param summary: Summary text
1958
1959         @type short_summary: string
1960         @param short_summary: Short summary
1961         """
1962
1963         cnf = Config()
1964         stats = SummaryStats()
1965
1966         print "Installing."
1967         self.logger.log(["installing changes", self.pkg.changes_file])
1968
1969         poolfiles = []
1970
1971         # Add the .dsc file to the DB first
1972         for newfile, entry in self.pkg.files.items():
1973             if entry["type"] == "dsc":
1974                 source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
1975                 for j in pfs:
1976                     poolfiles.append(j)
1977
1978         # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
1979         for newfile, entry in self.pkg.files.items():
1980             if entry["type"] == "deb":
1981                 poolfiles.append(add_deb_to_db(self, newfile, session))
1982
1983         # If this is a sourceful diff only upload that is moving
1984         # cross-component we need to copy the .orig files into the new
1985         # component too for the same reasons as above.
1986         # XXX: mhy: I think this should be in add_dsc_to_db
1987         if self.pkg.changes["architecture"].has_key("source"):
1988             for orig_file in self.pkg.orig_files.keys():
1989                 if not self.pkg.orig_files[orig_file].has_key("id"):
1990                     continue # Skip if it's not in the pool
1991                 orig_file_id = self.pkg.orig_files[orig_file]["id"]
1992                 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
1993                     continue # Skip if the location didn't change
1994
1995                 # Do the move
1996                 oldf = get_poolfile_by_id(orig_file_id, session)
1997                 old_filename = os.path.join(oldf.location.path, oldf.filename)
1998                 old_dat = {'size': oldf.filesize,   'md5sum': oldf.md5sum,
1999                            'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
2000
2001                 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
2002
2003                 # TODO: Care about size/md5sum collisions etc
2004                 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
2005
2006                 # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
2007                 if newf is None:
2008                     utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
2009                     newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
2010
2011                     session.flush()
2012
2013                     # Don't reference the old file from this changes file
2014                     # (we must not remove() from a list while iterating it)
2015                     poolfiles = [p for p in poolfiles
2016                                  if p.file_id != oldf.file_id]
2017
2018                     poolfiles.append(newf)
2019
2020                     # Fix up the DSC references
2021                     toremove = []
2022
2023                     for df in source.srcfiles:
2024                         if df.poolfile.file_id == oldf.file_id:
2025                             # Add a new DSC entry and mark the old one for deletion
2026                             # Don't do it in the loop so we don't change the thing we're iterating over
2027                             newdscf = DSCFile()
2028                             newdscf.source_id = source.source_id
2029                             newdscf.poolfile_id = newf.file_id
2030                             session.add(newdscf)
2031
2032                             toremove.append(df)
2033
2034                     for df in toremove:
2035                         session.delete(df)
2036
2037                     # Flush our changes
2038                     session.flush()
2039
2040                     # Make sure that our source object is up-to-date
2041                     session.expire(source)
2042
2043         # Add changelog information to the database
2044         self.store_changelog()
2045
2046         # Install the files into the pool
2047         for newfile, entry in self.pkg.files.items():
2048             destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
2049             utils.move(newfile, destination)
2050             self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
2051             stats.accept_bytes += float(entry["size"])
2052
2053         # Copy the .changes file across for suites which need it.
2054         copy_changes = {}
2055         for suite_name in self.pkg.changes["distribution"].keys():
2056             if cnf.has_key("Suite::%s::CopyChanges" % (suite_name)):
2057                 copy_changes[cnf["Suite::%s::CopyChanges" % (suite_name)]] = ""
2058
2059         for dest in copy_changes.keys():
2060             utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
2061
2062         # We're done - commit the database changes
2063         session.commit()
2064         # Our SQL session will automatically start a new transaction after
2065         # the last commit
2066
2067         # Move the .changes into the 'done' directory
2068         utils.move(self.pkg.changes_file,
2069                    os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))
2070
2071         if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
2072             UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
2073
2074         self.update_subst()
2075         self.Subst["__SUMMARY__"] = summary
2076         mail_message = utils.TemplateSubst(self.Subst,
2077                                            os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
2078         utils.send_mail(mail_message)
2079         self.announce(short_summary, 1)
2080
2081         ## Helper stuff for DebBugs Version Tracking
2082         if cnf.Find("Dir::Queue::BTSVersionTrack"):
2083             if self.pkg.changes["architecture"].has_key("source"):
2084                 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2085                 version_history = os.fdopen(fd, 'w')
2086                 version_history.write(self.pkg.dsc["bts changelog"])
2087                 version_history.close()
2088                 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2089                                       self.pkg.changes_file[:-8]+".versions")
2090                 os.rename(temp_filename, filename)
2091                 os.chmod(filename, 0644)
2092
2093             # Write out the binary -> source mapping.
2094             (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2095             debinfo = os.fdopen(fd, 'w')
2096             for name, entry in sorted(self.pkg.files.items()):
2097                 if entry["type"] == "deb":
2098                     line = " ".join([entry["package"], entry["version"],
2099                                      entry["architecture"], entry["source package"],
2100                                      entry["source version"]])
2101                     debinfo.write(line+"\n")
2102             debinfo.close()
2103             filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2104                                   self.pkg.changes_file[:-8]+".debinfo")
2105             os.rename(temp_filename, filename)
2106             os.chmod(filename, 0644)
2107
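        # Editorial note: the <changes>.versions file written above holds the
        # changelog header lines collected earlier, while each line of
        # <changes>.debinfo maps a binary to its source in the order
        # "package version architecture source-package source-version", e.g.
        # (values invented):
        #
        #     dak 1.0-1 all dak 1.0-1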
2108         session.commit()
2109
2110         # Set up our copy queues (e.g. buildd queues)
2111         for suite_name in self.pkg.changes["distribution"].keys():
2112             suite = get_suite(suite_name, session)
2113             for q in suite.copy_queues:
2114                 for f in poolfiles:
2115                     q.add_file_from_pool(f)
2116
2117         session.commit()
2118
2119         # Finally...
2120         stats.accept_count += 1
2121
2122     def check_override(self):
2123         """
2124         Checks override entries for validity. Mails "Override disparity" warnings,
2125         if that feature is enabled.
2126
2127         Abandons the check if
2128           - override disparity checks are disabled
2129           - mail sending is disabled
2130         """
2131
2132         cnf = Config()
2133
2134         # Abandon the check if override disparity checks have been disabled
2135         if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
2136             return
2137
2138         summary = self.pkg.check_override()
2139
2140         if summary == "":
2141             return
2142
2143         overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2144
2145         self.update_subst()
2146         self.Subst["__SUMMARY__"] = summary
2147         mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2148         utils.send_mail(mail_message)
2149         del self.Subst["__SUMMARY__"]
2150
2151     ###########################################################################
2152
2153     def remove(self, from_dir=None):
2154         """
2155         Used (for instance) in p-u to remove the package from unchecked
2156
2157         Also removes the package from holding area.
2158         """
2159         if from_dir is None:
2160             from_dir = self.pkg.directory
2161         h = Holding()
2162
2163         for f in self.pkg.files.keys():
2164             os.unlink(os.path.join(from_dir, f))
2165             if os.path.exists(os.path.join(h.holding_dir, f)):
2166                 os.unlink(os.path.join(h.holding_dir, f))
2167
2168         os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2169         if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2170             os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2171
2172     ###########################################################################
2173
2174     def move_to_queue (self, queue):
2175         """
2176         Move files to a destination queue using the permissions in the table
2177         """
2178         h = Holding()
2179         utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2180                    queue.path, perms=int(queue.change_perms, 8))
2181         for f in self.pkg.files.keys():
2182             utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
2183
2184     ###########################################################################
2185
2186     def force_reject(self, reject_files):
2187         """
2188         Forcefully move files from the current directory to the
2189         reject directory.  If any file already exists in the reject
2190         directory it will be moved to the morgue to make way for
2191         the new file.
2192
2193         @type reject_files: dict
2194         @param reject_files: file dictionary
2195
2196         """
2197
2198         cnf = Config()
2199
2200         for file_entry in reject_files:
2201             # Skip any files which don't exist or which we don't have permission to copy.
2202             if not os.access(file_entry, os.R_OK):
2203                 continue
2204
2205             dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
2206
2207             try:
2208                 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
2209             except OSError, e:
2210                 # File exists?  Let's find a new name by adding a number
2211                 if e.errno == errno.EEXIST:
2212                     try:
2213                         dest_file = utils.find_next_free(dest_file, 255)
2214                     except NoFreeFilenameError:
2215                         # Something's either gone badly Pete Tong, or
2216                         # someone is trying to exploit us.
2217                         utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))
2218                         return
2219
2220                     # Make sure we really got it
2221                     try:
2222                         dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2223                     except OSError, e:
2224                         # Likewise
2225                         utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
2226                         return
2227                 else:
2228                     raise
2229             # If we got here, we own the destination file, so we can
2230             # safely overwrite it.
2231             utils.move(file_entry, dest_file, 1, perms=0660)
2232             os.close(dest_fd)
2233
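    # A standalone sketch (editorial and hypothetical - dak's actual
    # utils.find_next_free() may differ) of the "find a free name" idea used
    # above:
    #
    #     def find_next_free(dest, too_many=255):
    #         for i in range(1, too_many + 1):
    #             candidate = "%s.%d" % (dest, i)
    #             if not os.path.exists(candidate):
    #                 return candidate
    #         raise NoFreeFilenameError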
2234     ###########################################################################
2235     def do_reject (self, manual=0, reject_message="", notes=""):
2236         """
2237         Reject an upload. If called without a reject message or C{manual} is
2238         true, spawn an editor so the user can write one.
2239
2240         @type manual: bool
2241         @param manual: manual or automated rejection
2242
2243         @type reject_message: string
2244         @param reject_message: A reject message
2245
2246         @return: 0 on success; 1 if the user abandoned the rejection
2247
2248         """
2249         # If we weren't given a manual rejection message, spawn an
2250         # editor so the user can add one in...
2251         if manual and not reject_message:
2252             (fd, temp_filename) = utils.temp_filename()
2253             temp_file = os.fdopen(fd, 'w')
2254             if len(notes) > 0:
2255                 for note in notes:
2256                     temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
2257                                     % (note.author, note.version, note.notedate, note.comment))
2258             temp_file.close()
2259             editor = os.environ.get("EDITOR","vi")
2260             answer = 'E'
2261             while answer == 'E':
2262                 os.system("%s %s" % (editor, temp_filename))
2263                 temp_fh = utils.open_file(temp_filename)
2264                 reject_message = "".join(temp_fh.readlines())
2265                 temp_fh.close()
2266                 print "Reject message:"
2267                 print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
2268                 prompt = "[R]eject, Edit, Abandon, Quit ?"
2269                 answer = "XXX"
2270                 while prompt.find(answer) == -1:
2271                     answer = utils.our_raw_input(prompt)
2272                     m = re_default_answer.search(prompt)
2273                     if answer == "":
2274                         answer = m.group(1)
2275                     answer = answer[:1].upper()
2276             os.unlink(temp_filename)
2277             if answer == 'A':
2278                 return 1
2279             elif answer == 'Q':
2280                 sys.exit(0)
2281
2282         print "Rejecting.\n"
2283
2284         cnf = Config()
2285
2286         reason_filename = self.pkg.changes_file[:-8] + ".reason"
2287         reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2288
2289         # Move all the files into the reject directory
2290         reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2291         self.force_reject(reject_files)
2292
2293         # If we fail here someone is probably trying to exploit the race
2294         # so let's just raise an exception ...
2295         if os.path.exists(reason_filename):
2296             os.unlink(reason_filename)
2297         reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2298
2299         rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
2300
2301         self.update_subst()
2302         if not manual:
2303             self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2304             self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2305             self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2306             os.write(reason_fd, reject_message)
2307             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2308         else:
2309             # Build up the rejection email
2310             user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2311             self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2312             self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2313             self.Subst["__REJECT_MESSAGE__"] = ""
2314             self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2315             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2316             # Write the rejection email out as the <foo>.reason file
2317             os.write(reason_fd, reject_mail_message)
2318
2319         del self.Subst["__REJECTOR_ADDRESS__"]
2320         del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2321         del self.Subst["__CC__"]
2322
2323         os.close(reason_fd)
2324
2325         # Send the rejection mail
2326         utils.send_mail(reject_mail_message)
2327
2328         if self.logger:
2329             self.logger.log(["rejected", self.pkg.changes_file])
2330
2331         return 0
2332
2333     ################################################################################
2334     def in_override_p(self, package, component, suite, binary_type, filename, session):
2335         """
2336         Check if a package already has override entries in the DB
2337
2338         @type package: string
2339         @param package: package name
2340
2341         @type component: string
2342         @param component: component name
2343
2344         @type suite: string
2345         @param suite: suite name
2346
2347         @type binary_type: string
2348         @param binary_type: type of the package
2349
2350         @type filename: string
2351         @param filename: filename we check
2352
2353         @return: the database result. But no one cares anyway.
2354
2355         """
2356
2357         cnf = Config()
2358
2359         if binary_type == "": # must be source
2360             file_type = "dsc"
2361         else:
2362             file_type = binary_type
2363
2364         # Override suite name; used for example with proposed-updates
2365         if cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
2366             suite = cnf["Suite::%s::OverrideSuite" % (suite)]
2367
2368         result = get_override(package, suite, component, file_type, session)
2369
2370         # If checking for a source package fall back on the binary override type
2371         if file_type == "dsc" and len(result) < 1:
2372             result = get_override(package, suite, component, ['deb', 'udeb'], session)
2373
2374         # Remember the section and priority so we can check them later if appropriate
2375         if len(result) > 0:
2376             result = result[0]
2377             self.pkg.files[filename]["override section"] = result.section.section
2378             self.pkg.files[filename]["override priority"] = result.priority.priority
2379             return result
2380
2381         return None
2382
2383     ################################################################################
2384     def get_anyversion(self, sv_list, suite):
2385         """
2386         @type sv_list: list
2387         @param sv_list: list of (suite, version) tuples to check
2388
2389         @type suite: string
2390         @param suite: suite name
2391
2392         Description: TODO
2393         """
2394         Cnf = Config()
2395         anyversion = None
2396         anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
2397         for (s, v) in sv_list:
2398             if s in [ x.lower() for x in anysuite ]:
2399                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2400                     anyversion = v
2401
2402         return anyversion
2403
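    # Editorial worked example: apt_pkg.VersionCompare(a, b) is negative,
    # zero or positive as a is older than, equal to or newer than b, so with
    # sv_list = [("stable", "1.0-1"), ("unstable", "1.2-1")] and a suite
    # whose VersionChecks::Enhances covers both, get_anyversion() returns
    # "1.2-1", the highest version seen.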
2404     ################################################################################
2405
2406     def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2407         """
2408         @type sv_list: list
2409         @param sv_list: list of (suite, version) tuples to check
2410
2411         @type filename: string
2412         @param filename: XXX
2413
2414         @type new_version: string
2415         @param new_version: XXX
2416
2417         Ensure versions are newer than existing packages in target
2418         suites and that cross-suite version checking rules as
2419         set out in the conf file are satisfied.
2420         """
2421
2422         cnf = Config()
2423
2424         # Check versions for each target suite
2425         for target_suite in self.pkg.changes["distribution"].keys():
2426             must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
2427             must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
2428
2429             # Enforce "must be newer than target suite" even if conffile omits it
2430             if target_suite not in must_be_newer_than:
2431                 must_be_newer_than.append(target_suite)
2432
2433             for (suite, existent_version) in sv_list:
2434                 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
2435
2436                 if suite in must_be_newer_than and sourceful and vercmp < 1:
2437                     self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2438
2439                 if suite in must_be_older_than and vercmp > -1:
2440                     cansave = 0
2441
2442                     if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2443                         # we really use the other suite, ignoring the conflicting one ...
2444                         addsuite = self.pkg.changes["distribution-version"][suite]
2445
2446                         add_version = self.get_anyversion(sv_list, addsuite)
2447                         target_version = self.get_anyversion(sv_list, target_suite)
2448
2449                         if not add_version:
2450                             # not add_version can only happen if we map to a suite
2451                             # that doesn't enhance the suite we're propup'ing from.
2452                             # so "propup-ver x a b c; map a d" is a problem only if
2453                             # d doesn't enhance a.
2454                             #
2455                             # i think we could always propagate in this case, rather
2456                             # than complaining. either way, this isn't a REJECT issue
2457                             #
2458                             # And - we really should complain to the dorks who configured dak
2459                             self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyway" % (suite, addsuite))
2460                             self.pkg.changes.setdefault("propdistribution", {})
2461                             self.pkg.changes["propdistribution"][addsuite] = 1
2462                             cansave = 1
2463                         elif not target_version:
2464                             # not target_version is true when the package is NEW
2465                             # we could just stick with the "...old version..." REJECT
2466                             # for this, I think.
2467                             self.rejects.append("Won't propagate NEW packages.")
2468                         elif apt_pkg.VersionCompare(new_version, add_version) < 0:
2469                             # propagation would be redundant. no need to reject though.
2470                             self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2471                             cansave = 1
2472                         elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2473                              apt_pkg.VersionCompare(add_version, target_version) >= 0:
2474                             # propogate!!
2475                             self.warnings.append("Propogating upload to %s" % (addsuite))
2476                             self.pkg.changes.setdefault("propdistribution", {})
2477                             self.pkg.changes["propdistribution"][addsuite] = 1
2478                             cansave = 1
2479
2480                     if not cansave:
2481                         self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2482
2483     ################################################################################
2484     def check_binary_against_db(self, filename, session):
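        """
        Check a binary package against the database: run the cross-suite
        version checks and make sure the exact same binary is not already
        in the archive.

        @type filename: string
        @param filename: filename key into self.pkg.files

        @type session: SQLA Session
        @param session: SQL Alchemy session object
        """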
        # Ensure version is sane
        q = session.query(BinAssociation)
        q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[filename]["package"])
        q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[filename]["architecture"], 'all']))

        self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
                                       filename, self.pkg.files[filename]["version"], sourceful=False)

        # Check for any existing copies of the file
        q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
        q = q.filter_by(version=self.pkg.files[filename]["version"])
        q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])

        if q.count() > 0:
            self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)

    ################################################################################

    def check_source_against_db(self, filename, session):
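        """
        Run the cross-suite version checks for the source package in
        self.pkg.dsc against the versions already in the database.

        @type filename: string
        @param filename: changes filename, used in reject messages

        @type session: SQLA Session
        @param session: SQL Alchemy session object
        """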
        source = self.pkg.dsc.get("source")
        version = self.pkg.dsc.get("version")

        # Ensure version is sane
        q = session.query(SrcAssociation)
        q = q.join(DBSource).filter(DBSource.source==source)

        self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
                                       filename, version, sourceful=True)

    ################################################################################
    def check_dsc_against_db(self, filename, session):
        """
        Check the files listed in the .dsc against the database, locating
        any referenced orig tarballs in incoming, the pool or the queue
        directories.

        @warning: NB: this function can remove entries from the 'files' index [if
         the orig tarball is a duplicate of the one in the archive]; if
         you're iterating over 'files' and call this function as part of
         the loop, be sure to add a check to the top of the loop to
         ensure you haven't just tried to dereference the deleted entry.

        """

        Cnf = Config()
        self.pkg.orig_files = {} # XXX: do we need to clear it?
        orig_files = self.pkg.orig_files

        # Try and find all files mentioned in the .dsc.  This has
        # to work harder to cope with the multiple possible
        # locations of an .orig.tar.gz.
        # The ordering on the select is needed to pick the newest orig
        # when it exists in multiple places.
        for dsc_name, dsc_entry in self.pkg.dsc_files.items():
            found = None
            if self.pkg.files.has_key(dsc_name):
                actual_md5 = self.pkg.files[dsc_name]["md5sum"]
                actual_size = int(self.pkg.files[dsc_name]["size"])
                found = "%s in incoming" % (dsc_name)

                # Check the file does not already exist in the archive
                ql = get_poolfile_like_name(dsc_name, session)

                # Strip out anything whose filename doesn't end with the
                # name from the .dsc.  (Filter into a new list rather than
                # removing entries while iterating over the old one.)
                ql = [ i for i in ql if i.filename.endswith(dsc_name) ]

                # "[dak] has not broken them.  [dak] has fixed a
                # brokenness.  Your crappy hack exploited a bug in
                # the old dinstall."
                #
                # "(Come on!  I thought it was always obvious that
                # one just doesn't release different files with
                # the same name and version.)"
                #                        -- ajk@ on d-devel@l.d.o

                if len(ql) > 0:
                    # Ignore exact matches for .orig.tar.gz
                    match = 0
                    if re_is_orig_source.match(dsc_name):
                        for i in ql:
                            if self.pkg.files.has_key(dsc_name) and \
                               int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
                               self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
                                self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
                                # TODO: Don't delete the entry, just mark it as not needed
                                # This would fix the stupidity of changing something we often iterate over
                                # whilst we're doing it
                                del self.pkg.files[dsc_name]
                                dsc_entry["files id"] = i.file_id
                                if not orig_files.has_key(dsc_name):
                                    orig_files[dsc_name] = {}
                                orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
                                match = 1

                                # Don't bitch that we couldn't find this file later
                                try:
                                    self.later_check_files.remove(dsc_name)
                                except ValueError:
                                    pass

                    if not match:
                        self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))

            elif re_is_orig_source.match(dsc_name):
                # Check in the pool
                ql = get_poolfile_like_name(dsc_name, session)

                # Strip out anything whose filename doesn't end with the
                # name from the .dsc.
                # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
                ql = [ i for i in ql if i.filename.endswith(dsc_name) ]

                if len(ql) > 0:
                    # Unfortunately, we may get more than one match here if,
                    # for example, the package was in potato but had an -sa
                    # upload in woody.  So we need to choose the right one.

                    # default to something sane in case we don't match any or have only one
                    x = ql[0]

                    if len(ql) > 1:
                        for i in ql:
                            old_file = os.path.join(i.location.path, i.filename)
                            old_file_fh = utils.open_file(old_file)
                            actual_md5 = apt_pkg.md5sum(old_file_fh)
                            old_file_fh.close()
                            actual_size = os.stat(old_file)[stat.ST_SIZE]
                            if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
                                x = i

                    # Use the chosen match, not the last loop variable
                    old_file = os.path.join(x.location.path, x.filename)
                    old_file_fh = utils.open_file(old_file)
                    actual_md5 = apt_pkg.md5sum(old_file_fh)
                    old_file_fh.close()
                    actual_size = os.stat(old_file)[stat.ST_SIZE]
                    found = old_file
                    suite_type = x.location.archive_type
                    # need this for updating dsc_files in install()
                    dsc_entry["files id"] = x.file_id
                    # See install() in process-accepted...
                    if not orig_files.has_key(dsc_name):
                        orig_files[dsc_name] = {}
                    orig_files[dsc_name]["id"] = x.file_id
                    orig_files[dsc_name]["path"] = old_file
                    orig_files[dsc_name]["location"] = x.location.location_id
                else:
                    # TODO: Record the queues and info in the DB so we don't hardcode all this crap
                    # Not there? Check the queue directories...
                    for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                        if not Cnf.has_key("Dir::Queue::%s" % (directory)):
                            continue
                        in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
                        if os.path.exists(in_otherdir):
                            in_otherdir_fh = utils.open_file(in_otherdir)
                            actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
                            in_otherdir_fh.close()
                            actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
                            found = in_otherdir
                            if not orig_files.has_key(dsc_name):
                                orig_files[dsc_name] = {}
                            orig_files[dsc_name]["path"] = in_otherdir

                    if not found:
                        self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
                        continue
            else:
                self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
                continue
            if actual_md5 != dsc_entry["md5sum"]:
                self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
            if actual_size != int(dsc_entry["size"]):
                self.rejects.append("size for %s doesn't match %s." % (found, filename))


    ################################################################################
    def recheck(self, session):
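        """
        Used by process-new and process-holding to recheck a changes file
        at the time we're running.  It mainly wraps various other internal
        functions and is similar to accepted_checks - these should probably
        be tidied up and combined.

        @type session: SQLA Session
        @param session: SQL Alchemy session object
        """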
        cnf = Config()
        for f in self.pkg.files.keys():
            # The .orig.tar.gz can disappear out from under us if it's a
            # duplicate of one in the archive.
            if not self.pkg.files.has_key(f):
                continue

            entry = self.pkg.files[f]

            # Check that the source still exists
            if entry["type"] == "deb":
                source_version = entry["source version"]
                source_package = entry["source package"]
                if not self.pkg.changes["architecture"].has_key("source") \
                   and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
                    source_epochless_version = re_no_epoch.sub('', source_version)
                    dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
                    found = False
                    for q in ["Embargoed", "Unembargoed", "Newstage"]:
                        if cnf.has_key("Dir::Queue::%s" % (q)):
                            if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
                                found = True
                    if not found:
                        self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))

            # Version and file overwrite checks
            if entry["type"] == "deb":
                self.check_binary_against_db(f, session)
            elif entry["type"] == "dsc":
                self.check_source_against_db(f, session)
                self.check_dsc_against_db(f, session)

    ################################################################################
    def accepted_checks(self, overwrite_checks, session):
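        """
        Recheck anything that relies on the database, since that is not
        frozen between accept and the time we run (e.g. when called from
        process-accepted), and propagate the upload to any suite listed in
        "propdistribution" whose override tables already know the package.

        @type overwrite_checks: bool
        @param overwrite_checks: whether to run the version and file
         overwrite checks; set to False when installing to stable/oldstable

        @type session: SQLA Session
        @param session: SQL Alchemy session object
        """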
        propagate={}
        nopropagate={}

        # Find the .dsc (again)
        dsc_filename = None
        for f in self.pkg.files.keys():
            if self.pkg.files[f]["type"] == "dsc":
                dsc_filename = f

        for checkfile in self.pkg.files.keys():
            # The .orig.tar.gz can disappear out from under us if it's a
            # duplicate of one in the archive.
            if not self.pkg.files.has_key(checkfile):
                continue

            entry = self.pkg.files[checkfile]

            # Check that the source still exists
            if entry["type"] == "deb":
                source_version = entry["source version"]
                source_package = entry["source package"]
                if not self.pkg.changes["architecture"].has_key("source") \
                   and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
                    self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))

            # Version and file overwrite checks
            if overwrite_checks:
                if entry["type"] == "deb":
                    self.check_binary_against_db(checkfile, session)
                elif entry["type"] == "dsc":
                    self.check_source_against_db(checkfile, session)
                    self.check_dsc_against_db(dsc_filename, session)

            # propagate in the case it is in the override tables:
            for suite in self.pkg.changes.get("propdistribution", {}).keys():
                if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
                    propagate[suite] = 1
                else:
                    nopropagate[suite] = 1

        for suite in propagate.keys():
            if suite in nopropagate:
                continue
            self.pkg.changes["distribution"][suite] = 1

        for checkfile in self.pkg.files.keys():
            # Re-fetch the entry for this file; "entry" would otherwise be
            # stale from the loop above
            entry = self.pkg.files[checkfile]
            # Check the package is still in the override tables
            for suite in self.pkg.changes["distribution"].keys():
                if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
                    self.rejects.append("%s is NEW for %s." % (checkfile, suite))

    ################################################################################
    def upload_too_new(self):
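        """
        If any file of an upload has a recent mtime then chances are good
        the file is still being uploaded.

        @rtype: bool
        @return: True if any file of the upload was modified less than
         Dinstall::SkipTime seconds ago
        """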
        cnf = Config()
        too_new = False
        # Move back to the original directory to get accurate time stamps
        cwd = os.getcwd()
        os.chdir(self.pkg.directory)
        file_list = self.pkg.files.keys()
        file_list.extend(self.pkg.dsc_files.keys())
        file_list.append(self.pkg.changes_file)
        for f in file_list:
            try:
                last_modified = time.time() - os.path.getmtime(f)
                if last_modified < int(cnf["Dinstall::SkipTime"]):
                    too_new = True
                    break
            except OSError:
                # The file vanished out from under us; ignore it
                pass

        os.chdir(cwd)
        return too_new

    def store_changelog(self):
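        """
        Store the changelog from the current upload in the database,
        skipping binary-only uploads that are not bin-NMUs, and link the
        stored text to the upload in the changes table.
        """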

        # Skip binary-only upload if it is not a bin-NMU
        if not self.pkg.changes['architecture'].has_key('source'):
            from daklib.regexes import re_bin_only_nmu
            if not re_bin_only_nmu.search(self.pkg.changes['version']):
                return

        session = DBConn().session()

        # Check if upload already has a changelog entry
        query = """SELECT changelog_id FROM changes WHERE source = :source
                   AND version = :version AND architecture = :architecture AND changelog_id != 0"""
        if session.execute(query, {'source': self.pkg.changes['source'], \
                                   'version': self.pkg.changes['version'], \
                                   'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
            session.commit()
            return

        # Add current changelog text into changelogs_text table, return created ID
        query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
        ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]

        # Link ID to the upload available in changes table
        query = """UPDATE changes SET changelog_id = :id WHERE source = :source
                   AND version = :version AND architecture = :architecture"""
        session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
                                'version': self.pkg.changes['version'], \
                                'architecture': " ".join(self.pkg.changes['architecture'].keys())})

        session.commit()