#!/usr/bin/env python
# vim:set et sw=4:

"""
Queue utility functions for dak

@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2001 - 2006 James Troup <james@nocrew.org>
@copyright: 2009, 2010  Joerg Jaspert <joerg@debian.org>
@license: GNU General Public License version 2 or later
"""

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

###############################################################################

import errno
import os
import stat
import sys
import time
import apt_inst
import apt_pkg
import utils
import commands
import shutil
import textwrap
from types import *
from sqlalchemy.sql.expression import desc
from sqlalchemy.orm.exc import NoResultFound

import yaml

from dak_exceptions import *
from changes import *
from regexes import *
from config import Config
from holding import Holding
from urgencylog import UrgencyLog
from dbconn import *
from summarystats import SummaryStats
from utils import parse_changes, check_dsc_files, build_package_set
from textutils import fix_maintainer
from lintian import parse_lintian_output, generate_reject_messages
from contents import UnpackedSource

# suppress some deprecation warnings in squeeze related to apt_pkg
# module
import warnings
warnings.filterwarnings('ignore', \
    "apt_pkg.ParseSection\(\) is deprecated. Please see apt_pkg\.TagSection\(\) for the replacement\.", \
    DeprecationWarning)
warnings.filterwarnings('ignore', \
    "Attribute 'Find' of the 'apt_pkg\.TagSection' object is deprecated, use 'find' instead\.", \
    DeprecationWarning)

###############################################################################

def get_type(f, session):
    """
    Get the file type of C{f}

    @type f: dict
    @param f: file entry from Changes object

    @type session: SQLA Session
    @param session: SQL Alchemy session object

    @rtype: string
    @return: filetype

    """
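    # Illustrative input (an assumption, not from the original source): an
    # entry such as f = {"type": "dsc", "section": "devel", "priority":
    # "source"} has no "dbtype" key, so the re_source_ext match on f["type"]
    # below yields "dsc".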
    # Determine the type
    if f.has_key("dbtype"):
        file_type = f["dbtype"]
    elif re_source_ext.match(f["type"]):
        file_type = "dsc"
    else:
        file_type = f["type"]
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (file_type))

    # Validate the override type
    type_id = get_override_type(file_type, session)
    if type_id is None:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))

    return file_type

################################################################################

# Determine what parts in a .changes are NEW

def determine_new(filename, changes, files, warn=1, session=None, dsc=None, new=None):
106     """
107     Determine what parts in a C{changes} file are NEW.
108
109     @type filename: str
110     @param filename: changes filename
111
112     @type changes: Upload.Pkg.changes dict
113     @param changes: Changes dictionary
114
115     @type files: Upload.Pkg.files dict
116     @param files: Files dictionary
117
118     @type warn: bool
119     @param warn: Warn if overrides are added for (old)stable
120
121     @type dsc: Upload.Pkg.dsc dict
122     @param dsc: (optional); Dsc dictionary
123
124     @type new: dict
125     @param new: new packages as returned by a previous call to this function, but override information may have changed
126
127     @rtype: dict
128     @return: dictionary of NEW components.
129
130     """
    # TODO: This should all use the database instead of parsing the changes
    # file again
    if new is None:
        new = {}

    byhand = {}

    dbchg = get_dbchange(filename, session)
    if dbchg is None:
        print "Warning: cannot find changes file in database; won't check byhand"

    # Try to get the Package-Set field from an included .dsc file (if possible).
    if dsc:
        for package, entry in build_package_set(dsc, session).items():
            if not new.has_key(package):
                new[package] = entry

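    # Each entry added to ``new`` below is shaped roughly like this
    # (illustrative values):
    #   new["foo"] = {"priority": "optional", "section": "devel", "type": "deb",
    #                 "component": "main", "files": ["foo_1.0-1_amd64.deb"]}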
    # Build up a list of potentially new things
    for name, f in files.items():
        # Keep a record of byhand elements
        if f["section"] == "byhand":
            byhand[name] = 1
            continue

        pkg = f["package"]
        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f, session)
        component = f["component"]

        if file_type == "dsc":
            priority = "source"

        if not new.has_key(pkg):
            new[pkg] = {}
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        else:
            old_type = new[pkg]["type"]
            if old_type != file_type:
                # source gets trumped by deb or udeb
                if old_type == "dsc":
                    new[pkg]["priority"] = priority
                    new[pkg]["section"] = section
                    new[pkg]["type"] = file_type
                    new[pkg]["component"] = component

        new[pkg]["files"].append(name)

        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]

    # Fix up the list of target suites
    cnf = Config()
    for suite in changes["suite"].keys():
        oldsuite = get_suite(suite, session)
        if not oldsuite:
            print "WARNING: Invalid suite %s found" % suite
            continue

        if oldsuite.overridesuite:
            newsuite = get_suite(oldsuite.overridesuite, session)

            if newsuite:
                print "INFORMATION: Using overrides from suite %s instead of suite %s" % (
                    oldsuite.overridesuite, suite)
                del changes["suite"][suite]
                changes["suite"][oldsuite.overridesuite] = 1
            else:
                print "WARNING: Told to use overridesuite %s for %s but it doesn't exist.  Bugger" % (
                    oldsuite.overridesuite, suite)

    # Check for unprocessed byhand files
    if dbchg is not None:
        for b in byhand.keys():
            # Find the file entry in the database
            found = False
            for f in dbchg.files:
                if f.filename == b:
                    found = True
                    # If it's processed, we can ignore it
                    if f.processed:
                        del byhand[b]
                    break

            if not found:
                print "Warning: Couldn't find BYHAND item %s in the database; assuming unprocessed" % (b)

    # Check for new stuff
    for suite in changes["suite"].keys():
        for pkg in new.keys():
            ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
            if len(ql) > 0:
                for file_entry in new[pkg]["files"]:
                    if files[file_entry].has_key("new"):
                        del files[file_entry]["new"]
                del new[pkg]

    if warn:
        for s in ['stable', 'oldstable']:
            if changes["suite"].has_key(s):
                print "WARNING: overrides will be added for %s!" % s
        for pkg in new.keys():
            if new[pkg].has_key("othercomponents"):
                print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])

    return new, byhand

################################################################################

def check_valid(new, session = None):
    """
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages can not be udeb
      - source priority can only be assigned to dsc file types

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.

    """
    for pkg in new.keys():
        section_name = new[pkg]["section"]
        priority_name = new[pkg]["priority"]
        file_type = new[pkg]["type"]

        section = get_section(section_name, session)
        if section is None:
            new[pkg]["section id"] = -1
        else:
            new[pkg]["section id"] = section.section_id

        priority = get_priority(priority_name, session)
        if priority is None:
            new[pkg]["priority id"] = -1
        else:
            new[pkg]["priority id"] = priority.priority_id

        # Sanity checks
        di = section_name.find("debian-installer") != -1

        # If d-i, we must be udeb and vice-versa
        if (di and file_type not in ("udeb", "dsc")) or \
           (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1

        # If dsc we need to be source and vice-versa
        if (priority_name == "source" and file_type != "dsc") or \
           (priority_name != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1

###############################################################################

# Used by Upload.check_timestamps
class TarTime(object):
    def __init__(self, future_cutoff, past_cutoff):
        self.reset()
        self.future_cutoff = future_cutoff
        self.past_cutoff = past_cutoff

    def reset(self):
        self.future_files = {}
        self.ancient_files = {}

    def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
        if MTime > self.future_cutoff:
            self.future_files[Name] = MTime
        if MTime < self.past_cutoff:
            self.ancient_files[Name] = MTime

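# Illustrative wiring (an assumption -- check_timestamps itself is not part
# of this excerpt): hand .callback to the old apt_inst extraction interface,
# then inspect the collected timestamps afterwards, e.g.
#   tar = TarTime(future_cutoff, past_cutoff)
#   apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
#   if tar.future_files or tar.ancient_files: ...reject the upload...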
###############################################################################

def prod_maintainer(notes, upload):
    cnf = Config()

    # Here we prepare an editor and get them ready to prod...
    (fd, temp_filename) = utils.temp_filename()
    temp_file = os.fdopen(fd, 'w')
    for note in notes:
        temp_file.write(note.comment)
    temp_file.close()
    editor = os.environ.get("EDITOR","vi")
    answer = 'E'
    while answer == 'E':
        os.system("%s %s" % (editor, temp_filename))
        temp_fh = utils.open_file(temp_filename)
        prod_message = "".join(temp_fh.readlines())
        temp_fh.close()
        print "Prod message:"
        print utils.prefix_multi_line_string(prod_message,"  ",include_blank_lines=1)
        prompt = "[P]rod, Edit, Abandon, Quit ?"
        answer = "XXX"
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            m = re_default_answer.search(prompt)
            if answer == "":
                answer = m.group(1)
            answer = answer[:1].upper()
    os.unlink(temp_filename)
    if answer == 'A':
        return
    elif answer == 'Q':
        end()
        sys.exit(0)
    # Otherwise, do the prodding...
    user_email_address = utils.whoami() + " <%s>" % (
        cnf["Dinstall::MyAdminAddress"])

    Subst = upload.Subst

    Subst["__FROM_ADDRESS__"] = user_email_address
    Subst["__PROD_MESSAGE__"] = prod_message
    Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]

    prod_mail_message = utils.TemplateSubst(
        Subst,cnf["Dir::Templates"]+"/process-new.prod")

    # Send the prod mail
    utils.send_mail(prod_mail_message)

    print "Sent prodding message"

################################################################################

def edit_note(note, upload, session, trainee=False):
    # Write the current data to a temporary file
    (fd, temp_filename) = utils.temp_filename()
    editor = os.environ.get("EDITOR","vi")
    answer = 'E'
    while answer == 'E':
        os.system("%s %s" % (editor, temp_filename))
        temp_file = utils.open_file(temp_filename)
        newnote = temp_file.read().rstrip()
        temp_file.close()
        print "New Note:"
        print utils.prefix_multi_line_string(newnote,"  ")
        prompt = "[D]one, Edit, Abandon, Quit ?"
        answer = "XXX"
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            m = re_default_answer.search(prompt)
            if answer == "":
                answer = m.group(1)
            answer = answer[:1].upper()
    os.unlink(temp_filename)
    if answer == 'A':
        return
    elif answer == 'Q':
        end()
        sys.exit(0)

    comment = NewComment()
    comment.package = upload.pkg.changes["source"]
    comment.version = upload.pkg.changes["version"]
    comment.comment = newnote
    comment.author  = utils.whoami()
    comment.trainee = trainee
    session.add(comment)
    session.commit()

###############################################################################

# suite names DMs can upload to
dm_suites = ['unstable', 'experimental']

def get_newest_source(source, session):
    'returns the newest DBSource object in dm_suites'
    ## the most recent version of the package uploaded to unstable or
    ## experimental includes the field "DM-Upload-Allowed: yes" in the source
    ## section of its control file
    q = session.query(DBSource).filter_by(source = source). \
        filter(DBSource.suites.any(Suite.suite_name.in_(dm_suites))). \
        order_by(desc('source.version'))
    return q.first()

def get_suite_version_by_source(source, session):
    'returns a list of tuples (suite_name, version) for source package'
    q = session.query(Suite.suite_name, DBSource.version). \
        join(Suite.sources).filter_by(source = source)
    return q.all()
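# Roughly the SQL the query above generates (an illustrative sketch; the
# real table and column names come from dak's ORM mapping in dbconn.py):
#   SELECT suite.suite_name, source.version
#     FROM suite JOIN src_associations ON suite.id = src_associations.suite
#                JOIN source ON source.id = src_associations.source
#    WHERE source.source = :source;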

def get_source_by_package_and_suite(package, suite_name, session):
    '''
    returns a DBSource query filtered by DBBinary.package and this package's
    suite_name
    '''
    return session.query(DBSource). \
        join(DBSource.binaries).filter_by(package = package). \
        join(DBBinary.suites).filter_by(suite_name = suite_name)

def get_suite_version_by_package(package, arch_string, session):
    '''
    returns a list of tuples (suite_name, version) for binary package and
    arch_string
    '''
    return session.query(Suite.suite_name, DBBinary.version). \
        join(Suite.binaries).filter_by(package = package). \
        join(DBBinary.architecture). \
        filter(Architecture.arch_string.in_([arch_string, 'all'])).all()

class Upload(object):
    """
    Everything that has to do with processing an upload.

    """
    def __init__(self):
        self.logger = None
        self.pkg = Changes()
        self.reset()

    ###########################################################################

    def reset (self):
        """ Reset a number of internal variables."""

        # Initialize the substitution template map
        cnf = Config()
        self.Subst = {}
        self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
        self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
        self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
        self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]

        self.rejects = []
        self.warnings = []
        self.notes = []

        self.later_check_files = []

        self.pkg.reset()

    def package_info(self):
        """
        Format various messages from this Upload to send to the maintainer.
        """

        msgs = (
            ('Reject Reasons', self.rejects),
            ('Warnings', self.warnings),
            ('Notes', self.notes),
        )

        msg = ''
        for title, messages in msgs:
            if messages:
                msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
        msg += '\n\n'

        return msg

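    # Example result (illustrative):
    #
    #   Reject Reasons:
    #   foo_1.0-1.dsc: invalid source name 'foo bar'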
    ###########################################################################
    def update_subst(self):
        """ Set up the per-package template substitution mappings """

        cnf = Config()

        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not self.pkg.changes.has_key("architecture") or not \
           isinstance(self.pkg.changes["architecture"], dict):
            self.pkg.changes["architecture"] = { "Unknown" : "" }

        # and maintainer2047 may not exist.
        if not self.pkg.changes.has_key("maintainer2047"):
            self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]

        self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
        self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if self.pkg.changes["architecture"].has_key("source") and \
           self.pkg.changes["changedby822"] != "" and \
           (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):

            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
        else:
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")

        # Process policy doesn't set the fingerprint field and I don't want to make it
        # do it for now as I don't want to have to deal with the case where we accepted
        # the package into PU-NEW, but the fingerprint has gone away from the keyring in
        # the meantime so the package will be remarked as rejectable.  Urgh.
        # TODO: Fix this properly
        if self.pkg.changes.has_key('fingerprint'):
            session = DBConn().session()
            fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
            if fpr and self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
                if self.pkg.changes.has_key("sponsoremail"):
                    self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
            session.close()

        if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
            self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if cnf.get("Dinstall::OverrideMaintainer"):
            self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
            self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]

        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
        self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
        self.Subst["__SUITE__"] = ", ".join(self.pkg.changes["distribution"])

    ###########################################################################
    def load_changes(self, filename):
        """
        Load a changes file and set up a dictionary around it. Also checks for
        mandatory fields within.

        @type filename: string
        @param filename: Changes filename, full path.

        @rtype: boolean
        @return: whether the changes file was valid or not.  We may want to
                 reject even if this is True (see what gets put in self.rejects).
                 This is simply to prevent us even trying things later which will
                 fail because we couldn't properly parse the file.
        """
        Cnf = Config()
        self.pkg.changes_file = filename

        # Parse the .changes file into a dictionary
        try:
            self.pkg.changes.update(parse_changes(filename))
        except CantOpenError:
            self.rejects.append("%s: can't read file." % (filename))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except ChangesUnicodeError:
            self.rejects.append("%s: changes file not proper utf-8" % (filename))
            return False

        # Parse the Files field from the .changes into another dictionary
        try:
            self.pkg.files.update(utils.build_file_list(self.pkg.changes))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (filename, format))
            return False

        # Check for mandatory fields
        for i in ("distribution", "source", "binary", "architecture",
                  "version", "maintainer", "files", "changes", "description"):
            if not self.pkg.changes.has_key(i):
                # Avoid undefined errors later
                self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
                return False

        # Strip a source version in brackets from the source field
        if re_strip_srcver.search(self.pkg.changes["source"]):
            self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])

        # Ensure the source field is a valid package name.
        if not re_valid_pkg_name.match(self.pkg.changes["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))

        # Split multi-value fields into a lower-level dictionary
        for i in ("architecture", "distribution", "binary", "closes"):
            o = self.pkg.changes.get(i, "")
            if o != "":
                del self.pkg.changes[i]

            self.pkg.changes[i] = {}

            for j in o.split():
                self.pkg.changes[i][j] = 1
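        # e.g. "Architecture: source amd64" becomes
        # self.pkg.changes["architecture"] = {"source": 1, "amd64": 1}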

        # Fix the Maintainer: field to be RFC822/2047 compatible
        try:
            (self.pkg.changes["maintainer822"],
             self.pkg.changes["maintainer2047"],
             self.pkg.changes["maintainername"],
             self.pkg.changes["maintaineremail"]) = \
                   fix_maintainer (self.pkg.changes["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["maintainer"], msg))

        # ...likewise for the Changed-By: field if it exists.
        try:
            (self.pkg.changes["changedby822"],
             self.pkg.changes["changedby2047"],
             self.pkg.changes["changedbyname"],
             self.pkg.changes["changedbyemail"]) = \
                   fix_maintainer (self.pkg.changes.get("changed-by", ""))
        except ParseMaintError, msg:
            self.pkg.changes["changedby822"] = ""
            self.pkg.changes["changedby2047"] = ""
            self.pkg.changes["changedbyname"] = ""
            self.pkg.changes["changedbyemail"] = ""

            self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["changed-by"], msg))

        # Ensure all the values in Closes: are numbers
        if self.pkg.changes.has_key("closes"):
            for i in self.pkg.changes["closes"].keys():
                if re_isanum.match (i) == None:
                    self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))

        # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
        self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
        self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])

        # Check the .changes is non-empty
        if not self.pkg.files:
            self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
            return False

        # Changes was syntactically valid even if we'll reject
        return True

    ###########################################################################

    def check_distributions(self):
        "Check and map the Distribution field"

        Cnf = Config()

        # Handle suite mappings
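        # SuiteMappings entries look like these illustrative examples (the
        # real values live in dak's configuration file):
        #   map stable proposed-updates
        #   silent-map stable-security proposed-updates
        #   ignore oldstable
        #   propup-version stable-security testing unstable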
        for m in Cnf.ValueList("SuiteMappings"):
            args = m.split()
            mtype = args[0]
            if mtype == "map" or mtype == "silent-map":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    del self.pkg.changes["distribution"][source]
                    self.pkg.changes["distribution"][dest] = 1
                    if mtype != "silent-map":
                        self.notes.append("Mapping %s to %s." % (source, dest))
                if self.pkg.changes.has_key("distribution-version"):
                    if self.pkg.changes["distribution-version"].has_key(source):
                        self.pkg.changes["distribution-version"][source]=dest
            elif mtype == "map-unreleased":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    for arch in self.pkg.changes["architecture"].keys():
                        if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
                            self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
                            del self.pkg.changes["distribution"][source]
                            self.pkg.changes["distribution"][dest] = 1
                            break
            elif mtype == "ignore":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    del self.pkg.changes["distribution"][suite]
                    self.warnings.append("Ignoring %s as a target suite." % (suite))
            elif mtype == "reject":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    self.rejects.append("Uploads to %s are not accepted." % (suite))
            elif mtype == "propup-version":
                # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
                #
                # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
                if self.pkg.changes["distribution"].has_key(args[1]):
                    self.pkg.changes.setdefault("distribution-version", {})
                    for suite in args[2:]:
                        self.pkg.changes["distribution-version"][suite] = suite

        # Ensure there is (still) a target distribution
        if len(self.pkg.changes["distribution"].keys()) < 1:
            self.rejects.append("No valid distribution remaining.")

        # Ensure target distributions exist
        for suite in self.pkg.changes["distribution"].keys():
            if not Cnf.has_key("Suite::%s" % (suite)):
                self.rejects.append("Unknown distribution `%s'." % (suite))

    ###########################################################################

    def binary_file_checks(self, f, session):
        cnf = Config()
        entry = self.pkg.files[f]

        # Extract package control information
        deb_file = utils.open_file(f)
        try:
            control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
        except:
            self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
            deb_file.close()
            # Can't continue, none of the checks on control would work.
            return

        # Check for mandatory "Description:"
        deb_file.seek(0)
        try:
            # A missing Description raises KeyError here, caught below.
            apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
        except:
            self.rejects.append("%s: Missing Description in binary package" % (f))
            return

        deb_file.close()

        # Check for mandatory fields
        for field in [ "Package", "Architecture", "Version" ]:
            if control.Find(field) == None:
                # Can't continue
                self.rejects.append("%s: No %s field in control." % (f, field))
                return
        # Ensure the package name matches the one given in the .changes
        if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
            self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))

        # Validate the package field
        package = control.Find("Package")
        if not re_valid_pkg_name.match(package):
            self.rejects.append("%s: invalid package name '%s'." % (f, package))

        # Validate the version field
        version = control.Find("Version")
        if not re_valid_version.match(version):
            self.rejects.append("%s: invalid version number '%s'." % (f, version))

        # Ensure the architecture of the .deb is one we know about.
        default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
        architecture = control.Find("Architecture")
        upload_suite = self.pkg.changes["distribution"].keys()[0]

        if      architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session = session)] \
            and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session = session)]:
            self.rejects.append("Unknown architecture '%s'." % (architecture))

        # Ensure the architecture of the .deb is one of the ones
        # listed in the .changes.
        if not self.pkg.changes["architecture"].has_key(architecture):
            self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))

        # Sanity-check the Depends field
        depends = control.Find("Depends")
        if depends == '':
            self.rejects.append("%s: Depends field is empty." % (f))

        # Sanity-check the Provides field
        provides = control.Find("Provides")
        if provides:
            provide = re_spacestrip.sub('', provides)
            if provide == '':
                self.rejects.append("%s: Provides field is empty." % (f))
            prov_list = provide.split(",")
            for prov in prov_list:
                if not re_valid_pkg_name.match(prov):
                    self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))

        # If there is a Built-Using field, we need to check we can find the
        # exact source version
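        # e.g. "Built-Using: grub2 (= 1.99-9)" (illustrative value)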
        built_using = control.Find("Built-Using")
        if built_using:
            try:
                entry["built-using"] = []
                for dep in apt_pkg.parse_depends(built_using):
                    bu_s, bu_v, bu_e = dep[0]
                    # Check that it's an exact match dependency and we have
                    # some form of version
                    if bu_e != "=" or len(bu_v) < 1:
                        self.rejects.append("%s: Built-Using contains non strict dependency (%s %s %s)" % (f, bu_s, bu_e, bu_v))
                    else:
                        # Find the source id for this version
                        bu_so = get_sources_from_name(bu_s, version=bu_v, session = session)
                        if len(bu_so) != 1:
                            self.rejects.append("%s: Built-Using (%s = %s): Cannot find source package" % (f, bu_s, bu_v))
                        else:
                            entry["built-using"].append( (bu_so[0].source, bu_so[0].version, ) )

            except ValueError, e:
                self.rejects.append("%s: Cannot parse Built-Using field: %s" % (f, str(e)))


        # Check the section & priority match those given in the .changes (non-fatal)
        if     control.Find("Section") and entry["section"] != "" \
           and entry["section"] != control.Find("Section"):
            self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
                                (f, control.Find("Section", ""), entry["section"]))
        if control.Find("Priority") and entry["priority"] != "" \
           and entry["priority"] != control.Find("Priority"):
            self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
                                (f, control.Find("Priority", ""), entry["priority"]))

        entry["package"] = package
        entry["architecture"] = architecture
        entry["version"] = version
        entry["maintainer"] = control.Find("Maintainer", "")

        if f.endswith(".udeb"):
            self.pkg.files[f]["dbtype"] = "udeb"
        elif f.endswith(".deb"):
            self.pkg.files[f]["dbtype"] = "deb"
        else:
            self.rejects.append("%s is neither a .deb nor a .udeb." % (f))

        entry["source"] = control.Find("Source", entry["package"])

        # Get the source version
        source = entry["source"]
        source_version = ""

        if source.find("(") != -1:
            m = re_extract_src_version.match(source)
            source = m.group(1)
            source_version = m.group(2)

        if not source_version:
            source_version = self.pkg.files[f]["version"]

        entry["source package"] = source
        entry["source version"] = source_version

        # Ensure the filename matches the contents of the .deb
        m = re_isadeb.match(f)
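        # e.g. "foo_1.0-1_amd64.deb" -> groups ("foo", "1.0-1", "amd64")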

        #  package name
        file_package = m.group(1)
        if entry["package"] != file_package:
            self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
                                (f, file_package, entry["dbtype"], entry["package"]))
        epochless_version = re_no_epoch.sub('', control.Find("Version"))

        #  version
        file_version = m.group(2)
        if epochless_version != file_version:
            self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
                                (f, file_version, entry["dbtype"], epochless_version))

        #  architecture
        file_architecture = m.group(3)
        if entry["architecture"] != file_architecture:
            self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
                                (f, file_architecture, entry["dbtype"], entry["architecture"]))
        # Check that the source exists
        source_version = entry["source version"]
        source_package = entry["source package"]
        if self.pkg.changes["architecture"].has_key("source"):
            if source_version != self.pkg.changes["version"]:
                self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
                                    (source_version, f, self.pkg.changes["version"]))
        else:
            # Check in the SQL database
            if not source_exists(source_package, source_version, suites = \
                self.pkg.changes["distribution"].keys(), session = session):
                # Check in one of the other directories
                source_epochless_version = re_no_epoch.sub('', source_version)
                dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
                if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
                    entry["byhand"] = 1
                elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
                    entry["new"] = 1
                else:
                    dsc_file_exists = False
                    for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
                        if cnf.has_key("Dir::Queue::%s" % (myq)):
                            if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
                                dsc_file_exists = True
                                break

                    if not dsc_file_exists:
                        self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))

        # Check the version and for file overwrites
        self.check_binary_against_db(f, session)

    def source_file_checks(self, f, session):
        entry = self.pkg.files[f]

        m = re_issource.match(f)
        if not m:
            return

        entry["package"] = m.group(1)
        entry["version"] = m.group(2)
        entry["type"] = m.group(3)

        # Ensure the source package name matches the Source field in the .changes
        if self.pkg.changes["source"] != entry["package"]:
            self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))

        # Ensure the source version matches the version in the .changes file
        if re_is_orig_source.match(f):
            changes_version = self.pkg.changes["chopversion2"]
        else:
            changes_version = self.pkg.changes["chopversion"]

        if changes_version != entry["version"]:
            self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))

        # Ensure the .changes lists source in the Architecture field
        if not self.pkg.changes["architecture"].has_key("source"):
            self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))

        # Check the signature of a .dsc file
        if entry["type"] == "dsc":
            # check_signature returns either:
            #  (None, [list, of, rejects]) or (signature, [])
            (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
            for j in rejects:
                self.rejects.append(j)

        entry["architecture"] = "source"

    def per_suite_file_checks(self, f, suite, session):
        cnf = Config()
        entry = self.pkg.files[f]

        # Skip byhand
        if entry.has_key("byhand"):
            return

        # Check we have fields we need to do these checks
        oktogo = True
        for m in ['component', 'package', 'priority', 'size', 'md5sum']:
            if not entry.has_key(m):
                self.rejects.append("file '%s' does not have field %s set" % (f, m))
                oktogo = False

        if not oktogo:
            return

        # Handle component mappings
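        # e.g. a ComponentMappings entry "non-US/main main" (an illustrative,
        # historical value) remaps the component and records the original.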
        for m in cnf.ValueList("ComponentMappings"):
            (source, dest) = m.split()
            if entry["component"] == source:
                entry["original component"] = source
                entry["component"] = dest

        # Ensure the component is valid for the target suite
968         if cnf.has_key("Suite:%s::Components" % (suite)) and \
969            entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
970             self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
971             return
972
973         # Validate the component
974         if not get_component(entry["component"], session):
975             self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
976             return
977
978         # See if the package is NEW
979         if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
980             entry["new"] = 1
981
982         # Validate the priority
983         if entry["priority"].find('/') != -1:
984             self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))
985
986         # Determine the location
987         location = cnf["Dir::Pool"]
988         l = get_location(location, entry["component"], session=session)
989         if l is None:
990             self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
991             entry["location id"] = -1
992         else:
993             entry["location id"] = l.location_id
994
995         # Check the md5sum & size against existing files (if any)
996         entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])

        found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
                                         entry["size"], entry["md5sum"], entry["location id"])

        if found is None:
            self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
        elif found is False and poolfile is not None:
            self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
        else:
            if poolfile is None:
                entry["files id"] = None
            else:
                entry["files id"] = poolfile.file_id

        # Check for packages that have moved from one component to another
        entry['suite'] = suite
        arch_list = [entry["architecture"], 'all']
        component = get_component_by_package_suite(self.pkg.files[f]['package'], \
            [suite], arch_list = arch_list, session = session)
        if component is not None:
            entry["othercomponents"] = component

    def check_files(self, action=True):
        file_keys = self.pkg.files.keys()
        holding = Holding()
        cnf = Config()

        if action:
            cwd = os.getcwd()
            os.chdir(self.pkg.directory)
            for f in file_keys:
                ret = holding.copy_to_holding(f)
                if ret is not None:
                    self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)

            os.chdir(cwd)

        # check we already know the changes file
        # [NB: this check must be done post-suite mapping]
        base_filename = os.path.basename(self.pkg.changes_file)

        session = DBConn().session()

        try:
            dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
            # if in the pool or in a queue other than unchecked, reject
            if dbc.in_queue is None \
                   or dbc.in_queue.queue_name not in ["unchecked", "newstage"]:
                self.rejects.append("%s file already known to dak" % base_filename)
        except NoResultFound, e:
            # not known, good
            pass

        has_binaries = False
        has_source = False

        for f, entry in self.pkg.files.items():
            # Ensure the file does not already exist in one of the accepted directories
            for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                if not cnf.has_key("Dir::Queue::%s" % (d)): continue
                if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
                    self.rejects.append("%s file already exists in the %s directory." % (f, d))

            if not re_taint_free.match(f):
                self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))

            # Check the file is readable
            if os.access(f, os.R_OK) == 0:
                # When running in -n, copy_to_holding() won't have
                # generated the reject_message, so we need to.
                if action:
                    if os.path.exists(f):
                        self.rejects.append("Can't read `%s'. [permission denied]" % (f))
                    else:
                        # Don't directly reject, mark to check later to deal with orig's
                        # we can find in the pool
                        self.later_check_files.append(f)
                entry["type"] = "unreadable"
                continue

            # If it's byhand skip remaining checks
            if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Checks for a binary package...
            elif re_isadeb.match(f):
                has_binaries = True
                entry["type"] = "deb"

                # This routine appends to self.rejects/warnings as appropriate
                self.binary_file_checks(f, session)

            # Checks for a source package...
            elif re_issource.match(f):
                has_source = True

                # This routine appends to self.rejects/warnings as appropriate
                self.source_file_checks(f, session)

            # Not a binary or source package?  Assume byhand...
            else:
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Per-suite file checks
            entry["oldfiles"] = {}
            for suite in self.pkg.changes["distribution"].keys():
                self.per_suite_file_checks(f, suite, session)

        session.close()

        # If the .changes file says it has source, it must have source.
        if self.pkg.changes["architecture"].has_key("source"):
            if not has_source:
1113                 self.rejects.append("no source found and Architecture line in changes mention source.")

            if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
                self.rejects.append("source only uploads are not supported.")

    ###########################################################################

    def __dsc_filename(self):
        """
        Returns: (Status, Dsc_Filename)
        where
          Status: Boolean; True when there was no error, False otherwise
          Dsc_Filename: String; name of the dsc file if Status is True, reason for the error otherwise
        """
        dsc_filename = None

        # find the dsc
        for name, entry in self.pkg.files.items():
            if entry.has_key("type") and entry["type"] == "dsc":
                if dsc_filename:
                    return False, "cannot process a .changes file with multiple .dsc's."
                else:
                    dsc_filename = name

        if not dsc_filename:
            return False, "source uploads must contain a dsc file"

        return True, dsc_filename

    def load_dsc(self, action=True, signing_rules=1):
        """
        Find and load the dsc from self.pkg.files into self.dsc

        Returns: (Status, Reason)
        where
          Status: Boolean; True when there was no error, False otherwise
          Reason: String; When Status is False this describes the error
        """

        # find the dsc
        (status, dsc_filename) = self.__dsc_filename()
        if not status:
            # If status is false, dsc_filename has the reason
            return False, dsc_filename

        try:
            self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=signing_rules, dsc_file=1))
        except CantOpenError:
            if not action:
                return False, "%s: can't read file." % (dsc_filename)
        except ParseChangesError, line:
            return False, "%s: parse error, can't grok: %s." % (dsc_filename, line)
        except InvalidDscError, line:
            return False, "%s: syntax error on line %s." % (dsc_filename, line)
        except ChangesUnicodeError:
            return False, "%s: dsc file not proper utf-8." % (dsc_filename)

        return True, None

    ###########################################################################

    def check_dsc(self, action=True, session=None):
        """Returns bool indicating whether or not the source changes are valid"""
        # Ensure there is source to check
        if not self.pkg.changes["architecture"].has_key("source"):
            return True

        (status, reason) = self.load_dsc(action=action)
        if not status:
            self.rejects.append(reason)
            return False
        (status, dsc_filename) = self.__dsc_filename()
        if not status:
            # If status is false, dsc_filename has the reason
            self.rejects.append(dsc_filename)
            return False

        # Build up the file list of files mentioned by the .dsc
        try:
            self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
        except NoFilesFieldError:
            self.rejects.append("%s: no Files: field." % (dsc_filename))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
            return False

        # Enforce mandatory fields
        for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
            if not self.pkg.dsc.has_key(i):
                self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
                return False

        # Validate the source and version fields
        if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
        if not re_valid_version.match(self.pkg.dsc["version"]):
            self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))

        # Only a limited list of source formats are allowed in each suite
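        # e.g. the allowed set for a suite might be "1.0", "3.0 (quilt)" and
        # "3.0 (native)" (illustrative; the per-suite list lives in the DB)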
        for dist in self.pkg.changes["distribution"].keys():
            allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
            if self.pkg.dsc["format"] not in allowed:
                self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))

        # Validate the Maintainer field
        try:
            # We ignore the return value
            fix_maintainer(self.pkg.dsc["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                                 % (dsc_filename, self.pkg.dsc["maintainer"], msg))

        # Validate the build-depends field(s)
        for field_name in [ "build-depends", "build-depends-indep" ]:
            field = self.pkg.dsc.get(field_name)
            if field:
                # Have apt try to parse them...
                try:
                    apt_pkg.ParseSrcDepends(field)
                except:
                    self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))

        # Ensure the version number in the .dsc matches the version number in the .changes
        epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        changes_version = self.pkg.files[dsc_filename]["version"]

        if epochless_dsc_version != changes_version:
1244             self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
1245
        # Ensure the Files field contains only what's expected
1247         self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))
1248
1249         # Ensure source is newer than existing source in target suites
1250         session = DBConn().session()
1251         self.check_source_against_db(dsc_filename, session)
1252         self.check_dsc_against_db(dsc_filename, session)
1253
1254         dbchg = get_dbchange(self.pkg.changes_file, session)
1255
1256         # Finally, check if we're missing any files
1257         for f in self.later_check_files:
1259             # Check if we've already processed this file if we have a dbchg object
1260             ok = False
1261             if dbchg:
1262                 for pf in dbchg.files:
1263                     if pf.filename == f and pf.processed:
1264                         self.notes.append('%s was already processed so we can go ahead' % f)
1265                         ok = True
1266                         del self.pkg.files[f]
1267             if not ok:
1268                 self.rejects.append("Could not find file %s referenced in changes" % f)
1269
1270         session.close()
1271
1272         return True
1273
1274     ###########################################################################
1275
1276     def get_changelog_versions(self, source_dir):
1277         """Extracts a the source package and (optionally) grabs the
1278         version history out of debian/changelog for the BTS."""
1279
1280         cnf = Config()
1281
1282         # Find the .dsc (again)
1283         dsc_filename = None
1284         for f in self.pkg.files.keys():
1285             if self.pkg.files[f]["type"] == "dsc":
1286                 dsc_filename = f
1287
1288         # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1289         if not dsc_filename:
1290             return
1291
1292         # Create a symlink mirror of the source files in our temporary directory
1293         for f in self.pkg.files.keys():
1294             m = re_issource.match(f)
1295             if m:
1296                 src = os.path.join(source_dir, f)
1297                 # If a file is missing for whatever reason, give up.
1298                 if not os.path.exists(src):
1299                     return
1300                 ftype = m.group(3)
1301                 if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
1302                    self.pkg.orig_files[f].has_key("path"):
1303                     continue
1304                 dest = os.path.join(os.getcwd(), f)
1305                 os.symlink(src, dest)
1306
1307         # If the orig files are not a part of the upload, create symlinks to the
1308         # existing copies.
1309         for orig_file in self.pkg.orig_files.keys():
1310             if not self.pkg.orig_files[orig_file].has_key("path"):
1311                 continue
1312             dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
1313             os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
1314
1315         # Extract the source
1316         try:
1317             unpacked = UnpackedSource(dsc_filename)
1318         except Exception:
1319             self.rejects.append("'dpkg-source -x' failed for %s." % dsc_filename)
1320             return
1321
1322         if not cnf.Find("Dir::Queue::BTSVersionTrack"):
1323             return
1324
1325         # Get the upstream version
1326         upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1327         if re_strip_revision.search(upstr_version):
1328             upstr_version = re_strip_revision.sub('', upstr_version)
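        # e.g. "1.2.3-4" becomes "1.2.3"; a native version like "1.2.3" has
        # no Debian revision and is left as-is.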
1329
1330         # Ensure the changelog file exists
1331         changelog_file = unpacked.get_changelog_file()
1332         if changelog_file is None:
1333             self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
1334             return
1335
1336         # Parse the changelog
1337         self.pkg.dsc["bts changelog"] = ""
1338         for line in changelog_file.readlines():
1339             m = re_changelog_versions.match(line)
1340             if m:
1341                 self.pkg.dsc["bts changelog"] += line
1342         changelog_file.close()
1343         unpacked.cleanup()
1344
1345         # Check we found at least one revision in the changelog
1346         if not self.pkg.dsc["bts changelog"]:
1347             self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1348
1349     def check_source(self):
1350         # Bail out if there's no source
1352         if not self.pkg.changes["architecture"].has_key("source"):
1353             return
1354
1355         tmpdir = utils.temp_dirname()
1356
1357         # Move into the temporary directory
1358         cwd = os.getcwd()
1359         os.chdir(tmpdir)
1360
1361         # Get the changelog version history
1362         self.get_changelog_versions(cwd)
1363
1364         # Move back and cleanup the temporary tree
1365         os.chdir(cwd)
1366
1367         try:
1368             shutil.rmtree(tmpdir)
1369         except OSError, e:
1370             if e.errno != errno.EACCES:
1371                 print "foobar"
1372                 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1373
1374             self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1375             # We probably have u-r or u-w directories so chmod everything
1376             # and try again.
1377             cmd = "chmod -R u+rwx %s" % (tmpdir)
1378             result = os.system(cmd)
1379             if result != 0:
1380                 utils.fubar("'%s' failed with result %s." % (cmd, result))
1381             shutil.rmtree(tmpdir)
1382         except Exception, e:
1383             utils.fubar("%s: couldn't remove tmp dir for source tree: %s" % (self.pkg.dsc["source"], e))
1385
1386     ###########################################################################
1387     def ensure_hashes(self):
1388         # Make sure we recognise the format of the Files: field in the .changes
1389         format = self.pkg.changes.get("format", "0.0").split(".", 1)
1390         if len(format) == 2:
1391             format = int(format[0]), int(format[1])
1392         else:
1393             format = int(float(format[0])), 0
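        # e.g. "Format: 1.8" yields (1, 8); a bare "1" yields (1, 0)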
1394
1395         # We need to deal with the original changes blob, as the fields we need
1396         # might not be in the changes dict serialised into the .dak anymore.
1397         orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1398
1399         # Copy the checksums over to the current changes dict.  This will keep
1400         # the existing modifications to it intact.
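        # (these are the Checksums-Sha1 / Checksums-Sha256 blocks from the
        # original upload)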
1401         for field in orig_changes:
1402             if field.startswith('checksums-'):
1403                 self.pkg.changes[field] = orig_changes[field]
1404
1405         # Check for unsupported hashes
1406         for j in utils.check_hash_fields(".changes", self.pkg.changes):
1407             self.rejects.append(j)
1408
1409         for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1410             self.rejects.append(j)
1411
1412         # We have to calculate a hash ourselves if the .changes format predates the
1413         # version that hash first appeared in, rather than require it to exist in the changes file
1414         for hashname, hashfunc, version in utils.known_hashes:
1415             # TODO: Move _ensure_changes_hash into this class
1416             for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1417                 self.rejects.append(j)
1418             if "source" in self.pkg.changes["architecture"]:
1419                 # TODO: Move _ensure_dsc_hash into this class
1420                 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1421                     self.rejects.append(j)
1422
1423     def check_hashes(self):
1424         for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1425             self.rejects.append(m)
1426
1427         for m in utils.check_size(".changes", self.pkg.files):
1428             self.rejects.append(m)
1429
1430         for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1431             self.rejects.append(m)
1432
1433         for m in utils.check_size(".dsc", self.pkg.dsc_files):
1434             self.rejects.append(m)
1435
1436         self.ensure_hashes()
1437
1438     ###########################################################################
1439
1440     def ensure_orig(self, target_dir='.', session=None):
1441         """
1442         Ensures that all orig files mentioned in the .dsc are present
1443         in target_dir. If they do not exist, they are symlinked into place.
1444
1445         A list containing the symlinks that were created is returned (so
1446         they can be removed later).
1447         """
1448
1449         symlinked = []
1450         cnf = Config()
1451
1452         for filename, entry in self.pkg.dsc_files.iteritems():
1453             if not re_is_orig_source.match(filename):
1454                 # File is not an orig; ignore
1455                 continue
1456
1457             if os.path.exists(filename):
1458                 # File exists, no need to continue
1459                 continue
1460
1461             def symlink_if_valid(path):
1462                 f = utils.open_file(path)
1463                 md5sum = apt_pkg.md5sum(f)
1464                 f.close()
1465
1466                 fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
1467                 expected = (int(entry['size']), entry['md5sum'])
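                # A candidate is only trusted if both its size and md5sum
                # match the .dsc entry.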
1468
1469                 if fingerprint != expected:
1470                     return False
1471
1472                 dest = os.path.join(target_dir, filename)
1473
1474                 os.symlink(path, dest)
1475                 symlinked.append(dest)
1476
1477                 return True
1478
1479             session_ = session
1480             if session is None:
1481                 session_ = DBConn().session()
1482
1483             found = False
1484
1485             # Look in the pool
1486             for poolfile in get_poolfile_like_name('%s' % filename, session_):
1487                 poolfile_path = os.path.join(
1488                     poolfile.location.path, poolfile.filename
1489                 )
1490
1491                 if symlink_if_valid(poolfile_path):
1492                     found = True
1493                     break
1494
1495             if session is None:
1496                 session_.close()
1497
1498             if found:
1499                 continue
1500
1501             # Look in some other queues for the file
1502             queues = ('New', 'Byhand', 'ProposedUpdates',
1503                 'OldProposedUpdates', 'Embargoed', 'Unembargoed')
1504
1505             for queue in queues:
1506                 if not cnf.get('Dir::Queue::%s' % queue):
1507                     continue
1508
1509                 queuefile_path = os.path.join(
1510                     cnf['Dir::Queue::%s' % queue], filename
1511                 )
1512
1513                 if not os.path.exists(queuefile_path):
1514                     # Does not exist in this queue
1515                     continue
1516
1517                 if symlink_if_valid(queuefile_path):
1518                     break
1519
1520         return symlinked
1521
1522     ###########################################################################
1523
1524     def check_lintian(self):
1525         """
1526         Extends self.rejects by checking the output of lintian against tags
1527         specified in Dinstall::LintianTags.
1528         """
1529
1530         cnf = Config()
1531
1532         # Don't reject binary uploads
1533         if not self.pkg.changes['architecture'].has_key('source'):
1534             return
1535
1536         # Only check some distributions
1537         for dist in ('unstable', 'experimental'):
1538             if dist in self.pkg.changes['distribution']:
1539                 break
1540         else:
1541             return
1542
1543         # If we do not have a tagfile, don't do anything
1544         tagfile = cnf.get("Dinstall::LintianTags")
1545         if not tagfile:
1546             return
1547
1548         # Parse the yaml file
1549         sourcefile = open(tagfile, 'r')
1550         sourcecontent = sourcefile.read()
1551         sourcefile.close()
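        # The tag file is expected to hold lists of tags under a top-level
        # "lintian" key, grouped by severity, roughly (group names here are
        # illustrative):
        #
        #     lintian:
        #       nonfatal:
        #         - some-tag
        #       fatal:
        #         - some-other-tag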
1552
1553         try:
1554             lintiantags = yaml.safe_load(sourcecontent)['lintian']
1555         except yaml.YAMLError, msg:
1556             utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
1557             return
1558
1559         # Try and find all orig mentioned in the .dsc
1560         symlinked = self.ensure_orig()
1561
1562         # Setup the input file for lintian
1563         fd, temp_filename = utils.temp_filename()
1564         temptagfile = os.fdopen(fd, 'w')
1565         for tags in lintiantags.values():
1566             temptagfile.writelines(['%s\n' % x for x in tags])
1567         temptagfile.close()
1568
1569         try:
1570             cmd = "lintian --show-overrides --tags-from-file %s %s" % \
1571                 (temp_filename, self.pkg.changes_file)
1572
1573             result, output = commands.getstatusoutput(cmd)
1574         finally:
1575             # Remove our tempfile and any symlinks we created
1576             os.unlink(temp_filename)
1577
1578             for symlink in symlinked:
1579                 os.unlink(symlink)
1580
1581         if result == 2:
1582             utils.warn("lintian failed for %s [return code: %s]." % \
1583                 (self.pkg.changes_file, result))
1584             utils.warn(utils.prefix_multi_line_string(output, \
1585                 " [possible output:] "))
1586
1587         def log(*txt):
1588             if self.logger:
1589                 self.logger.log(
1590                     [self.pkg.changes_file, "check_lintian"] + list(txt)
1591                 )
1592
1593         # Generate messages
1594         parsed_tags = parse_lintian_output(output)
1595         self.rejects.extend(
1596             generate_reject_messages(parsed_tags, lintiantags, log=log)
1597         )
1598
1599     ###########################################################################
1600     def check_urgency(self):
1601         cnf = Config()
1602         if self.pkg.changes["architecture"].has_key("source"):
1603             if not self.pkg.changes.has_key("urgency"):
1604                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1605             self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1606             if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1607                 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1608                                      (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1609                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1610
1611     ###########################################################################
1612
1613     # Sanity check the time stamps of files inside debs.
1614     # [Files in the near future cause ugly warnings and extreme time
1615     #  travel can cause errors on extraction]
1616
1617     def check_timestamps(self):
1618         Cnf = Config()
1619
1620         future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1621         past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1622         tar = TarTime(future_cutoff, past_cutoff)
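        # e.g. with Dinstall::FutureTimeTravelGrace at 86400 (one day) and
        # Dinstall::PastCutoffYear at "1984", anything stamped more than a day
        # ahead of now or before 1984-01-01 is flagged below (values are
        # illustrative, not the archive defaults).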
1623
1624         for filename, entry in self.pkg.files.items():
1625             if entry["type"] == "deb":
1626                 tar.reset()
1627                 try:
1628                     deb_file = utils.open_file(filename)
1629                     apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
1630                     deb_file.seek(0)
1631                     try:
1632                         apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
1633                     except SystemError, e:
1634                         # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
1635                         if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
1636                             raise
1637                         deb_file.seek(0)
1638                         apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")
1639
1640                     deb_file.close()
1641
1642                     future_files = tar.future_files.keys()
1643                     if future_files:
1644                         num_future_files = len(future_files)
1645                         future_file = future_files[0]
1646                         future_date = tar.future_files[future_file]
1647                         self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1648                                % (filename, num_future_files, future_file, time.ctime(future_date)))
1649
1650                     ancient_files = tar.ancient_files.keys()
1651                     if ancient_files:
1652                         num_ancient_files = len(ancient_files)
1653                         ancient_file = ancient_files[0]
1654                         ancient_date = tar.ancient_files[ancient_file]
1655                         self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1656                                % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1657                 except Exception, e:
1658                     self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, type(e).__name__, e))
1659
1660     def check_if_upload_is_sponsored(self, uid_email, uid_name):
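        """Returns True if the upload looks sponsored, i.e. the signing key
        belongs to neither the Maintainer nor the Changed-By person.  Where
        it can be worked out, the sponsor's address is also recorded in
        changes["sponsoremail"]."""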
1661         if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1662             sponsored = False
1663         elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1664             sponsored = False
1665             if uid_name == "":
1666                 sponsored = True
1667         else:
1668             sponsored = True
1669             if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1670                 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1671                 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1672                     self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1673                     self.pkg.changes["sponsoremail"] = uid_email
1674
1675         return sponsored
1676
1677
1678     ###########################################################################
1679     # check_signed_by_key checks
1680     ###########################################################################
1681
1682     def check_signed_by_key(self):
1683         """Ensure the .changes is signed by an authorized uploader."""
1684         session = DBConn().session()
1685
1686         # First of all we check that the person has proper upload permissions
1687         # and that this upload isn't blocked
1688         fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1689
1690         if fpr is None:
1691             self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1692             return
1693
1694         # TODO: Check that import-keyring adds UIDs properly
1695         if not fpr.uid:
1696             self.rejects.append("Cannot find uid for fingerprint %s.  Please contact ftpmaster@debian.org" % fpr.fingerprint)
1697             return
1698
1699         # Check that the fingerprint which uploaded has permission to do so
1700         self.check_upload_permissions(fpr, session)
1701
1702         # Check that this package is not in a transition
1703         self.check_transition(session)
1704
1705         session.close()
1706
1707
1708     def check_upload_permissions(self, fpr, session):
1709         # Check any one-off upload blocks
1710         self.check_upload_blocks(fpr, session)
1711
1712         # DM is a special case unfortunately, so we check it first
1714         # (keys with no source access get more access than DMs in one
1715         #  way; DMs can only upload for their packages whether source
1716         #  or binary, whereas keys with no access might be able to
1717         #  upload some binaries)
1718         if fpr.source_acl.access_level == 'dm':
1719             self.check_dm_upload(fpr, session)
1720         else:
1721             # Check source-based permissions for other types
1722             if self.pkg.changes["architecture"].has_key("source") and \
1723                 fpr.source_acl.access_level is None:
1724                 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1725                 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1726                 self.rejects.append(rej)
1727                 return
1728             # If not a DM, we allow full upload rights
1729             uid_email = "%s@debian.org" % (fpr.uid.uid)
1730             self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1731
1732
1733         # Check binary upload permissions
1734         # By this point we know that DMs can't have got here unless they
1735         # are allowed to deal with the package concerned so just apply
1736         # normal checks
1737         if fpr.binary_acl.access_level == 'full':
1738             return
1739
1740         # Otherwise we're in the map case
1741         tmparches = self.pkg.changes["architecture"].copy()
1742         tmparches.pop('source', None)
1743
1744         for bam in fpr.binary_acl_map:
1745             tmparches.pop(bam.architecture.arch_string, None)
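        # Whatever remains in tmparches is an architecture this key has no
        # ACL entry for, e.g. an arm64 .deb from a key whose map only covers
        # amd64 and i386 (architectures here purely illustrative).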
1746
1747         if len(tmparches.keys()) > 0:
1748             if fpr.binary_reject:
1749                 rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1750                 rej += "\narchitectures involved are: %s" % ",".join(tmparches.keys())
1751                 self.rejects.append(rej)
1752             else:
1753                 # TODO: This is where we'll implement reject vs throw away binaries later
1754                 rej = "Uhm.  I'm meant to throw away the binaries now but that's not implemented yet"
1755                 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1756                 rej += "\nFingerprint: %s" % (fpr.fingerprint)
1757                 self.rejects.append(rej)
1758
1759
1760     def check_upload_blocks(self, fpr, session):
1761         """Check whether any upload blocks apply to this source, source
1762            version, uid / fpr combination"""
1763
1764         def block_rej_template(fb):
1765             rej = 'Manual upload block in place for package %s' % fb.source
1766             if fb.version is not None:
1767                 rej += ', version %s' % fb.version
1768             return rej
1769
1770         for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1771             # version is None if the block applies to all versions
1772             if fb.version is None or fb.version == self.pkg.changes['version']:
1773                 # Check both fpr and uid - either is enough to cause a reject
1774                 if fb.fpr is not None:
1775                     if fb.fpr.fingerprint == fpr.fingerprint:
1776                         self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1777                 if fb.uid is not None:
1778                     if fb.uid == fpr.uid:
1779                         self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1780
1781
1782     def check_dm_upload(self, fpr, session):
1783         # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
1784         ## none of the uploaded packages are NEW
1785         rej = False
1786         for f in self.pkg.files.keys():
1787             if self.pkg.files[f].has_key("byhand"):
1788                 self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
1789                 rej = True
1790             if self.pkg.files[f].has_key("new"):
1791                 self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
1792                 rej = True
1793
1794         if rej:
1795             return
1796
1797         r = get_newest_source(self.pkg.changes["source"], session)
1798
1799         if r is None:
1800             rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
1801             self.rejects.append(rej)
1802             return
1803
1804         if not r.dm_upload_allowed:
1805             rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
1806             self.rejects.append(rej)
1807             return
1808
1809         ## the Maintainer: field of the uploaded .changes file corresponds with
1810         ## the owner of the key used (ie, non-developer maintainers may not sponsor
1811         ## uploads)
1812         if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
1813             self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
1814
1815         ## the most recent version of the package uploaded to unstable or
1816         ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
1817         ## non-developer maintainers cannot NMU or hijack packages)
1818
1819         # srcuploaders includes the maintainer
1820         accept = False
1821         for sup in r.srcuploaders:
1822             (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
1823             # Eww - I hope we never have two people with the same name in Debian
1824             if email == fpr.uid.uid or name == fpr.uid.name:
1825                 accept = True
1826                 break
1827
1828         if not accept:
1829             self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
1830             return
1831
1832         ## none of the packages are being taken over from other source packages
1833         for b in self.pkg.changes["binary"].keys():
1834             for suite in self.pkg.changes["distribution"].keys():
1835                 for s in get_source_by_package_and_suite(b, suite, session):
1836                     if s.source != self.pkg.changes["source"]:
1837                         self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s.source, suite))
1838
1839
1840
1841     def check_transition(self, session):
1842         cnf = Config()
1843
1844         sourcepkg = self.pkg.changes["source"]
1845
1846         # No sourceful upload -> no need to do anything else, direct return
1847         # We also only work on unstable uploads, not experimental ones or
1848         # those going to some proposed-updates queue
1849         if "source" not in self.pkg.changes["architecture"] or \
1850            "unstable" not in self.pkg.changes["distribution"]:
1851             return
1852
1853         # Also, only check if there is a transitions file defined (and it
1854         # actually exists).
1855         transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1856         if transpath == "" or not os.path.exists(transpath):
1857             return
1858
1859         # Parse the yaml file
1860         sourcefile = open(transpath, 'r')
1861         sourcecontent = sourcefile.read()
1862         try:
1863             transitions = yaml.safe_load(sourcecontent)
1864         except yaml.YAMLError, msg:
1865             # This shouldn't happen, there is a wrapper to edit the file which
1866             # checks it, but we would rather be safe than end up rejecting
1867             # everything.
1868             utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1869             return
1870
1871         # Now look through all defined transitions
1872         for trans in transitions:
1873             t = transitions[trans]
1874             source = t["source"]
1875             expected = t["new"]
1876
1877             # Will be None if nothing is in testing.
1878             current = get_source_in_suite(source, "testing", session)
1879             if current is not None:
1880                 compare = apt_pkg.VersionCompare(current.version, expected)
1881
1882             if current is None or compare < 0:
1883                 # This is still valid, the current version in testing is older than
1884                 # the new version we wait for, or there is none in testing yet
1885
1886                 # Check if the source we look at is affected by this.
1887                 if sourcepkg in t['packages']:
1888                     # The source is affected, let's reject it.
1889
1890                     rejectmsg = "%s: part of the %s transition.\n\n" % (
1891                         sourcepkg, trans)
1892
1893                     if current is not None:
1894                         currentlymsg = "at version %s" % (current.version)
1895                     else:
1896                         currentlymsg = "not present in testing"
1897
1898                     rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1899
1900                     rejectmsg += "\n".join(textwrap.wrap("""Your package
1901 is part of a testing transition designed to get %s migrated (it is
1902 currently %s, we need version %s).  This transition is managed by the
1903 Release Team, and %s is the Release-Team member responsible for it.
1904 Please mail debian-release@lists.debian.org or contact %s directly if you
1905 need further assistance.  You might want to upload to experimental until this
1906 transition is done."""
1907                             % (source, currentlymsg, expected, t["rm"], t["rm"])))
1908
1909                     self.rejects.append(rejectmsg)
1910                     return
1911
1912     ###########################################################################
1913     # End check_signed_by_key checks
1914     ###########################################################################
1915
1916     def build_summaries(self):
1917         """ Build a summary of changes the upload introduces. """
1918
1919         (byhand, new, summary, override_summary) = self.pkg.file_summary()
1920
1921         short_summary = summary
1922
1923         # This is for direport's benefit...
1924         f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1925
1926         if byhand or new:
1927             summary += "Changes: " + f
1928
1929         summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1930
1931         summary += self.announce(short_summary, 0)
1932
1933         return (summary, short_summary)
1934
1935     ###########################################################################
1936
1937     def close_bugs(self, summary, action):
1938         """
1939         Send mail to close bugs as instructed by the closes field in the changes file.
1940         Also add a line to summary if any work was done.
1941
1942         @type summary: string
1943         @param summary: summary text, as given by L{build_summaries}
1944
1945         @type action: bool
1946         @param action: If set to false, no real action will be done.
1947
1948         @rtype: string
1949         @return: summary. If action was taken, extended by the list of closed bugs.
1950
1951         """
1952
1953         template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1954
1955         bugs = self.pkg.changes["closes"].keys()
1956
1957         if not bugs:
1958             return summary
1959
1960         bugs.sort()
1961         summary += "Closing bugs: "
1962         for bug in bugs:
1963             summary += "%s " % (bug)
1964             if action:
1965                 self.update_subst()
1966                 self.Subst["__BUG_NUMBER__"] = bug
1967                 if self.pkg.changes["distribution"].has_key("stable"):
1968                     self.Subst["__STABLE_WARNING__"] = """
1969 Note that this package is not part of the released stable Debian
1970 distribution.  It may have dependencies on other unreleased software,
1971 or other instabilities.  Please take care if you wish to install it.
1972 The update will eventually make its way into the next released Debian
1973 distribution."""
1974                 else:
1975                     self.Subst["__STABLE_WARNING__"] = ""
1976                 mail_message = utils.TemplateSubst(self.Subst, template)
1977                 utils.send_mail(mail_message)
1978
1979                 # Clear up after ourselves
1980                 del self.Subst["__BUG_NUMBER__"]
1981                 del self.Subst["__STABLE_WARNING__"]
1982
1983         if action and self.logger:
1984             self.logger.log(["closing bugs"] + bugs)
1985
1986         summary += "\n"
1987
1988         return summary
1989
1990     ###########################################################################
1991
1992     def announce(self, short_summary, action):
1993         """
1994         Send an announce mail about a new upload.
1995
1996         @type short_summary: string
1997         @param short_summary: Short summary text to include in the mail
1998
1999         @type action: bool
2000         @param action: If set to false, no real action will be done.
2001
2002         @rtype: string
2003         @return: Text string describing the action taken.
2004
2005         """
2006
2007         cnf = Config()
2008         announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
2009
2010         # Only do announcements for source uploads with a recent dpkg-dev installed
2011         if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
2012            self.pkg.changes["architecture"].has_key("source"):
2013             return ""
2014
2015         lists_done = {}
2016         summary = ""
2017
2018         self.Subst["__SHORT_SUMMARY__"] = short_summary
2019
2020         for dist in self.pkg.changes["distribution"].keys():
2021             suite = get_suite(dist)
2022             if suite is None: continue
2023             announce_list = suite.announce
2024             if announce_list == "" or lists_done.has_key(announce_list):
2025                 continue
2026
2027             lists_done[announce_list] = 1
2028             summary += "Announcing to %s\n" % (announce_list)
2029
2030             if action:
2031                 self.update_subst()
2032                 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
2033                 if cnf.get("Dinstall::TrackingServer") and \
2034                    self.pkg.changes["architecture"].has_key("source"):
2035                     trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
2036                     self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
2037
2038                 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
2039                 utils.send_mail(mail_message)
2040
2041                 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
2042
2043         if cnf.FindB("Dinstall::CloseBugs"):
2044             summary = self.close_bugs(summary, action)
2045
2046         del self.Subst["__SHORT_SUMMARY__"]
2047
2048         return summary
2049
2050     ###########################################################################
2051     @session_wrapper
2052     def accept (self, summary, short_summary, session=None):
2053         """
2054         Accept an upload.
2055
2056         This moves all files referenced from the .changes into the pool,
2057         sends the accepted mail, announces to lists, closes bugs and
2058         also checks for override disparities. If enabled it will write out
2059         the version history for the BTS Version Tracking and will finally call
2060         L{queue_build}.
2061
2062         @type summary: string
2063         @param summary: Summary text
2064
2065         @type short_summary: string
2066         @param short_summary: Short summary
2067         """
2068
2069         cnf = Config()
2070         stats = SummaryStats()
2071
2072         print "Installing."
2073         self.logger.log(["installing changes", self.pkg.changes_file])
2074
2075         poolfiles = []
2076
2077         # Add the .dsc file to the DB first
2078         for newfile, entry in self.pkg.files.items():
2079             if entry["type"] == "dsc":
2080                 source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
2081                 for j in pfs:
2082                     poolfiles.append(j)
2083
2084         # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
2085         for newfile, entry in self.pkg.files.items():
2086             if entry["type"] == "deb":
2087                 poolfiles.append(add_deb_to_db(self, newfile, session))
2088
2089         # If this is a sourceful, diff-only upload that is moving
2090         # cross-component, we need to copy the .orig files into the new
2091         # component too for the same reasons as above.
2092         # XXX: mhy: I think this should be in add_dsc_to_db
2093         if self.pkg.changes["architecture"].has_key("source"):
2094             for orig_file in self.pkg.orig_files.keys():
2095                 if not self.pkg.orig_files[orig_file].has_key("id"):
2096                     continue # Skip if it's not in the pool
2097                 orig_file_id = self.pkg.orig_files[orig_file]["id"]
2098                 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
2099                     continue # Skip if the location didn't change
2100
2101                 # Do the move
2102                 oldf = get_poolfile_by_id(orig_file_id, session)
2103                 old_filename = os.path.join(oldf.location.path, oldf.filename)
2104                 old_dat = {'size': oldf.filesize,   'md5sum': oldf.md5sum,
2105                            'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
2106
2107                 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
2108
2109                 # TODO: Care about size/md5sum collisions etc
2110                 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
2111
2112                 # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
2113                 if newf is None:
2114                     utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
2115                     newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
2116
2117                     session.flush()
2118
2119                     # Don't reference the old file from this changes
2120                     for p in poolfiles:
2121                         if p.file_id == oldf.file_id:
2122                             poolfiles.remove(p)
2123
2124                     poolfiles.append(newf)
2125
2126                     # Fix up the DSC references
2127                     toremove = []
2128
2129                     for df in source.srcfiles:
2130                         if df.poolfile.file_id == oldf.file_id:
2131                             # Add a new DSC entry and mark the old one for deletion
2132                             # Don't do it in the loop so we don't change the thing we're iterating over
2133                             newdscf = DSCFile()
2134                             newdscf.source_id = source.source_id
2135                             newdscf.poolfile_id = newf.file_id
2136                             session.add(newdscf)
2137
2138                             toremove.append(df)
2139
2140                     for df in toremove:
2141                         session.delete(df)
2142
2143                     # Flush our changes
2144                     session.flush()
2145
2146                     # Make sure that our source object is up-to-date
2147                     session.expire(source)
2148
2149         # Add changelog information to the database
2150         self.store_changelog()
2151
2152         # Install the files into the pool
2153         for newfile, entry in self.pkg.files.items():
2154             destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
2155             utils.move(newfile, destination)
2156             self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
2157             stats.accept_bytes += float(entry["size"])
2158
2159         # Copy the .changes file across for suites which need it.
2160         copy_changes = dict([(x.copychanges, '')
2161                              for x in session.query(Suite).filter(Suite.suite_name.in_(self.pkg.changes["distribution"].keys())).all()
2162                              if x.copychanges is not None])
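        # (a dict is used purely to de-duplicate destination paths)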
2163
2164         for dest in copy_changes.keys():
2165             utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
2166
2167         # We're done - commit the database changes
2168         session.commit()
2169         # Our SQL session will automatically start a new transaction after
2170         # the last commit
2171
2172         # Move the .changes into the 'done' directory
2173         utils.move(self.pkg.changes_file,
2174                    os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))
2175
2176         if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
2177             UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
2178
2179         self.update_subst()
2180         self.Subst["__SUMMARY__"] = summary
2181         mail_message = utils.TemplateSubst(self.Subst,
2182                                            os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
2183         utils.send_mail(mail_message)
2184         self.announce(short_summary, 1)
2185
2186         ## Helper stuff for DebBugs Version Tracking
2187         if cnf.Find("Dir::Queue::BTSVersionTrack"):
2188             if self.pkg.changes["architecture"].has_key("source"):
2189                 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2190                 version_history = os.fdopen(fd, 'w')
2191                 version_history.write(self.pkg.dsc["bts changelog"])
2192                 version_history.close()
2193                 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2194                                       self.pkg.changes_file[:-8]+".versions")
2195                 os.rename(temp_filename, filename)
2196                 os.chmod(filename, 0644)
2197
2198             # Write out the binary -> source mapping.
2199             (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2200             debinfo = os.fdopen(fd, 'w')
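            # One line per binary, i.e. "package version architecture
            # source-package source-version", e.g. (made-up values):
            #   dak 1.0-1 all dak 1.0-1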
2201             for name, entry in sorted(self.pkg.files.items()):
2202                 if entry["type"] == "deb":
2203                     line = " ".join([entry["package"], entry["version"],
2204                                      entry["architecture"], entry["source package"],
2205                                      entry["source version"]])
2206                     debinfo.write(line+"\n")
2207             debinfo.close()
2208             filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2209                                   self.pkg.changes_file[:-8]+".debinfo")
2210             os.rename(temp_filename, filename)
2211             os.chmod(filename, 0644)
2212
2213         session.commit()
2214
2215         # Set up our copy queues (e.g. buildd queues)
2216         for suite_name in self.pkg.changes["distribution"].keys():
2217             suite = get_suite(suite_name, session)
2218             for q in suite.copy_queues:
2219                 for f in poolfiles:
2220                     q.add_file_from_pool(f)
2221
2222         session.commit()
2223
2224         # Finally...
2225         stats.accept_count += 1
2226
2227     def check_override(self):
2228         """
2229         Checks override entries for validity. Mails "Override disparity" warnings,
2230         if that feature is enabled.
2231
2232         Abandons the check if
2233           - override disparity checks are disabled
2234           - mail sending is disabled
2235         """
2236
2237         cnf = Config()
2238
2239         # Abandon the check if override disparity checks have been disabled
2240         if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
2241             return
2242
2243         summary = self.pkg.check_override()
2244
2245         if summary == "":
2246             return
2247
2248         overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2249
2250         self.update_subst()
2251         self.Subst["__SUMMARY__"] = summary
2252         mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2253         utils.send_mail(mail_message)
2254         del self.Subst["__SUMMARY__"]
2255
2256     ###########################################################################
2257
2258     def remove(self, from_dir=None):
2259         """
2260         Used (for instance) in p-u to remove the package from unchecked
2261
2262         Also removes the package from holding area.
2263         """
2264         if from_dir is None:
2265             from_dir = self.pkg.directory
2266         h = Holding()
2267
2268         for f in self.pkg.files.keys():
2269             os.unlink(os.path.join(from_dir, f))
2270             if os.path.exists(os.path.join(h.holding_dir, f)):
2271                 os.unlink(os.path.join(h.holding_dir, f))
2272
2273         os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2274         if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2275             os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2276
2277     ###########################################################################
2278
2279     def move_to_queue (self, queue):
2280         """
2281         Move files to a destination queue using the permissions in the table
2282         """
2283         h = Holding()
2284         utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2285                    queue.path, perms=int(queue.change_perms, 8))
2286         for f in self.pkg.files.keys():
2287             utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
2288
2289     ###########################################################################
2290
2291     def force_reject(self, reject_files):
2292         """
2293         Forcefully move files from the current directory to the
2294         reject directory.  If any file already exists in the reject
2295         directory it will be moved to the morgue to make way for
2296         the new file.
2297
2298         @type reject_files: list
2299         @param reject_files: list of file names to reject
2300
2301         """
2302
2303         cnf = Config()
2304
2305         for file_entry in reject_files:
2306             # Skip any files which don't exist or which we don't have permission to copy.
2307             if not os.access(file_entry, os.R_OK):
2308                 continue
2309
2310             dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
2311
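            # O_CREAT|O_EXCL makes the open fail if the file already exists,
            # so two processes can never silently claim the same reject file.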
2312             try:
2313                 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
2314             except OSError, e:
2315                 # File exists?  Let's find a new name by adding a number
2316                 if e.errno == errno.EEXIST:
2317                     try:
2318                         dest_file = utils.find_next_free(dest_file, 255)
2319                     except NoFreeFilenameError:
2320                         # Something's either gone badly Pete Tong, or
2321                         # someone is trying to exploit us.
2322                         utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))
2323                         return
2324
2325                     # Make sure we really got it
2326                     try:
2327                         dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2328                     except OSError, e:
2329                         # Likewise
2330                         utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
2331                         return
2332                 else:
2333                     raise
2334             # If we got here, we own the destination file, so we can
2335             # safely overwrite it.
2336             utils.move(file_entry, dest_file, 1, perms=0660)
2337             os.close(dest_fd)
2338
2339     ###########################################################################
2340     def do_reject (self, manual=0, reject_message="", notes=""):
2341         """
2342         Reject an upload. If C{manual} is true and no reject message has
2343         been given, spawn an editor so the user can write one.
2344
2345         @type manual: bool
2346         @param manual: manual or automated rejection
2347
2348         @type reject_message: string
2349         @param reject_message: A reject message
2350
2351         @return: 0 on rejection, 1 if the rejection was abandoned
2352
2353         """
2354         # If we weren't given a manual rejection message, spawn an
2355         # editor so the user can add one in...
2356         if manual and not reject_message:
2357             (fd, temp_filename) = utils.temp_filename()
2358             temp_file = os.fdopen(fd, 'w')
2359             if len(notes) > 0:
2360                 for note in notes:
2361                     temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
2362                                     % (note.author, note.version, note.notedate, note.comment))
2363             temp_file.close()
2364             editor = os.environ.get("EDITOR","vi")
2365             answer = 'E'
2366             while answer == 'E':
2367                 os.system("%s %s" % (editor, temp_filename))
2368                 temp_fh = utils.open_file(temp_filename)
2369                 reject_message = "".join(temp_fh.readlines())
2370                 temp_fh.close()
2371                 print "Reject message:"
2372                 print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
2373                 prompt = "[R]eject, Edit, Abandon, Quit ?"
2374                 answer = "XXX"
2375                 while prompt.find(answer) == -1:
2376                     answer = utils.our_raw_input(prompt)
2377                     m = re_default_answer.search(prompt)
2378                     if answer == "":
2379                         answer = m.group(1)
2380                     answer = answer[:1].upper()
2381             os.unlink(temp_filename)
2382             if answer == 'A':
2383                 return 1
2384             elif answer == 'Q':
2385                 sys.exit(0)
2386
2387         print "Rejecting.\n"
2388
2389         cnf = Config()
2390
2391         reason_filename = self.pkg.changes_file[:-8] + ".reason"
2392         reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2393
2394         # Move all the files into the reject directory
2395         reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2396         self.force_reject(reject_files)
2397
2398         # If we fail here someone is probably trying to exploit the race
2399         # so let's just raise an exception ...
2400         if os.path.exists(reason_filename):
2401             os.unlink(reason_filename)
2402         reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2403
2404         rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
2405
2406         self.update_subst()
2407         if not manual:
2408             self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2409             self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2410             self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2411             os.write(reason_fd, reject_message)
2412             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2413         else:
2414             # Build up the rejection email
2415             user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2416             self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2417             self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2418             self.Subst["__REJECT_MESSAGE__"] = ""
2419             self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2420             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2421             # Write the rejection email out as the <foo>.reason file
2422             os.write(reason_fd, reject_mail_message)
2423
2424         del self.Subst["__REJECTOR_ADDRESS__"]
2425         del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2426         del self.Subst["__CC__"]
2427
2428         os.close(reason_fd)
2429
2430         # Send the rejection mail
2431         utils.send_mail(reject_mail_message)
2432
2433         if self.logger:
2434             self.logger.log(["rejected", self.pkg.changes_file])
2435
2436         return 0
2437
2438     ################################################################################
2439     def in_override_p(self, package, component, suite, binary_type, filename, session):
2440         """
2441         Check if a package already has override entries in the DB
2442
2443         @type package: string
2444         @param package: package name
2445
2446         @type component: string
2447         @param component: component name
2448
2449         @type suite: string
2450         @param suite: suite name
2451
2452         @type binary_type: string
2453         @param binary_type: type of the package
2454
2455         @type filename: string
2456         @param filename: filename we check
2457
2458         @return: the database result. But no one cares anyway.
2459
2460         """
2461
2462         cnf = Config()
2463
2464         if binary_type == "": # must be source
2465             file_type = "dsc"
2466         else:
2467             file_type = binary_type
2468
2469         # Override suite name; used for example with proposed-updates
2470         oldsuite = get_suite(suite, session)
2471         if (not oldsuite is None) and oldsuite.overridesuite:
2472             suite = oldsuite.overridesuite
2473
2474         result = get_override(package, suite, component, file_type, session)
2475
2476         # If checking for a source package, fall back on the binary override type
2477         if file_type == "dsc" and len(result) < 1:
2478             result = get_override(package, suite, component, ['deb', 'udeb'], session)
2479
2480         # Remember the section and priority so we can check them later if appropriate
2481         if len(result) > 0:
2482             result = result[0]
2483             self.pkg.files[filename]["override section"] = result.section.section
2484             self.pkg.files[filename]["override priority"] = result.priority.priority
2485             return result
2486
2487         return None
2488
2489     ################################################################################
2490     def get_anyversion(self, sv_list, suite):
2491         """
2492         @type sv_list: list
2493         @param sv_list: list of (suite, version) tuples to check
2494
2495         @type suite: string
2496         @param suite: suite name
2497
2498         @return: the highest version from sv_list in the given suite or any suite it enhances, or None
2499         """
2500         Cnf = Config()
2501         anyversion = None
2502         anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
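        # e.g. with sv_list [("unstable", "1.0-1"), ("experimental", "2.0-1")]
        # and suite "unstable" (which enhances nothing), this returns "1.0-1".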
2503         for (s, v) in sv_list:
2504             if s in [ x.lower() for x in anysuite ]:
2505                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2506                     anyversion = v
2507
2508         return anyversion
2509
2510     ################################################################################
2511
2512     def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2513         """
2514         @type sv_list: list
2515         @param sv_list: list of (suite, version) tuples to check
2516
2517         @type filename: string
2518         @param filename: name of the file being checked, used only in reject and warning messages
2519
2520         @type new_version: string
2521         @param new_version: version of the uploaded package
2522
2523         Ensure versions are newer than existing packages in target
2524         suites and that cross-suite version checking rules as
2525         set out in the conf file are satisfied.
2526         """
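             # Illustration (hypothetical configuration): with
             #   Suite::testing::VersionChecks::MustBeNewerThan { "stable"; };
             #   Suite::testing::VersionChecks::MustBeOlderThan { "unstable"; };
             # a sourceful upload of foo 1.2-1 to testing is rejected if stable
             # already has foo >= 1.2-1, and any upload is rejected if unstable
             # has foo <= 1.2-1 and no propagation rule can save it.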
2527
2528         cnf = Config()
2529
2530         # Check versions for each target suite
2531         for target_suite in self.pkg.changes["distribution"].keys():
2532             must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
2533             must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
2534
2535             # Enforce "must be newer than target suite" even if conffile omits it
2536             if target_suite not in must_be_newer_than:
2537                 must_be_newer_than.append(target_suite)
2538
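                 # Note: apt_pkg.VersionCompare(a, b) is negative if a is older
                 # than b, zero if they are equal and positive if a is newer,
                 # e.g. VersionCompare("1.0-2", "1.0-1") > 0.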
2539             for (suite, existent_version) in sv_list:
2540                 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
2541
2542                 if suite in must_be_newer_than and sourceful and vercmp < 1:
2543                     self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2544
2545                 if suite in must_be_older_than and vercmp > -1:
2546                     cansave = 0
2547
2548                     if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2549                         # we really use the other suite, ignoring the conflicting one ...
2550                         addsuite = self.pkg.changes["distribution-version"][suite]
2551
2552                         add_version = self.get_anyversion(sv_list, addsuite)
2553                         target_version = self.get_anyversion(sv_list, target_suite)
2554
2555                         if not add_version:
2556                             # not add_version can only happen if we map to a suite
2557                             # that doesn't enhance the suite we're propup'ing from.
2558                             # so "propup-ver x a b c; map a d" is a problem only if
2559                             # d doesn't enhance a.
2560                             #
2561                             # i think we could always propagate in this case, rather
2562                             # than complaining. either way, this isn't a REJECT issue
2563                             #
2564                             # And - we really should complain to the dorks who configured dak
2565                             self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyway" % (suite, addsuite))
2566                             self.pkg.changes.setdefault("propdistribution", {})
2567                             self.pkg.changes["propdistribution"][addsuite] = 1
2568                             cansave = 1
2569                         elif not target_version:
2570                             # not target_version is true when the package is NEW
2571                             # we could just stick with the "...old version..." REJECT
2572                             # for this, I think.
2573                             self.rejects.append("Won't propagate NEW packages.")
2574                         elif apt_pkg.VersionCompare(new_version, add_version) < 0:
2575                             # propagation would be redundant. no need to reject though.
2576                             self.warnings.append("ignoring version conflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2577                             cansave = 1
2578                         elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2579                              apt_pkg.VersionCompare(add_version, target_version) >= 0:
2580                             # propagate!!
2581                             self.warnings.append("Propagating upload to %s" % (addsuite))
2582                             self.pkg.changes.setdefault("propdistribution", {})
2583                             self.pkg.changes["propdistribution"][addsuite] = 1
2584                             cansave = 1
2585
2586                     if not cansave:
2587                         self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2588
2589     ################################################################################
2590     def check_binary_against_db(self, filename, session):
2591         # Ensure version is sane
2592         self.cross_suite_version_check( \
2593             get_suite_version_by_package(self.pkg.files[filename]["package"], \
2594                 self.pkg.files[filename]["architecture"], session),
2595             filename, self.pkg.files[filename]["version"], sourceful=False)
2596
2597         # Check for any existing copies of the file
2598         q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2599         q = q.filter_by(version=self.pkg.files[filename]["version"])
2600         q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2601
2602         if q.count() > 0:
2603             self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
2604
2605     ################################################################################
2606
2607     def check_source_against_db(self, filename, session):
2608         source = self.pkg.dsc.get("source")
2609         version = self.pkg.dsc.get("version")
2610
2611         # Ensure version is sane
2612         self.cross_suite_version_check( \
2613             get_suite_version_by_source(source, session), filename, version,
2614             sourceful=True)
2615
2616     ################################################################################
2617     def check_dsc_against_db(self, filename, session):
2618         """
2619
2620         @warning: this function can remove entries from the 'files' index [if
2621          the orig tarball is a duplicate of the one in the archive]; if
2622          you're iterating over 'files' and call this function as part of
2623          the loop, be sure to add a check to the top of the loop to
2624          ensure you haven't just tried to dereference the deleted entry.
2625
2626         """
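             # Safe-iteration sketch for callers (cf. recheck() below), since
             # entries can be deleted from self.pkg.files while looping:
             #   for f in self.pkg.files.keys():
             #       if not self.pkg.files.has_key(f):
             #           continue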
2627
2628         Cnf = Config()
2629         self.pkg.orig_files = {} # XXX: do we need to clear it?
2630         orig_files = self.pkg.orig_files
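             # orig_files maps each orig tarball name to a dict which can grow
             # "path" (on-disk location), "id" (files id in the DB) and
             # "location" (location id) keys as the checks below find things,
             # e.g. (hypothetical):
             #   orig_files["foo_1.0.orig.tar.gz"] = {"path": "/srv/pool/main/f/foo/foo_1.0.orig.tar.gz"}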
2631
2632         # Try and find all files mentioned in the .dsc.  This has
2633         # to work harder to cope with the multiple possible
2634         # locations of an .orig.tar.gz.
2635         # The ordering on the select is needed to pick the newest orig
2636         # when it exists in multiple places.
2637         for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2638             found = None
2639             if self.pkg.files.has_key(dsc_name):
2640                 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2641                 actual_size = int(self.pkg.files[dsc_name]["size"])
2642                 found = "%s in incoming" % (dsc_name)
2643
2644                 # Check the file does not already exist in the archive
2645                 ql = get_poolfile_like_name(dsc_name, session)
2646
2647                 # Keep only results whose filename ends with dsc_name
2648                 # (the pool query may also return partial matches)
2649                 ql = [i for i in ql if i.filename.endswith(dsc_name)]
2651
2652                 # "[dak] has not broken them.  [dak] has fixed a
2653                 # brokenness.  Your crappy hack exploited a bug in
2654                 # the old dinstall.
2655                 #
2656                 # "(Come on!  I thought it was always obvious that
2657                 # one just doesn't release different files with
2658                 # the same name and version.)"
2659                 #                        -- ajk@ on d-devel@l.d.o
2660
2661                 if len(ql) > 0:
2662                     # Ignore exact matches for .orig.tar.gz
2663                     match = 0
2664                     if re_is_orig_source.match(dsc_name):
2665                         for i in ql:
2666                             if self.pkg.files.has_key(dsc_name) and \
2667                                int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2668                                self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2669                                 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2670                                 # TODO: Don't delete the entry, just mark it as not needed
2671                                 # This would fix the stupidity of changing something we often iterate over
2672                                 # whilst we're doing it
2673                                 del self.pkg.files[dsc_name]
2674                                 dsc_entry["files id"] = i.file_id
2675                                 if not orig_files.has_key(dsc_name):
2676                                     orig_files[dsc_name] = {}
2677                                 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2678                                 match = 1
2679
2680                                 # Don't bitch that we couldn't find this file later
2681                                 try:
2682                                     self.later_check_files.remove(dsc_name)
2683                                 except ValueError:
2684                                     pass
2685
2686
2687                     if not match:
2688                         self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2689
2690             elif re_is_orig_source.match(dsc_name):
2691                 # Check in the pool
2692                 ql = get_poolfile_like_name(dsc_name, session)
2693
2694                 # Keep only results whose filename ends with dsc_name
2695                 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2696                 ql = [i for i in ql if i.filename.endswith(dsc_name)]
2699
2700                 if len(ql) > 0:
2701                     # Unfortunately, we may get more than one match here if,
2702                     # for example, the package was in potato but had an -sa
2703                     # upload in woody.  So we need to choose the right one.
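                         # e.g. (hypothetical) the pool may hold two files named
                         # foo_1.0.orig.tar.gz in different locations; the md5sum
                         # and size from the .dsc decide which one is meant.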
2704
2705                     # default to something sane in case we don't match any or have only one
2706                     x = ql[0]
2707
2708                     if len(ql) > 1:
2709                         for i in ql:
2710                             old_file = os.path.join(i.location.path, i.filename)
2711                             old_file_fh = utils.open_file(old_file)
2712                             actual_md5 = apt_pkg.md5sum(old_file_fh)
2713                             old_file_fh.close()
2714                             actual_size = os.stat(old_file)[stat.ST_SIZE]
2715                             if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2716                                 x = i
2717
2718                     old_file = os.path.join(x.location.path, x.filename)
2719                     old_file_fh = utils.open_file(old_file)
2720                     actual_md5 = apt_pkg.md5sum(old_file_fh)
2721                     old_file_fh.close()
2722                     actual_size = os.stat(old_file)[stat.ST_SIZE]
2723                     found = old_file
2724                     suite_type = x.location.archive_type
2725                     # need this for updating dsc_files in install()
2726                     dsc_entry["files id"] = x.file_id
2727                     # See install() in process-accepted...
2728                     if not orig_files.has_key(dsc_name):
2729                         orig_files[dsc_name] = {}
2730                     orig_files[dsc_name]["id"] = x.file_id
2731                     orig_files[dsc_name]["path"] = old_file
2732                     orig_files[dsc_name]["location"] = x.location.location_id
2733                 else:
2734                     # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2735                     # Not there? Check the queue directories...
2736                     for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2737                         if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2738                             continue
2739                         in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2740                         if os.path.exists(in_otherdir):
2741                             in_otherdir_fh = utils.open_file(in_otherdir)
2742                             actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2743                             in_otherdir_fh.close()
2744                             actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2745                             found = in_otherdir
2746                             if not orig_files.has_key(dsc_name):
2747                                 orig_files[dsc_name] = {}
2748                             orig_files[dsc_name]["path"] = in_otherdir
2749
2750                     if not found:
2751                         self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2752                         continue
2753             else:
2754                 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2755                 continue
2756             if actual_md5 != dsc_entry["md5sum"]:
2757                 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2758             if actual_size != int(dsc_entry["size"]):
2759                 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2760
2761     ################################################################################
2762     # This is used by process-new and process-holding to recheck a changes file
2763     # at the time we're running.  It mainly wraps various other internal functions
2764     # and is similar to accepted_checks - these should probably be tidied up
2765     # and combined
2766     def recheck(self, session):
2767         cnf = Config()
2768         for f in self.pkg.files.keys():
2769             # The .orig.tar.gz can disappear out from under us if it's a
2770             # duplicate of one in the archive.
2771             if not self.pkg.files.has_key(f):
2772                 continue
2773
2774             entry = self.pkg.files[f]
2775
2776             # Check that the source still exists
2777             if entry["type"] == "deb":
2778                 source_version = entry["source version"]
2779                 source_package = entry["source package"]
2780                 if not self.pkg.changes["architecture"].has_key("source") \
2781                    and not source_exists(source_package, source_version, \
2782                     suites = self.pkg.changes["distribution"].keys(), session = session):
2783                     source_epochless_version = re_no_epoch.sub('', source_version)
2784                     dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2785                     found = False
2786                     for q in ["Embargoed", "Unembargoed", "Newstage"]:
2787                         if cnf.has_key("Dir::Queue::%s" % (q)):
2788                             if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
2789                                 found = True
2790                     if not found:
2791                         self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2792
2793             # Version and file overwrite checks
2794             if entry["type"] == "deb":
2795                 self.check_binary_against_db(f, session)
2796             elif entry["type"] == "dsc":
2797                 self.check_source_against_db(f, session)
2798                 self.check_dsc_against_db(f, session)
2799
2800     ################################################################################
2801     def accepted_checks(self, overwrite_checks, session):
2802         # Recheck anything that relies on the database, since that's not
2803         # frozen between accept and our run time when called from p-a.
2804
2805         # overwrite_checks is set to False when installing to stable/oldstable
2806
2807         propagate = {}
2808         nopropagate = {}
2809
2810         # Find the .dsc (again)
2811         dsc_filename = None
2812         for f in self.pkg.files.keys():
2813             if self.pkg.files[f]["type"] == "dsc":
2814                 dsc_filename = f
2815
2816         for checkfile in self.pkg.files.keys():
2817             # The .orig.tar.gz can disappear out from under us if it's a
2818             # duplicate of one in the archive.
2819             if not self.pkg.files.has_key(checkfile):
2820                 continue
2821
2822             entry = self.pkg.files[checkfile]
2823
2824             # Check that the source still exists
2825             if entry["type"] == "deb":
2826                 source_version = entry["source version"]
2827                 source_package = entry["source package"]
2828                 if not self.pkg.changes["architecture"].has_key("source") \
2829                    and not source_exists(source_package, source_version, \
2830                     suites = self.pkg.changes["distribution"].keys(), \
2831                     session = session):
2832                     self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2833
2834             # Version and file overwrite checks
2835             if overwrite_checks:
2836                 if entry["type"] == "deb":
2837                     self.check_binary_against_db(checkfile, session)
2838                 elif entry["type"] == "dsc":
2839                     self.check_source_against_db(checkfile, session)
2840                     self.check_dsc_against_db(dsc_filename, session)
2841
2842             # propagate if the package is present in the override tables:
2843             for suite in self.pkg.changes.get("propdistribution", {}).keys():
2844                 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2845                     propagate[suite] = 1
2846                 else:
2847                     nopropagate[suite] = 1
2848
2849         for suite in propagate.keys():
2850             if suite in nopropagate:
2851                 continue
2852             self.pkg.changes["distribution"][suite] = 1
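             # Example (hypothetical): an upload with propdistribution
             # {"proposed-updates": 1} gains proposed-updates as a distribution
             # only if every file has an override there; a single file without
             # one vetoes the propagation.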
2853
2854         for checkfile in self.pkg.files.keys():
2855             # Check the package is still in the override tables
             entry = self.pkg.files[checkfile]
2856             for suite in self.pkg.changes["distribution"].keys():
2857                 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2858                     self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2859
2860     ################################################################################
2861     # If any file of an upload has a recent mtime then chances are good
2862     # the file is still being uploaded.
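         # Example: with Dinstall::SkipTime set to 300, an upload whose files
         # were modified within the last five minutes is treated as still in
         # progress and skipped for this run.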
2863
2864     def upload_too_new(self):
2865         cnf = Config()
2866         too_new = False
2867         # Move back to the original directory to get accurate time stamps
2868         cwd = os.getcwd()
2869         os.chdir(self.pkg.directory)
2870         file_list = self.pkg.files.keys()
2871         file_list.extend(self.pkg.dsc_files.keys())
2872         file_list.append(self.pkg.changes_file)
2873         for f in file_list:
2874             try:
2875                 last_modified = time.time()-os.path.getmtime(f)
2876                 if last_modified < int(cnf["Dinstall::SkipTime"]):
2877                     too_new = True
2878                     break
2879             except OSError: # file vanished or became unreadable; skip it
2880                 pass
2881
2882         os.chdir(cwd)
2883         return too_new
2884
2885     def store_changelog(self):
2886
2887         # Skip binary-only upload if it is not a bin-NMU
2888         if not self.pkg.changes['architecture'].has_key('source'):
2889             from daklib.regexes import re_bin_only_nmu
2890             if not re_bin_only_nmu.search(self.pkg.changes['version']):
2891                 return
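             # (A bin-NMU version conventionally carries a "+bN" suffix, e.g.
             # "1.2-3+b1", which is what re_bin_only_nmu is expected to match;
             # other binary-only uploads return above without storing anything.)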
2892
2893         session = DBConn().session()
2894
2895         # Check if upload already has a changelog entry
2896         query = """SELECT changelog_id FROM changes WHERE source = :source
2897                    AND version = :version AND architecture = :architecture AND changelog_id != 0"""
2898         if session.execute(query, {'source': self.pkg.changes['source'], \
2899                                    'version': self.pkg.changes['version'], \
2900                                    'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
2901             session.commit()
2902             return
2903
2904         # Add current changelog text into changelogs_text table, return created ID
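             # (INSERT ... RETURNING is PostgreSQL syntax; it hands back the new
             # row's id without a second query.)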
2905         query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
2906         ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]
2907
2908         # Link ID to the upload available in changes table
2909         query = """UPDATE changes SET changelog_id = :id WHERE source = :source
2910                    AND version = :version AND architecture = :architecture"""
2911         session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
2912                                 'version': self.pkg.changes['version'], \
2913                                 'architecture': " ".join(self.pkg.changes['architecture'].keys())})
2914
2915         session.commit()