]> git.decadent.org.uk Git - dak.git/blob - daklib/queue.py
queue.Upload: separate code to load .dsc from check_dsc
[dak.git] / daklib / queue.py
1 #!/usr/bin/env python
2 # vim:set et sw=4:
3
4 """
5 Queue utility functions for dak
6
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009, 2010  Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
11 """
12
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
17
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
21 # GNU General Public License for more details.
22
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
26
27 ###############################################################################
28
29 import errno
30 import os
31 import stat
32 import sys
33 import time
34 import apt_inst
35 import apt_pkg
36 import utils
37 import commands
38 import shutil
39 import textwrap
40 from types import *
41 from sqlalchemy.sql.expression import desc
42 from sqlalchemy.orm.exc import NoResultFound
43
44 import yaml
45
46 from dak_exceptions import *
47 from changes import *
48 from regexes import *
49 from config import Config
50 from holding import Holding
51 from urgencylog import UrgencyLog
52 from dbconn import *
53 from summarystats import SummaryStats
54 from utils import parse_changes, check_dsc_files
55 from textutils import fix_maintainer
56 from lintian import parse_lintian_output, generate_reject_messages
57 from contents import UnpackedSource
58
59 # suppress some deprecation warnings in squeeze related to apt_pkg
60 # module
61 import warnings
62 warnings.filterwarnings('ignore', \
63     "apt_pkg.ParseSection\(\) is deprecated. Please see apt_pkg\.TagSection\(\) for the replacement\.", \
64     DeprecationWarning)
65 warnings.filterwarnings('ignore', \
66     "Attribute 'Find' of the 'apt_pkg\.TagSection' object is deprecated, use 'find' instead\.", \
67     DeprecationWarning)
68
69 ###############################################################################
70
71 def get_type(f, session):
72     """
73     Get the file type of C{f}
74
75     @type f: dict
76     @param f: file entry from Changes object
77
78     @type session: SQLA Session
79     @param session: SQL Alchemy session object
80
81     @rtype: string
82     @return: filetype
83
84     """
85     # Determine the type
86     if f.has_key("dbtype"):
87         file_type = f["dbtype"]
88     elif re_source_ext.match(f["type"]):
89         file_type = "dsc"
90     else:
91         file_type = f["type"]
92         utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (file_type))
93
94     # Validate the override type
95     type_id = get_override_type(file_type, session)
96     if type_id is None:
97         utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))
98
99     return file_type
100
101 ################################################################################
102
103 # Determine what parts in a .changes are NEW
104
105 def determine_new(filename, changes, files, warn=1, session = None):
106     """
107     Determine what parts in a C{changes} file are NEW.
108
109     @type filename: str
110     @param filename: changes filename
111
112     @type changes: Upload.Pkg.changes dict
113     @param changes: Changes dictionary
114
115     @type files: Upload.Pkg.files dict
116     @param files: Files dictionary
117
118     @type warn: bool
119     @param warn: Warn if overrides are added for (old)stable
120
121     @rtype: dict
122     @return: dictionary of NEW components.
123
124     """
125     # TODO: This should all use the database instead of parsing the changes
126     # file again
127     new = {}
128     byhand = {}
129
130     dbchg = get_dbchange(filename, session)
131     if dbchg is None:
132         print "Warning: cannot find changes file in database; won't check byhand"
133
134     # Build up a list of potentially new things
135     for name, f in files.items():
136         # Keep a record of byhand elements
137         if f["section"] == "byhand":
138             byhand[name] = 1
139             continue
140
141         pkg = f["package"]
142         priority = f["priority"]
143         section = f["section"]
144         file_type = get_type(f, session)
145         component = f["component"]
146
147         if file_type == "dsc":
148             priority = "source"
149
150         if not new.has_key(pkg):
151             new[pkg] = {}
152             new[pkg]["priority"] = priority
153             new[pkg]["section"] = section
154             new[pkg]["type"] = file_type
155             new[pkg]["component"] = component
156             new[pkg]["files"] = []
157         else:
158             old_type = new[pkg]["type"]
159             if old_type != file_type:
160                 # source gets trumped by deb or udeb
161                 if old_type == "dsc":
162                     new[pkg]["priority"] = priority
163                     new[pkg]["section"] = section
164                     new[pkg]["type"] = file_type
165                     new[pkg]["component"] = component
166
167         new[pkg]["files"].append(name)
168
169         if f.has_key("othercomponents"):
170             new[pkg]["othercomponents"] = f["othercomponents"]
171
172     # Fix up the list of target suites
173     cnf = Config()
174     for suite in changes["suite"].keys():
175         oldsuite = get_suite(suite, session)
176         if not oldsuite:
177             print "WARNING: Invalid suite %s found" % suite
178             continue
179
180         if oldsuite.overridesuite:
181             newsuite = get_suite(oldsuite.overridesuite, session)
182
183             if newsuite:
184                 print "INFORMATION: Using overrides from suite %s instead of suite %s" % (
185                     oldsuite.overridesuite, suite)
186                 del changes["suite"][suite]
187                 changes["suite"][oldsuite.overridesuite] = 1
188             else:
189                 print "WARNING: Told to use overridesuite %s for %s but it doesn't exist.  Bugger" % (
190                     oldsuite.overridesuite, suite)
191
192     # Check for unprocessed byhand files
193     if dbchg is not None:
194         for b in byhand.keys():
195             # Find the file entry in the database
196             found = False
197             for f in dbchg.files:
198                 if f.filename == b:
199                     found = True
200                     # If it's processed, we can ignore it
201                     if f.processed:
202                         del byhand[b]
203                     break
204
205             if not found:
206                 print "Warning: Couldn't find BYHAND item %s in the database; assuming unprocessed"
207
208     # Check for new stuff
209     for suite in changes["suite"].keys():
210         for pkg in new.keys():
211             ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
212             if len(ql) > 0:
213                 for file_entry in new[pkg]["files"]:
214                     if files[file_entry].has_key("new"):
215                         del files[file_entry]["new"]
216                 del new[pkg]
217
218     if warn:
219         for s in ['stable', 'oldstable']:
220             if changes["suite"].has_key(s):
221                 print "WARNING: overrides will be added for %s!" % s
222         for pkg in new.keys():
223             if new[pkg].has_key("othercomponents"):
224                 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
225
226     return new, byhand
227
228 ################################################################################
229
230 def check_valid(new, session = None):
231     """
232     Check if section and priority for NEW packages exist in database.
233     Additionally does sanity checks:
234       - debian-installer packages have to be udeb (or source)
235       - non debian-installer packages can not be udeb
236       - source priority can only be assigned to dsc file types
237
238     @type new: dict
239     @param new: Dict of new packages with their section, priority and type.
240
241     """
242     for pkg in new.keys():
243         section_name = new[pkg]["section"]
244         priority_name = new[pkg]["priority"]
245         file_type = new[pkg]["type"]
246
247         section = get_section(section_name, session)
248         if section is None:
249             new[pkg]["section id"] = -1
250         else:
251             new[pkg]["section id"] = section.section_id
252
253         priority = get_priority(priority_name, session)
254         if priority is None:
255             new[pkg]["priority id"] = -1
256         else:
257             new[pkg]["priority id"] = priority.priority_id
258
259         # Sanity checks
260         di = section_name.find("debian-installer") != -1
261
262         # If d-i, we must be udeb and vice-versa
263         if     (di and file_type not in ("udeb", "dsc")) or \
264            (not di and file_type == "udeb"):
265             new[pkg]["section id"] = -1
266
267         # If dsc we need to be source and vice-versa
268         if (priority == "source" and file_type != "dsc") or \
269            (priority != "source" and file_type == "dsc"):
270             new[pkg]["priority id"] = -1
271
272 ###############################################################################
273
274 # Used by Upload.check_timestamps
275 class TarTime(object):
276     def __init__(self, future_cutoff, past_cutoff):
277         self.reset()
278         self.future_cutoff = future_cutoff
279         self.past_cutoff = past_cutoff
280
281     def reset(self):
282         self.future_files = {}
283         self.ancient_files = {}
284
285     def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
286         if MTime > self.future_cutoff:
287             self.future_files[Name] = MTime
288         if MTime < self.past_cutoff:
289             self.ancient_files[Name] = MTime
290
291 ###############################################################################
292
293 def prod_maintainer(notes, upload):
294     cnf = Config()
295
296     # Here we prepare an editor and get them ready to prod...
297     (fd, temp_filename) = utils.temp_filename()
298     temp_file = os.fdopen(fd, 'w')
299     for note in notes:
300         temp_file.write(note.comment)
301     temp_file.close()
302     editor = os.environ.get("EDITOR","vi")
303     answer = 'E'
304     while answer == 'E':
305         os.system("%s %s" % (editor, temp_filename))
306         temp_fh = utils.open_file(temp_filename)
307         prod_message = "".join(temp_fh.readlines())
308         temp_fh.close()
309         print "Prod message:"
310         print utils.prefix_multi_line_string(prod_message,"  ",include_blank_lines=1)
311         prompt = "[P]rod, Edit, Abandon, Quit ?"
312         answer = "XXX"
313         while prompt.find(answer) == -1:
314             answer = utils.our_raw_input(prompt)
315             m = re_default_answer.search(prompt)
316             if answer == "":
317                 answer = m.group(1)
318             answer = answer[:1].upper()
319     os.unlink(temp_filename)
320     if answer == 'A':
321         return
322     elif answer == 'Q':
323         end()
324         sys.exit(0)
325     # Otherwise, do the proding...
326     user_email_address = utils.whoami() + " <%s>" % (
327         cnf["Dinstall::MyAdminAddress"])
328
329     Subst = upload.Subst
330
331     Subst["__FROM_ADDRESS__"] = user_email_address
332     Subst["__PROD_MESSAGE__"] = prod_message
333     Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
334
335     prod_mail_message = utils.TemplateSubst(
336         Subst,cnf["Dir::Templates"]+"/process-new.prod")
337
338     # Send the prod mail
339     utils.send_mail(prod_mail_message)
340
341     print "Sent prodding message"
342
343 ################################################################################
344
345 def edit_note(note, upload, session, trainee=False):
346     # Write the current data to a temporary file
347     (fd, temp_filename) = utils.temp_filename()
348     editor = os.environ.get("EDITOR","vi")
349     answer = 'E'
350     while answer == 'E':
351         os.system("%s %s" % (editor, temp_filename))
352         temp_file = utils.open_file(temp_filename)
353         newnote = temp_file.read().rstrip()
354         temp_file.close()
355         print "New Note:"
356         print utils.prefix_multi_line_string(newnote,"  ")
357         prompt = "[D]one, Edit, Abandon, Quit ?"
358         answer = "XXX"
359         while prompt.find(answer) == -1:
360             answer = utils.our_raw_input(prompt)
361             m = re_default_answer.search(prompt)
362             if answer == "":
363                 answer = m.group(1)
364             answer = answer[:1].upper()
365     os.unlink(temp_filename)
366     if answer == 'A':
367         return
368     elif answer == 'Q':
369         end()
370         sys.exit(0)
371
372     comment = NewComment()
373     comment.package = upload.pkg.changes["source"]
374     comment.version = upload.pkg.changes["version"]
375     comment.comment = newnote
376     comment.author  = utils.whoami()
377     comment.trainee = trainee
378     session.add(comment)
379     session.commit()
380
381 ###############################################################################
382
383 # suite names DMs can upload to
384 dm_suites = ['unstable', 'experimental']
385
386 def get_newest_source(source, session):
387     'returns the newest DBSource object in dm_suites'
388     ## the most recent version of the package uploaded to unstable or
389     ## experimental includes the field "DM-Upload-Allowed: yes" in the source
390     ## section of its control file
391     q = session.query(DBSource).filter_by(source = source). \
392         filter(DBSource.suites.any(Suite.suite_name.in_(dm_suites))). \
393         order_by(desc('source.version'))
394     return q.first()
395
396 def get_suite_version_by_source(source, session):
397     'returns a list of tuples (suite_name, version) for source package'
398     q = session.query(Suite.suite_name, DBSource.version). \
399         join(Suite.sources).filter_by(source = source)
400     return q.all()
401
402 def get_source_by_package_and_suite(package, suite_name, session):
403     '''
404     returns a DBSource query filtered by DBBinary.package and this package's
405     suite_name
406     '''
407     return session.query(DBSource). \
408         join(DBSource.binaries).filter_by(package = package). \
409         join(DBBinary.suites).filter_by(suite_name = suite_name)
410
411 def get_suite_version_by_package(package, arch_string, session):
412     '''
413     returns a list of tuples (suite_name, version) for binary package and
414     arch_string
415     '''
416     return session.query(Suite.suite_name, DBBinary.version). \
417         join(Suite.binaries).filter_by(package = package). \
418         join(DBBinary.architecture). \
419         filter(Architecture.arch_string.in_([arch_string, 'all'])).all()
420
421 class Upload(object):
422     """
423     Everything that has to do with an upload processed.
424
425     """
426     def __init__(self):
427         self.logger = None
428         self.pkg = Changes()
429         self.reset()
430
431     ###########################################################################
432
433     def reset (self):
434         """ Reset a number of internal variables."""
435
436         # Initialize the substitution template map
437         cnf = Config()
438         self.Subst = {}
439         self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
440         self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
441         self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
442         self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
443
444         self.rejects = []
445         self.warnings = []
446         self.notes = []
447
448         self.later_check_files = []
449
450         self.pkg.reset()
451
452     def package_info(self):
453         """
454         Format various messages from this Upload to send to the maintainer.
455         """
456
457         msgs = (
458             ('Reject Reasons', self.rejects),
459             ('Warnings', self.warnings),
460             ('Notes', self.notes),
461         )
462
463         msg = ''
464         for title, messages in msgs:
465             if messages:
466                 msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
467         msg += '\n\n'
468
469         return msg
470
471     ###########################################################################
472     def update_subst(self):
473         """ Set up the per-package template substitution mappings """
474
475         cnf = Config()
476
477         # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
478         if not self.pkg.changes.has_key("architecture") or not \
479            isinstance(self.pkg.changes["architecture"], dict):
480             self.pkg.changes["architecture"] = { "Unknown" : "" }
481
482         # and maintainer2047 may not exist.
483         if not self.pkg.changes.has_key("maintainer2047"):
484             self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]
485
486         self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
487         self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
488         self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")
489
490         # For source uploads the Changed-By field wins; otherwise Maintainer wins.
491         if self.pkg.changes["architecture"].has_key("source") and \
492            self.pkg.changes["changedby822"] != "" and \
493            (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
494
495             self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
496             self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
497             self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
498         else:
499             self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
500             self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
501             self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")
502
503         # Process policy doesn't set the fingerprint field and I don't want to make it
504         # do it for now as I don't want to have to deal with the case where we accepted
505         # the package into PU-NEW, but the fingerprint has gone away from the keyring in
506         # the meantime so the package will be remarked as rejectable.  Urgh.
507         # TODO: Fix this properly
508         if self.pkg.changes.has_key('fingerprint'):
509             session = DBConn().session()
510             fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
511             if fpr and self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
512                 if self.pkg.changes.has_key("sponsoremail"):
513                     self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
514             session.close()
515
516         if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
517             self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
518
519         # Apply any global override of the Maintainer field
520         if cnf.get("Dinstall::OverrideMaintainer"):
521             self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
522             self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]
523
524         self.Subst["__REJECT_MESSAGE__"] = self.package_info()
525         self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
526         self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
527         self.Subst["__SUITE__"] = ", ".join(self.pkg.changes["distribution"])
528
529     ###########################################################################
530     def load_changes(self, filename):
531         """
532         Load a changes file and setup a dictionary around it. Also checks for mandantory
533         fields  within.
534
535         @type filename: string
536         @param filename: Changes filename, full path.
537
538         @rtype: boolean
539         @return: whether the changes file was valid or not.  We may want to
540                  reject even if this is True (see what gets put in self.rejects).
541                  This is simply to prevent us even trying things later which will
542                  fail because we couldn't properly parse the file.
543         """
544         Cnf = Config()
545         self.pkg.changes_file = filename
546
547         # Parse the .changes field into a dictionary
548         try:
549             self.pkg.changes.update(parse_changes(filename))
550         except CantOpenError:
551             self.rejects.append("%s: can't read file." % (filename))
552             return False
553         except ParseChangesError, line:
554             self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
555             return False
556         except ChangesUnicodeError:
557             self.rejects.append("%s: changes file not proper utf-8" % (filename))
558             return False
559
560         # Parse the Files field from the .changes into another dictionary
561         try:
562             self.pkg.files.update(utils.build_file_list(self.pkg.changes))
563         except ParseChangesError, line:
564             self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
565             return False
566         except UnknownFormatError, format:
567             self.rejects.append("%s: unknown format '%s'." % (filename, format))
568             return False
569
570         # Check for mandatory fields
571         for i in ("distribution", "source", "binary", "architecture",
572                   "version", "maintainer", "files", "changes", "description"):
573             if not self.pkg.changes.has_key(i):
574                 # Avoid undefined errors later
575                 self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
576                 return False
577
578         # Strip a source version in brackets from the source field
579         if re_strip_srcver.search(self.pkg.changes["source"]):
580             self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])
581
582         # Ensure the source field is a valid package name.
583         if not re_valid_pkg_name.match(self.pkg.changes["source"]):
584             self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))
585
586         # Split multi-value fields into a lower-level dictionary
587         for i in ("architecture", "distribution", "binary", "closes"):
588             o = self.pkg.changes.get(i, "")
589             if o != "":
590                 del self.pkg.changes[i]
591
592             self.pkg.changes[i] = {}
593
594             for j in o.split():
595                 self.pkg.changes[i][j] = 1
596
597         # Fix the Maintainer: field to be RFC822/2047 compatible
598         try:
599             (self.pkg.changes["maintainer822"],
600              self.pkg.changes["maintainer2047"],
601              self.pkg.changes["maintainername"],
602              self.pkg.changes["maintaineremail"]) = \
603                    fix_maintainer (self.pkg.changes["maintainer"])
604         except ParseMaintError, msg:
605             self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
606                    % (filename, self.pkg.changes["maintainer"], msg))
607
608         # ...likewise for the Changed-By: field if it exists.
609         try:
610             (self.pkg.changes["changedby822"],
611              self.pkg.changes["changedby2047"],
612              self.pkg.changes["changedbyname"],
613              self.pkg.changes["changedbyemail"]) = \
614                    fix_maintainer (self.pkg.changes.get("changed-by", ""))
615         except ParseMaintError, msg:
616             self.pkg.changes["changedby822"] = ""
617             self.pkg.changes["changedby2047"] = ""
618             self.pkg.changes["changedbyname"] = ""
619             self.pkg.changes["changedbyemail"] = ""
620
621             self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
622                    % (filename, self.pkg.changes["changed-by"], msg))
623
624         # Ensure all the values in Closes: are numbers
625         if self.pkg.changes.has_key("closes"):
626             for i in self.pkg.changes["closes"].keys():
627                 if re_isanum.match (i) == None:
628                     self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))
629
630         # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
631         self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
632         self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])
633
634         # Check the .changes is non-empty
635         if not self.pkg.files:
636             self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
637             return False
638
639         # Changes was syntactically valid even if we'll reject
640         return True
641
642     ###########################################################################
643
644     def check_distributions(self):
645         "Check and map the Distribution field"
646
647         Cnf = Config()
648
649         # Handle suite mappings
650         for m in Cnf.ValueList("SuiteMappings"):
651             args = m.split()
652             mtype = args[0]
653             if mtype == "map" or mtype == "silent-map":
654                 (source, dest) = args[1:3]
655                 if self.pkg.changes["distribution"].has_key(source):
656                     del self.pkg.changes["distribution"][source]
657                     self.pkg.changes["distribution"][dest] = 1
658                     if mtype != "silent-map":
659                         self.notes.append("Mapping %s to %s." % (source, dest))
660                 if self.pkg.changes.has_key("distribution-version"):
661                     if self.pkg.changes["distribution-version"].has_key(source):
662                         self.pkg.changes["distribution-version"][source]=dest
663             elif mtype == "map-unreleased":
664                 (source, dest) = args[1:3]
665                 if self.pkg.changes["distribution"].has_key(source):
666                     for arch in self.pkg.changes["architecture"].keys():
667                         if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
668                             self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
669                             del self.pkg.changes["distribution"][source]
670                             self.pkg.changes["distribution"][dest] = 1
671                             break
672             elif mtype == "ignore":
673                 suite = args[1]
674                 if self.pkg.changes["distribution"].has_key(suite):
675                     del self.pkg.changes["distribution"][suite]
676                     self.warnings.append("Ignoring %s as a target suite." % (suite))
677             elif mtype == "reject":
678                 suite = args[1]
679                 if self.pkg.changes["distribution"].has_key(suite):
680                     self.rejects.append("Uploads to %s are not accepted." % (suite))
681             elif mtype == "propup-version":
682                 # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
683                 #
684                 # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
685                 if self.pkg.changes["distribution"].has_key(args[1]):
686                     self.pkg.changes.setdefault("distribution-version", {})
687                     for suite in args[2:]:
688                         self.pkg.changes["distribution-version"][suite] = suite
689
690         # Ensure there is (still) a target distribution
691         if len(self.pkg.changes["distribution"].keys()) < 1:
692             self.rejects.append("No valid distribution remaining.")
693
694         # Ensure target distributions exist
695         for suite in self.pkg.changes["distribution"].keys():
696             if not Cnf.has_key("Suite::%s" % (suite)):
697                 self.rejects.append("Unknown distribution `%s'." % (suite))
698
699     ###########################################################################
700
701     def binary_file_checks(self, f, session):
702         cnf = Config()
703         entry = self.pkg.files[f]
704
705         # Extract package control information
706         deb_file = utils.open_file(f)
707         try:
708             control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
709         except:
710             self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
711             deb_file.close()
712             # Can't continue, none of the checks on control would work.
713             return
714
715         # Check for mandantory "Description:"
716         deb_file.seek(0)
717         try:
718             apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
719         except:
720             self.rejects.append("%s: Missing Description in binary package" % (f))
721             return
722
723         deb_file.close()
724
725         # Check for mandatory fields
726         for field in [ "Package", "Architecture", "Version" ]:
727             if control.Find(field) == None:
728                 # Can't continue
729                 self.rejects.append("%s: No %s field in control." % (f, field))
730                 return
731
732         # Ensure the package name matches the one give in the .changes
733         if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
734             self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
735
736         # Validate the package field
737         package = control.Find("Package")
738         if not re_valid_pkg_name.match(package):
739             self.rejects.append("%s: invalid package name '%s'." % (f, package))
740
741         # Validate the version field
742         version = control.Find("Version")
743         if not re_valid_version.match(version):
744             self.rejects.append("%s: invalid version number '%s'." % (f, version))
745
746         # Ensure the architecture of the .deb is one we know about.
747         default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
748         architecture = control.Find("Architecture")
749         upload_suite = self.pkg.changes["distribution"].keys()[0]
750
751         if      architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session = session)] \
752             and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session = session)]:
753             self.rejects.append("Unknown architecture '%s'." % (architecture))
754
755         # Ensure the architecture of the .deb is one of the ones
756         # listed in the .changes.
757         if not self.pkg.changes["architecture"].has_key(architecture):
758             self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
759
760         # Sanity-check the Depends field
761         depends = control.Find("Depends")
762         if depends == '':
763             self.rejects.append("%s: Depends field is empty." % (f))
764
765         # Sanity-check the Provides field
766         provides = control.Find("Provides")
767         if provides:
768             provide = re_spacestrip.sub('', provides)
769             if provide == '':
770                 self.rejects.append("%s: Provides field is empty." % (f))
771             prov_list = provide.split(",")
772             for prov in prov_list:
773                 if not re_valid_pkg_name.match(prov):
774                     self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
775
776         # If there is a Built-Using field, we need to check we can find the
777         # exact source version
778         built_using = control.Find("Built-Using")
779         if built_using:
780             try:
781                 entry["built-using"] = []
782                 for dep in apt_pkg.parse_depends(built_using):
783                     bu_s, bu_v, bu_e = dep[0]
784                     # Check that it's an exact match dependency and we have
785                     # some form of version
786                     if bu_e != "=" or len(bu_v) < 1:
787                         self.rejects.append("%s: Built-Using contains non strict dependency (%s %s %s)" % (f, bu_s, bu_e, bu_v))
788                     else:
789                         # Find the source id for this version
790                         bu_so = get_sources_from_name(bu_s, version=bu_v, session = session)
791                         if len(bu_so) != 1:
792                             self.rejects.append("%s: Built-Using (%s = %s): Cannot find source package" % (f, bu_s, bu_v))
793                         else:
794                             entry["built-using"].append( (bu_so[0].source, bu_so[0].version, ) )
795
796             except ValueError, e:
797                 self.rejects.append("%s: Cannot parse Built-Using field: %s" % (f, str(e)))
798
799
800         # Check the section & priority match those given in the .changes (non-fatal)
801         if     control.Find("Section") and entry["section"] != "" \
802            and entry["section"] != control.Find("Section"):
803             self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
804                                 (f, control.Find("Section", ""), entry["section"]))
805         if control.Find("Priority") and entry["priority"] != "" \
806            and entry["priority"] != control.Find("Priority"):
807             self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
808                                 (f, control.Find("Priority", ""), entry["priority"]))
809
810         entry["package"] = package
811         entry["architecture"] = architecture
812         entry["version"] = version
813         entry["maintainer"] = control.Find("Maintainer", "")
814
815         if f.endswith(".udeb"):
816             self.pkg.files[f]["dbtype"] = "udeb"
817         elif f.endswith(".deb"):
818             self.pkg.files[f]["dbtype"] = "deb"
819         else:
820             self.rejects.append("%s is neither a .deb or a .udeb." % (f))
821
822         entry["source"] = control.Find("Source", entry["package"])
823
824         # Get the source version
825         source = entry["source"]
826         source_version = ""
827
828         if source.find("(") != -1:
829             m = re_extract_src_version.match(source)
830             source = m.group(1)
831             source_version = m.group(2)
832
833         if not source_version:
834             source_version = self.pkg.files[f]["version"]
835
836         entry["source package"] = source
837         entry["source version"] = source_version
838
839         # Ensure the filename matches the contents of the .deb
840         m = re_isadeb.match(f)
841
842         #  package name
843         file_package = m.group(1)
844         if entry["package"] != file_package:
845             self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
846                                 (f, file_package, entry["dbtype"], entry["package"]))
847         epochless_version = re_no_epoch.sub('', control.Find("Version"))
848
849         #  version
850         file_version = m.group(2)
851         if epochless_version != file_version:
852             self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
853                                 (f, file_version, entry["dbtype"], epochless_version))
854
855         #  architecture
856         file_architecture = m.group(3)
857         if entry["architecture"] != file_architecture:
858             self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
859                                 (f, file_architecture, entry["dbtype"], entry["architecture"]))
860
861         # Check for existent source
862         source_version = entry["source version"]
863         source_package = entry["source package"]
864         if self.pkg.changes["architecture"].has_key("source"):
865             if source_version != self.pkg.changes["version"]:
866                 self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
867                                     (source_version, f, self.pkg.changes["version"]))
868         else:
869             # Check in the SQL database
870             if not source_exists(source_package, source_version, suites = \
871                 self.pkg.changes["distribution"].keys(), session = session):
872                 # Check in one of the other directories
873                 source_epochless_version = re_no_epoch.sub('', source_version)
874                 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
875                 if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
876                     entry["byhand"] = 1
877                 elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
878                     entry["new"] = 1
879                 else:
880                     dsc_file_exists = False
881                     for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
882                         if cnf.has_key("Dir::Queue::%s" % (myq)):
883                             if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
884                                 dsc_file_exists = True
885                                 break
886
887                     if not dsc_file_exists:
888                         self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
889
890         # Check the version and for file overwrites
891         self.check_binary_against_db(f, session)
892
893     def source_file_checks(self, f, session):
894         entry = self.pkg.files[f]
895
896         m = re_issource.match(f)
897         if not m:
898             return
899
900         entry["package"] = m.group(1)
901         entry["version"] = m.group(2)
902         entry["type"] = m.group(3)
903
904         # Ensure the source package name matches the Source filed in the .changes
905         if self.pkg.changes["source"] != entry["package"]:
906             self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
907
908         # Ensure the source version matches the version in the .changes file
909         if re_is_orig_source.match(f):
910             changes_version = self.pkg.changes["chopversion2"]
911         else:
912             changes_version = self.pkg.changes["chopversion"]
913
914         if changes_version != entry["version"]:
915             self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))
916
917         # Ensure the .changes lists source in the Architecture field
918         if not self.pkg.changes["architecture"].has_key("source"):
919             self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))
920
921         # Check the signature of a .dsc file
922         if entry["type"] == "dsc":
923             # check_signature returns either:
924             #  (None, [list, of, rejects]) or (signature, [])
925             (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
926             for j in rejects:
927                 self.rejects.append(j)
928
929         entry["architecture"] = "source"
930
931     def per_suite_file_checks(self, f, suite, session):
932         cnf = Config()
933         entry = self.pkg.files[f]
934
935         # Skip byhand
936         if entry.has_key("byhand"):
937             return
938
939         # Check we have fields we need to do these checks
940         oktogo = True
941         for m in ['component', 'package', 'priority', 'size', 'md5sum']:
942             if not entry.has_key(m):
943                 self.rejects.append("file '%s' does not have field %s set" % (f, m))
944                 oktogo = False
945
946         if not oktogo:
947             return
948
949         # Handle component mappings
950         for m in cnf.ValueList("ComponentMappings"):
951             (source, dest) = m.split()
952             if entry["component"] == source:
953                 entry["original component"] = source
954                 entry["component"] = dest
955
956         # Ensure the component is valid for the target suite
957         if cnf.has_key("Suite:%s::Components" % (suite)) and \
958            entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
959             self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
960             return
961
962         # Validate the component
963         if not get_component(entry["component"], session):
964             self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
965             return
966
967         # See if the package is NEW
968         if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
969             entry["new"] = 1
970
971         # Validate the priority
972         if entry["priority"].find('/') != -1:
973             self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))
974
975         # Determine the location
976         location = cnf["Dir::Pool"]
977         l = get_location(location, entry["component"], session=session)
978         if l is None:
979             self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
980             entry["location id"] = -1
981         else:
982             entry["location id"] = l.location_id
983
984         # Check the md5sum & size against existing files (if any)
985         entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])
986
987         found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
988                                          entry["size"], entry["md5sum"], entry["location id"])
989
990         if found is None:
991             self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
992         elif found is False and poolfile is not None:
993             self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
994         else:
995             if poolfile is None:
996                 entry["files id"] = None
997             else:
998                 entry["files id"] = poolfile.file_id
999
1000         # Check for packages that have moved from one component to another
1001         entry['suite'] = suite
1002         arch_list = [entry["architecture"], 'all']
1003         component = get_component_by_package_suite(self.pkg.files[f]['package'], \
1004             [suite], arch_list = arch_list, session = session)
1005         if component is not None:
1006             entry["othercomponents"] = component
1007
1008     def check_files(self, action=True):
1009         file_keys = self.pkg.files.keys()
1010         holding = Holding()
1011         cnf = Config()
1012
1013         if action:
1014             cwd = os.getcwd()
1015             os.chdir(self.pkg.directory)
1016             for f in file_keys:
1017                 ret = holding.copy_to_holding(f)
1018                 if ret is not None:
1019                     self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)
1020
1021             os.chdir(cwd)
1022
1023         # check we already know the changes file
1024         # [NB: this check must be done post-suite mapping]
1025         base_filename = os.path.basename(self.pkg.changes_file)
1026
1027         session = DBConn().session()
1028
1029         try:
1030             dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
1031             # if in the pool or in a queue other than unchecked, reject
1032             if (dbc.in_queue is None) \
1033                    or (dbc.in_queue is not None
1034                        and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
1035                 self.rejects.append("%s file already known to dak" % base_filename)
1036         except NoResultFound, e:
1037             # not known, good
1038             pass
1039
1040         has_binaries = False
1041         has_source = False
1042
1043         for f, entry in self.pkg.files.items():
1044             # Ensure the file does not already exist in one of the accepted directories
1045             for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
1046                 if not cnf.has_key("Dir::Queue::%s" % (d)): continue
1047                 if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
1048                     self.rejects.append("%s file already exists in the %s directory." % (f, d))
1049
1050             if not re_taint_free.match(f):
1051                 self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))
1052
1053             # Check the file is readable
1054             if os.access(f, os.R_OK) == 0:
1055                 # When running in -n, copy_to_holding() won't have
1056                 # generated the reject_message, so we need to.
1057                 if action:
1058                     if os.path.exists(f):
1059                         self.rejects.append("Can't read `%s'. [permission denied]" % (f))
1060                     else:
1061                         # Don't directly reject, mark to check later to deal with orig's
1062                         # we can find in the pool
1063                         self.later_check_files.append(f)
1064                 entry["type"] = "unreadable"
1065                 continue
1066
1067             # If it's byhand skip remaining checks
1068             if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
1069                 entry["byhand"] = 1
1070                 entry["type"] = "byhand"
1071
1072             # Checks for a binary package...
1073             elif re_isadeb.match(f):
1074                 has_binaries = True
1075                 entry["type"] = "deb"
1076
1077                 # This routine appends to self.rejects/warnings as appropriate
1078                 self.binary_file_checks(f, session)
1079
1080             # Checks for a source package...
1081             elif re_issource.match(f):
1082                 has_source = True
1083
1084                 # This routine appends to self.rejects/warnings as appropriate
1085                 self.source_file_checks(f, session)
1086
1087             # Not a binary or source package?  Assume byhand...
1088             else:
1089                 entry["byhand"] = 1
1090                 entry["type"] = "byhand"
1091
1092             # Per-suite file checks
1093             entry["oldfiles"] = {}
1094             for suite in self.pkg.changes["distribution"].keys():
1095                 self.per_suite_file_checks(f, suite, session)
1096
1097         session.close()
1098
1099         # If the .changes file says it has source, it must have source.
1100         if self.pkg.changes["architecture"].has_key("source"):
1101             if not has_source:
1102                 self.rejects.append("no source found and Architecture line in changes mention source.")
1103
1104             if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
1105                 self.rejects.append("source only uploads are not supported.")
1106
1107     ###########################################################################
1108
1109     def __dsc_filename(self):
1110         """
1111         Returns: (Status, Dsc_Filename)
1112         where
1113           Status: Boolean; True when there was no error, False otherwise
1114           Dsc_Filename: String; name of the dsc file if Status is True, reason for the error otherwise
1115         """
1116         dsc_filename = None
1117
1118         # find the dsc
1119         for name, entry in self.pkg.files.items():
1120             if entry.has_key("type") and entry["type"] == "dsc":
1121                 if dsc_filename:
1122                     return False, "cannot process a .changes file with multiple .dsc's."
1123                 else:
1124                     dsc_filename = name
1125
1126         if not dsc_filename:
1127             return False, "source uploads must contain a dsc file"
1128
1129         return True, dsc_filename
1130
1131     def load_dsc(self, action=True, signing_rules=1):
1132         """
1133         Find and load the dsc from self.pkg.files into self.dsc
1134
1135         Returns: (Status, Reason)
1136         where
1137           Status: Boolean; True when there was no error, False otherwise
1138           Reason: String; When Status is False this describes the error
1139         """
1140
1141         # find the dsc
1142         (status, dsc_filename) = self.__dsc_filename()
1143         if not status:
1144             # If status is false, dsc_filename has the reason
1145             return False, dsc_filename
1146
1147         try:
1148             self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=signing_rules, dsc_file=1))
1149         except CantOpenError:
1150             if not action:
1151                 return False, "%s: can't read file." % (dsc_filename)
1152         except ParseChangesError, line:
1153             return False, "%s: parse error, can't grok: %s." % (dsc_filename, line)
1154         except InvalidDscError, line:
1155             return False, "%s: syntax error on line %s." % (dsc_filename, line)
1156         except ChangesUnicodeError:
1157             return False, "%s: dsc file not proper utf-8." % (dsc_filename)
1158
1159         return True, None
1160
1161     ###########################################################################
1162
1163     def check_dsc(self, action=True, session=None):
1164         """Returns bool indicating whether or not the source changes are valid"""
1165         # Ensure there is source to check
1166         if not self.pkg.changes["architecture"].has_key("source"):
1167             return True
1168
1169         (status, reason) = self.load_dsc(action=action)
1170         if not status:
1171             self.rejects.append(reason)
1172             return False
1173         (status, dsc_filename) = self.__dsc_filename()
1174         if not status:
1175             # If status is false, dsc_filename has the reason
1176             self.rejects.append(dsc_filename)
1177             return False
1178
1179         # Build up the file list of files mentioned by the .dsc
1180         try:
1181             self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
1182         except NoFilesFieldError:
1183             self.rejects.append("%s: no Files: field." % (dsc_filename))
1184             return False
1185         except UnknownFormatError, format:
1186             self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
1187             return False
1188         except ParseChangesError, line:
1189             self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
1190             return False
1191
1192         # Enforce mandatory fields
1193         for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
1194             if not self.pkg.dsc.has_key(i):
1195                 self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
1196                 return False
1197
1198         # Validate the source and version fields
1199         if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
1200             self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
1201         if not re_valid_version.match(self.pkg.dsc["version"]):
1202             self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))
1203
1204         # Only a limited list of source formats are allowed in each suite
1205         for dist in self.pkg.changes["distribution"].keys():
1206             allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
1207             if self.pkg.dsc["format"] not in allowed:
1208                 self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))
1209
1210         # Validate the Maintainer field
1211         try:
1212             # We ignore the return value
1213             fix_maintainer(self.pkg.dsc["maintainer"])
1214         except ParseMaintError, msg:
1215             self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
1216                                  % (dsc_filename, self.pkg.dsc["maintainer"], msg))
1217
1218         # Validate the build-depends field(s)
1219         for field_name in [ "build-depends", "build-depends-indep" ]:
1220             field = self.pkg.dsc.get(field_name)
1221             if field:
1222                 # Have apt try to parse them...
1223                 try:
1224                     apt_pkg.ParseSrcDepends(field)
1225                 except:
1226                     self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))
1227
1228         # Ensure the version number in the .dsc matches the version number in the .changes
1229         epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1230         changes_version = self.pkg.files[dsc_filename]["version"]
1231
1232         if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
1233             self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
1234
1235         # Ensure the Files field contain only what's expected
1236         self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))
1237
1238         # Ensure source is newer than existing source in target suites
1239         session = DBConn().session()
1240         self.check_source_against_db(dsc_filename, session)
1241         self.check_dsc_against_db(dsc_filename, session)
1242
1243         dbchg = get_dbchange(self.pkg.changes_file, session)
1244
1245         # Finally, check if we're missing any files
1246         for f in self.later_check_files:
1247             print 'XXX: %s' % f
1248             # Check if we've already processed this file if we have a dbchg object
1249             ok = False
1250             if dbchg:
1251                 for pf in dbchg.files:
1252                     if pf.filename == f and pf.processed:
1253                         self.notes.append('%s was already processed so we can go ahead' % f)
1254                         ok = True
1255                         del self.pkg.files[f]
1256             if not ok:
1257                 self.rejects.append("Could not find file %s references in changes" % f)
1258
1259         session.close()
1260
1261         return True
1262
1263     ###########################################################################
1264
1265     def get_changelog_versions(self, source_dir):
1266         """Extracts a the source package and (optionally) grabs the
1267         version history out of debian/changelog for the BTS."""
1268
1269         cnf = Config()
1270
1271         # Find the .dsc (again)
1272         dsc_filename = None
1273         for f in self.pkg.files.keys():
1274             if self.pkg.files[f]["type"] == "dsc":
1275                 dsc_filename = f
1276
1277         # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1278         if not dsc_filename:
1279             return
1280
1281         # Create a symlink mirror of the source files in our temporary directory
1282         for f in self.pkg.files.keys():
1283             m = re_issource.match(f)
1284             if m:
1285                 src = os.path.join(source_dir, f)
1286                 # If a file is missing for whatever reason, give up.
1287                 if not os.path.exists(src):
1288                     return
1289                 ftype = m.group(3)
1290                 if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
1291                    self.pkg.orig_files[f].has_key("path"):
1292                     continue
1293                 dest = os.path.join(os.getcwd(), f)
1294                 os.symlink(src, dest)
1295
1296         # If the orig files are not a part of the upload, create symlinks to the
1297         # existing copies.
1298         for orig_file in self.pkg.orig_files.keys():
1299             if not self.pkg.orig_files[orig_file].has_key("path"):
1300                 continue
1301             dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
1302             os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
1303
1304         # Extract the source
1305         try:
1306             unpacked = UnpackedSource(dsc_filename)
1307         except:
1308             self.rejects.append("'dpkg-source -x' failed for %s." % dsc_filename)
1309             return
1310
1311         if not cnf.Find("Dir::Queue::BTSVersionTrack"):
1312             return
1313
1314         # Get the upstream version
1315         upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1316         if re_strip_revision.search(upstr_version):
1317             upstr_version = re_strip_revision.sub('', upstr_version)
1318
1319         # Ensure the changelog file exists
1320         changelog_file = unpacked.get_changelog_file()
1321         if changelog_file is None:
1322             self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
1323             return
1324
1325         # Parse the changelog
1326         self.pkg.dsc["bts changelog"] = ""
1327         for line in changelog_file.readlines():
1328             m = re_changelog_versions.match(line)
1329             if m:
1330                 self.pkg.dsc["bts changelog"] += line
1331         changelog_file.close()
1332         unpacked.cleanup()
1333
1334         # Check we found at least one revision in the changelog
1335         if not self.pkg.dsc["bts changelog"]:
1336             self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1337
1338     def check_source(self):
1339         # Bail out if:
1340         #    a) there's no source
1341         if not self.pkg.changes["architecture"].has_key("source"):
1342             return
1343
1344         tmpdir = utils.temp_dirname()
1345
1346         # Move into the temporary directory
1347         cwd = os.getcwd()
1348         os.chdir(tmpdir)
1349
1350         # Get the changelog version history
1351         self.get_changelog_versions(cwd)
1352
1353         # Move back and cleanup the temporary tree
1354         os.chdir(cwd)
1355
1356         try:
1357             shutil.rmtree(tmpdir)
1358         except OSError, e:
1359             if e.errno != errno.EACCES:
1360                 print "foobar"
1361                 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1362
1363             self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1364             # We probably have u-r or u-w directories so chmod everything
1365             # and try again.
1366             cmd = "chmod -R u+rwx %s" % (tmpdir)
1367             result = os.system(cmd)
1368             if result != 0:
1369                 utils.fubar("'%s' failed with result %s." % (cmd, result))
1370             shutil.rmtree(tmpdir)
1371         except Exception, e:
1372             print "foobar2 (%s)" % e
1373             utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1374
1375     ###########################################################################
1376     def ensure_hashes(self):
1377         # Make sure we recognise the format of the Files: field in the .changes
1378         format = self.pkg.changes.get("format", "0.0").split(".", 1)
1379         if len(format) == 2:
1380             format = int(format[0]), int(format[1])
1381         else:
1382             format = int(float(format[0])), 0
1383
1384         # We need to deal with the original changes blob, as the fields we need
1385         # might not be in the changes dict serialised into the .dak anymore.
1386         orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1387
1388         # Copy the checksums over to the current changes dict.  This will keep
1389         # the existing modifications to it intact.
1390         for field in orig_changes:
1391             if field.startswith('checksums-'):
1392                 self.pkg.changes[field] = orig_changes[field]
1393
1394         # Check for unsupported hashes
1395         for j in utils.check_hash_fields(".changes", self.pkg.changes):
1396             self.rejects.append(j)
1397
1398         for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1399             self.rejects.append(j)
1400
1401         # We have to calculate the hash if we have an earlier changes version than
1402         # the hash appears in rather than require it exist in the changes file
1403         for hashname, hashfunc, version in utils.known_hashes:
1404             # TODO: Move _ensure_changes_hash into this class
1405             for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1406                 self.rejects.append(j)
1407             if "source" in self.pkg.changes["architecture"]:
1408                 # TODO: Move _ensure_dsc_hash into this class
1409                 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1410                     self.rejects.append(j)
1411
1412     def check_hashes(self):
1413         for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1414             self.rejects.append(m)
1415
1416         for m in utils.check_size(".changes", self.pkg.files):
1417             self.rejects.append(m)
1418
1419         for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1420             self.rejects.append(m)
1421
1422         for m in utils.check_size(".dsc", self.pkg.dsc_files):
1423             self.rejects.append(m)
1424
1425         self.ensure_hashes()
1426
1427     ###########################################################################
1428
1429     def ensure_orig(self, target_dir='.', session=None):
1430         """
1431         Ensures that all orig files mentioned in the changes file are present
1432         in target_dir. If they do not exist, they are symlinked into place.
1433
1434         An list containing the symlinks that were created are returned (so they
1435         can be removed).
1436         """
1437
1438         symlinked = []
1439         cnf = Config()
1440
1441         for filename, entry in self.pkg.dsc_files.iteritems():
1442             if not re_is_orig_source.match(filename):
1443                 # File is not an orig; ignore
1444                 continue
1445
1446             if os.path.exists(filename):
1447                 # File exists, no need to continue
1448                 continue
1449
1450             def symlink_if_valid(path):
1451                 f = utils.open_file(path)
1452                 md5sum = apt_pkg.md5sum(f)
1453                 f.close()
1454
1455                 fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
1456                 expected = (int(entry['size']), entry['md5sum'])
1457
1458                 if fingerprint != expected:
1459                     return False
1460
1461                 dest = os.path.join(target_dir, filename)
1462
1463                 os.symlink(path, dest)
1464                 symlinked.append(dest)
1465
1466                 return True
1467
1468             session_ = session
1469             if session is None:
1470                 session_ = DBConn().session()
1471
1472             found = False
1473
1474             # Look in the pool
1475             for poolfile in get_poolfile_like_name('%s' % filename, session_):
1476                 poolfile_path = os.path.join(
1477                     poolfile.location.path, poolfile.filename
1478                 )
1479
1480                 if symlink_if_valid(poolfile_path):
1481                     found = True
1482                     break
1483
1484             if session is None:
1485                 session_.close()
1486
1487             if found:
1488                 continue
1489
1490             # Look in some other queues for the file
1491             queues = ('New', 'Byhand', 'ProposedUpdates',
1492                 'OldProposedUpdates', 'Embargoed', 'Unembargoed')
1493
1494             for queue in queues:
1495                 if not cnf.get('Dir::Queue::%s' % queue):
1496                     continue
1497
1498                 queuefile_path = os.path.join(
1499                     cnf['Dir::Queue::%s' % queue], filename
1500                 )
1501
1502                 if not os.path.exists(queuefile_path):
1503                     # Does not exist in this queue
1504                     continue
1505
1506                 if symlink_if_valid(queuefile_path):
1507                     break
1508
1509         return symlinked
1510
1511     ###########################################################################
1512
1513     def check_lintian(self):
1514         """
1515         Extends self.rejects by checking the output of lintian against tags
1516         specified in Dinstall::LintianTags.
1517         """
1518
1519         cnf = Config()
1520
1521         # Don't reject binary uploads
1522         if not self.pkg.changes['architecture'].has_key('source'):
1523             return
1524
1525         # Only check some distributions
1526         for dist in ('unstable', 'experimental'):
1527             if dist in self.pkg.changes['distribution']:
1528                 break
1529         else:
1530             return
1531
1532         # If we do not have a tagfile, don't do anything
1533         tagfile = cnf.get("Dinstall::LintianTags")
1534         if not tagfile:
1535             return
1536
1537         # Parse the yaml file
1538         sourcefile = file(tagfile, 'r')
1539         sourcecontent = sourcefile.read()
1540         sourcefile.close()
1541
1542         try:
1543             lintiantags = yaml.load(sourcecontent)['lintian']
1544         except yaml.YAMLError, msg:
1545             utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
1546             return
1547
1548         # Try and find all orig mentioned in the .dsc
1549         symlinked = self.ensure_orig()
1550
1551         # Setup the input file for lintian
1552         fd, temp_filename = utils.temp_filename()
1553         temptagfile = os.fdopen(fd, 'w')
1554         for tags in lintiantags.values():
1555             temptagfile.writelines(['%s\n' % x for x in tags])
1556         temptagfile.close()
1557
1558         try:
1559             cmd = "lintian --show-overrides --tags-from-file %s %s" % \
1560                 (temp_filename, self.pkg.changes_file)
1561
1562             result, output = commands.getstatusoutput(cmd)
1563         finally:
1564             # Remove our tempfile and any symlinks we created
1565             os.unlink(temp_filename)
1566
1567             for symlink in symlinked:
1568                 os.unlink(symlink)
1569
1570         if result == 2:
1571             utils.warn("lintian failed for %s [return code: %s]." % \
1572                 (self.pkg.changes_file, result))
1573             utils.warn(utils.prefix_multi_line_string(output, \
1574                 " [possible output:] "))
1575
1576         def log(*txt):
1577             if self.logger:
1578                 self.logger.log(
1579                     [self.pkg.changes_file, "check_lintian"] + list(txt)
1580                 )
1581
1582         # Generate messages
1583         parsed_tags = parse_lintian_output(output)
1584         self.rejects.extend(
1585             generate_reject_messages(parsed_tags, lintiantags, log=log)
1586         )
1587
1588     ###########################################################################
1589     def check_urgency(self):
1590         cnf = Config()
1591         if self.pkg.changes["architecture"].has_key("source"):
1592             if not self.pkg.changes.has_key("urgency"):
1593                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1594             self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1595             if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1596                 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1597                                      (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1598                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1599
1600     ###########################################################################
1601
1602     # Sanity check the time stamps of files inside debs.
1603     # [Files in the near future cause ugly warnings and extreme time
1604     #  travel can cause errors on extraction]
1605
1606     def check_timestamps(self):
1607         Cnf = Config()
1608
1609         future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1610         past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1611         tar = TarTime(future_cutoff, past_cutoff)
1612
1613         for filename, entry in self.pkg.files.items():
1614             if entry["type"] == "deb":
1615                 tar.reset()
1616                 try:
1617                     deb_file = utils.open_file(filename)
1618                     apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
1619                     deb_file.seek(0)
1620                     try:
1621                         apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
1622                     except SystemError, e:
1623                         # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
1624                         if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
1625                             raise
1626                         deb_file.seek(0)
1627                         apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")
1628
1629                     deb_file.close()
1630
1631                     future_files = tar.future_files.keys()
1632                     if future_files:
1633                         num_future_files = len(future_files)
1634                         future_file = future_files[0]
1635                         future_date = tar.future_files[future_file]
1636                         self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1637                                % (filename, num_future_files, future_file, time.ctime(future_date)))
1638
1639                     ancient_files = tar.ancient_files.keys()
1640                     if ancient_files:
1641                         num_ancient_files = len(ancient_files)
1642                         ancient_file = ancient_files[0]
1643                         ancient_date = tar.ancient_files[ancient_file]
1644                         self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1645                                % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1646                 except:
1647                     self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1648
1649     def check_if_upload_is_sponsored(self, uid_email, uid_name):
1650         if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1651             sponsored = False
1652         elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1653             sponsored = False
1654             if uid_name == "":
1655                 sponsored = True
1656         else:
1657             sponsored = True
1658             if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1659                 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1660                 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1661                     self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1662                         self.pkg.changes["sponsoremail"] = uid_email
1663
1664         return sponsored
1665
1666
1667     ###########################################################################
1668     # check_signed_by_key checks
1669     ###########################################################################
1670
1671     def check_signed_by_key(self):
1672         """Ensure the .changes is signed by an authorized uploader."""
1673         session = DBConn().session()
1674
1675         # First of all we check that the person has proper upload permissions
1676         # and that this upload isn't blocked
1677         fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1678
1679         if fpr is None:
1680             self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1681             return
1682
1683         # TODO: Check that import-keyring adds UIDs properly
1684         if not fpr.uid:
1685             self.rejects.append("Cannot find uid for fingerprint %s.  Please contact ftpmaster@debian.org" % fpr.fingerprint)
1686             return
1687
1688         # Check that the fingerprint which uploaded has permission to do so
1689         self.check_upload_permissions(fpr, session)
1690
1691         # Check that this package is not in a transition
1692         self.check_transition(session)
1693
1694         session.close()
1695
1696
1697     def check_upload_permissions(self, fpr, session):
1698         # Check any one-off upload blocks
1699         self.check_upload_blocks(fpr, session)
1700
1701         # Start with DM as a special case
1702         # DM is a special case unfortunately, so we check it first
1703         # (keys with no source access get more access than DMs in one
1704         #  way; DMs can only upload for their packages whether source
1705         #  or binary, whereas keys with no access might be able to
1706         #  upload some binaries)
1707         if fpr.source_acl.access_level == 'dm':
1708             self.check_dm_upload(fpr, session)
1709         else:
1710             # Check source-based permissions for other types
1711             if self.pkg.changes["architecture"].has_key("source") and \
1712                 fpr.source_acl.access_level is None:
1713                 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1714                 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1715                 self.rejects.append(rej)
1716                 return
1717             # If not a DM, we allow full upload rights
1718             uid_email = "%s@debian.org" % (fpr.uid.uid)
1719             self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1720
1721
1722         # Check binary upload permissions
1723         # By this point we know that DMs can't have got here unless they
1724         # are allowed to deal with the package concerned so just apply
1725         # normal checks
1726         if fpr.binary_acl.access_level == 'full':
1727             return
1728
1729         # Otherwise we're in the map case
1730         tmparches = self.pkg.changes["architecture"].copy()
1731         tmparches.pop('source', None)
1732
1733         for bam in fpr.binary_acl_map:
1734             tmparches.pop(bam.architecture.arch_string, None)
1735
1736         if len(tmparches.keys()) > 0:
1737             if fpr.binary_reject:
1738                 rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1739                 rej += "\narchitectures involved are: ", ",".join(tmparches.keys())
1740                 self.rejects.append(rej)
1741             else:
1742                 # TODO: This is where we'll implement reject vs throw away binaries later
1743                 rej = "Uhm.  I'm meant to throw away the binaries now but that's not implemented yet"
1744                 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1745                 rej += "\nFingerprint: %s", (fpr.fingerprint)
1746                 self.rejects.append(rej)
1747
1748
1749     def check_upload_blocks(self, fpr, session):
1750         """Check whether any upload blocks apply to this source, source
1751            version, uid / fpr combination"""
1752
1753         def block_rej_template(fb):
1754             rej = 'Manual upload block in place for package %s' % fb.source
1755             if fb.version is not None:
1756                 rej += ', version %s' % fb.version
1757             return rej
1758
1759         for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1760             # version is None if the block applies to all versions
1761             if fb.version is None or fb.version == self.pkg.changes['version']:
1762                 # Check both fpr and uid - either is enough to cause a reject
1763                 if fb.fpr is not None:
1764                     if fb.fpr.fingerprint == fpr.fingerprint:
1765                         self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1766                 if fb.uid is not None:
1767                     if fb.uid == fpr.uid:
1768                         self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1769
1770
1771     def check_dm_upload(self, fpr, session):
1772         # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
1773         ## none of the uploaded packages are NEW
1774         rej = False
1775         for f in self.pkg.files.keys():
1776             if self.pkg.files[f].has_key("byhand"):
1777                 self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
1778                 rej = True
1779             if self.pkg.files[f].has_key("new"):
1780                 self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
1781                 rej = True
1782
1783         if rej:
1784             return
1785
1786         r = get_newest_source(self.pkg.changes["source"], session)
1787
1788         if r is None:
1789             rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
1790             self.rejects.append(rej)
1791             return
1792
1793         if not r.dm_upload_allowed:
1794             rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
1795             self.rejects.append(rej)
1796             return
1797
1798         ## the Maintainer: field of the uploaded .changes file corresponds with
1799         ## the owner of the key used (ie, non-developer maintainers may not sponsor
1800         ## uploads)
1801         if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
1802             self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
1803
1804         ## the most recent version of the package uploaded to unstable or
1805         ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
1806         ## non-developer maintainers cannot NMU or hijack packages)
1807
1808         # srcuploaders includes the maintainer
1809         accept = False
1810         for sup in r.srcuploaders:
1811             (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
1812             # Eww - I hope we never have two people with the same name in Debian
1813             if email == fpr.uid.uid or name == fpr.uid.name:
1814                 accept = True
1815                 break
1816
1817         if not accept:
1818             self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
1819             return
1820
1821         ## none of the packages are being taken over from other source packages
1822         for b in self.pkg.changes["binary"].keys():
1823             for suite in self.pkg.changes["distribution"].keys():
1824                 for s in get_source_by_package_and_suite(b, suite, session):
1825                     if s.source != self.pkg.changes["source"]:
1826                         self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s, suite))
1827
1828
1829
1830     def check_transition(self, session):
1831         cnf = Config()
1832
1833         sourcepkg = self.pkg.changes["source"]
1834
1835         # No sourceful upload -> no need to do anything else, direct return
1836         # We also work with unstable uploads, not experimental or those going to some
1837         # proposed-updates queue
1838         if "source" not in self.pkg.changes["architecture"] or \
1839            "unstable" not in self.pkg.changes["distribution"]:
1840             return
1841
1842         # Also only check if there is a file defined (and existant) with
1843         # checks.
1844         transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1845         if transpath == "" or not os.path.exists(transpath):
1846             return
1847
1848         # Parse the yaml file
1849         sourcefile = file(transpath, 'r')
1850         sourcecontent = sourcefile.read()
1851         try:
1852             transitions = yaml.load(sourcecontent)
1853         except yaml.YAMLError, msg:
1854             # This shouldn't happen, there is a wrapper to edit the file which
1855             # checks it, but we prefer to be safe than ending up rejecting
1856             # everything.
1857             utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1858             return
1859
1860         # Now look through all defined transitions
1861         for trans in transitions:
1862             t = transitions[trans]
1863             source = t["source"]
1864             expected = t["new"]
1865
1866             # Will be None if nothing is in testing.
1867             current = get_source_in_suite(source, "testing", session)
1868             if current is not None:
1869                 compare = apt_pkg.VersionCompare(current.version, expected)
1870
1871             if current is None or compare < 0:
1872                 # This is still valid, the current version in testing is older than
1873                 # the new version we wait for, or there is none in testing yet
1874
1875                 # Check if the source we look at is affected by this.
1876                 if sourcepkg in t['packages']:
1877                     # The source is affected, lets reject it.
1878
1879                     rejectmsg = "%s: part of the %s transition.\n\n" % (
1880                         sourcepkg, trans)
1881
1882                     if current is not None:
1883                         currentlymsg = "at version %s" % (current.version)
1884                     else:
1885                         currentlymsg = "not present in testing"
1886
1887                     rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1888
1889                     rejectmsg += "\n".join(textwrap.wrap("""Your package
1890 is part of a testing transition designed to get %s migrated (it is
1891 currently %s, we need version %s).  This transition is managed by the
1892 Release Team, and %s is the Release-Team member responsible for it.
1893 Please mail debian-release@lists.debian.org or contact %s directly if you
1894 need further assistance.  You might want to upload to experimental until this
1895 transition is done."""
1896                             % (source, currentlymsg, expected,t["rm"], t["rm"])))
1897
1898                     self.rejects.append(rejectmsg)
1899                     return
1900
1901     ###########################################################################
1902     # End check_signed_by_key checks
1903     ###########################################################################
1904
1905     def build_summaries(self):
1906         """ Build a summary of changes the upload introduces. """
1907
1908         (byhand, new, summary, override_summary) = self.pkg.file_summary()
1909
1910         short_summary = summary
1911
1912         # This is for direport's benefit...
1913         f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1914
1915         if byhand or new:
1916             summary += "Changes: " + f
1917
1918         summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1919
1920         summary += self.announce(short_summary, 0)
1921
1922         return (summary, short_summary)
1923
1924     ###########################################################################
1925
1926     def close_bugs(self, summary, action):
1927         """
1928         Send mail to close bugs as instructed by the closes field in the changes file.
1929         Also add a line to summary if any work was done.
1930
1931         @type summary: string
1932         @param summary: summary text, as given by L{build_summaries}
1933
1934         @type action: bool
1935         @param action: Set to false no real action will be done.
1936
1937         @rtype: string
1938         @return: summary. If action was taken, extended by the list of closed bugs.
1939
1940         """
1941
1942         template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1943
1944         bugs = self.pkg.changes["closes"].keys()
1945
1946         if not bugs:
1947             return summary
1948
1949         bugs.sort()
1950         summary += "Closing bugs: "
1951         for bug in bugs:
1952             summary += "%s " % (bug)
1953             if action:
1954                 self.update_subst()
1955                 self.Subst["__BUG_NUMBER__"] = bug
1956                 if self.pkg.changes["distribution"].has_key("stable"):
1957                     self.Subst["__STABLE_WARNING__"] = """
1958 Note that this package is not part of the released stable Debian
1959 distribution.  It may have dependencies on other unreleased software,
1960 or other instabilities.  Please take care if you wish to install it.
1961 The update will eventually make its way into the next released Debian
1962 distribution."""
1963                 else:
1964                     self.Subst["__STABLE_WARNING__"] = ""
1965                 mail_message = utils.TemplateSubst(self.Subst, template)
1966                 utils.send_mail(mail_message)
1967
1968                 # Clear up after ourselves
1969                 del self.Subst["__BUG_NUMBER__"]
1970                 del self.Subst["__STABLE_WARNING__"]
1971
1972         if action and self.logger:
1973             self.logger.log(["closing bugs"] + bugs)
1974
1975         summary += "\n"
1976
1977         return summary
1978
1979     ###########################################################################
1980
1981     def announce(self, short_summary, action):
1982         """
1983         Send an announce mail about a new upload.
1984
1985         @type short_summary: string
1986         @param short_summary: Short summary text to include in the mail
1987
1988         @type action: bool
1989         @param action: Set to false no real action will be done.
1990
1991         @rtype: string
1992         @return: Textstring about action taken.
1993
1994         """
1995
1996         cnf = Config()
1997         announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
1998
1999         # Only do announcements for source uploads with a recent dpkg-dev installed
2000         if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
2001            self.pkg.changes["architecture"].has_key("source"):
2002             return ""
2003
2004         lists_done = {}
2005         summary = ""
2006
2007         self.Subst["__SHORT_SUMMARY__"] = short_summary
2008
2009         for dist in self.pkg.changes["distribution"].keys():
2010             suite = get_suite(dist)
2011             if suite is None: continue
2012             announce_list = suite.announce
2013             if announce_list == "" or lists_done.has_key(announce_list):
2014                 continue
2015
2016             lists_done[announce_list] = 1
2017             summary += "Announcing to %s\n" % (announce_list)
2018
2019             if action:
2020                 self.update_subst()
2021                 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
2022                 if cnf.get("Dinstall::TrackingServer") and \
2023                    self.pkg.changes["architecture"].has_key("source"):
2024                     trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
2025                     self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
2026
2027                 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
2028                 utils.send_mail(mail_message)
2029
2030                 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
2031
2032         if cnf.FindB("Dinstall::CloseBugs"):
2033             summary = self.close_bugs(summary, action)
2034
2035         del self.Subst["__SHORT_SUMMARY__"]
2036
2037         return summary
2038
2039     ###########################################################################
2040     @session_wrapper
2041     def accept (self, summary, short_summary, session=None):
2042         """
2043         Accept an upload.
2044
2045         This moves all files referenced from the .changes into the pool,
2046         sends the accepted mail, announces to lists, closes bugs and
2047         also checks for override disparities. If enabled it will write out
2048         the version history for the BTS Version Tracking and will finally call
2049         L{queue_build}.
2050
2051         @type summary: string
2052         @param summary: Summary text
2053
2054         @type short_summary: string
2055         @param short_summary: Short summary
2056         """
2057
2058         cnf = Config()
2059         stats = SummaryStats()
2060
2061         print "Installing."
2062         self.logger.log(["installing changes", self.pkg.changes_file])
2063
2064         poolfiles = []
2065
2066         # Add the .dsc file to the DB first
2067         for newfile, entry in self.pkg.files.items():
2068             if entry["type"] == "dsc":
2069                 source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
2070                 for j in pfs:
2071                     poolfiles.append(j)
2072
2073         # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
2074         for newfile, entry in self.pkg.files.items():
2075             if entry["type"] == "deb":
2076                 poolfiles.append(add_deb_to_db(self, newfile, session))
2077
2078         # If this is a sourceful diff only upload that is moving
2079         # cross-component we need to copy the .orig files into the new
2080         # component too for the same reasons as above.
2081         # XXX: mhy: I think this should be in add_dsc_to_db
2082         if self.pkg.changes["architecture"].has_key("source"):
2083             for orig_file in self.pkg.orig_files.keys():
2084                 if not self.pkg.orig_files[orig_file].has_key("id"):
2085                     continue # Skip if it's not in the pool
2086                 orig_file_id = self.pkg.orig_files[orig_file]["id"]
2087                 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
2088                     continue # Skip if the location didn't change
2089
2090                 # Do the move
2091                 oldf = get_poolfile_by_id(orig_file_id, session)
2092                 old_filename = os.path.join(oldf.location.path, oldf.filename)
2093                 old_dat = {'size': oldf.filesize,   'md5sum': oldf.md5sum,
2094                            'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
2095
2096                 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
2097
2098                 # TODO: Care about size/md5sum collisions etc
2099                 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
2100
2101                 # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
2102                 if newf is None:
2103                     utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
2104                     newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
2105
2106                     session.flush()
2107
2108                     # Don't reference the old file from this changes
2109                     for p in poolfiles:
2110                         if p.file_id == oldf.file_id:
2111                             poolfiles.remove(p)
2112
2113                     poolfiles.append(newf)
2114
2115                     # Fix up the DSC references
2116                     toremove = []
2117
2118                     for df in source.srcfiles:
2119                         if df.poolfile.file_id == oldf.file_id:
2120                             # Add a new DSC entry and mark the old one for deletion
2121                             # Don't do it in the loop so we don't change the thing we're iterating over
2122                             newdscf = DSCFile()
2123                             newdscf.source_id = source.source_id
2124                             newdscf.poolfile_id = newf.file_id
2125                             session.add(newdscf)
2126
2127                             toremove.append(df)
2128
2129                     for df in toremove:
2130                         session.delete(df)
2131
2132                     # Flush our changes
2133                     session.flush()
2134
2135                     # Make sure that our source object is up-to-date
2136                     session.expire(source)
2137
2138         # Add changelog information to the database
2139         self.store_changelog()
2140
2141         # Install the files into the pool
2142         for newfile, entry in self.pkg.files.items():
2143             destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
2144             utils.move(newfile, destination)
2145             self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
2146             stats.accept_bytes += float(entry["size"])
2147
2148         # Copy the .changes file across for suite which need it.
2149         copy_changes = dict([(x.copychanges, '')
2150                              for x in session.query(Suite).filter(Suite.suite_name.in_(self.pkg.changes["distribution"].keys())).all()
2151                              if x.copychanges is not None])
2152
2153         for dest in copy_changes.keys():
2154             utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
2155
2156         # We're done - commit the database changes
2157         session.commit()
2158         # Our SQL session will automatically start a new transaction after
2159         # the last commit
2160
2161         # Move the .changes into the 'done' directory
2162         utils.move(self.pkg.changes_file,
2163                    os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))
2164
2165         if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
2166             UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
2167
2168         self.update_subst()
2169         self.Subst["__SUMMARY__"] = summary
2170         mail_message = utils.TemplateSubst(self.Subst,
2171                                            os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
2172         utils.send_mail(mail_message)
2173         self.announce(short_summary, 1)
2174
2175         ## Helper stuff for DebBugs Version Tracking
2176         if cnf.Find("Dir::Queue::BTSVersionTrack"):
2177             if self.pkg.changes["architecture"].has_key("source"):
2178                 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2179                 version_history = os.fdopen(fd, 'w')
2180                 version_history.write(self.pkg.dsc["bts changelog"])
2181                 version_history.close()
2182                 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2183                                       self.pkg.changes_file[:-8]+".versions")
2184                 os.rename(temp_filename, filename)
2185                 os.chmod(filename, 0644)
2186
2187             # Write out the binary -> source mapping.
2188             (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2189             debinfo = os.fdopen(fd, 'w')
2190             for name, entry in sorted(self.pkg.files.items()):
2191                 if entry["type"] == "deb":
2192                     line = " ".join([entry["package"], entry["version"],
2193                                      entry["architecture"], entry["source package"],
2194                                      entry["source version"]])
2195                     debinfo.write(line+"\n")
2196             debinfo.close()
2197             filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2198                                   self.pkg.changes_file[:-8]+".debinfo")
2199             os.rename(temp_filename, filename)
2200             os.chmod(filename, 0644)
2201
2202         session.commit()
2203
2204         # Set up our copy queues (e.g. buildd queues)
2205         for suite_name in self.pkg.changes["distribution"].keys():
2206             suite = get_suite(suite_name, session)
2207             for q in suite.copy_queues:
2208                 for f in poolfiles:
2209                     q.add_file_from_pool(f)
2210
2211         session.commit()
2212
2213         # Finally...
2214         stats.accept_count += 1
2215
2216     def check_override(self):
2217         """
2218         Checks override entries for validity. Mails "Override disparity" warnings,
2219         if that feature is enabled.
2220
2221         Abandons the check if
2222           - override disparity checks are disabled
2223           - mail sending is disabled
2224         """
2225
2226         cnf = Config()
2227
2228         # Abandon the check if override disparity checks have been disabled
2229         if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
2230             return
2231
2232         summary = self.pkg.check_override()
2233
2234         if summary == "":
2235             return
2236
2237         overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2238
2239         self.update_subst()
2240         self.Subst["__SUMMARY__"] = summary
2241         mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2242         utils.send_mail(mail_message)
2243         del self.Subst["__SUMMARY__"]
2244
2245     ###########################################################################
2246
2247     def remove(self, from_dir=None):
2248         """
2249         Used (for instance) in p-u to remove the package from unchecked
2250
2251         Also removes the package from holding area.
2252         """
2253         if from_dir is None:
2254             from_dir = self.pkg.directory
2255         h = Holding()
2256
2257         for f in self.pkg.files.keys():
2258             os.unlink(os.path.join(from_dir, f))
2259             if os.path.exists(os.path.join(h.holding_dir, f)):
2260                 os.unlink(os.path.join(h.holding_dir, f))
2261
2262         os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2263         if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2264             os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2265
2266     ###########################################################################
2267
2268     def move_to_queue (self, queue):
2269         """
2270         Move files to a destination queue using the permissions in the table
2271         """
2272         h = Holding()
2273         utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2274                    queue.path, perms=int(queue.change_perms, 8))
2275         for f in self.pkg.files.keys():
2276             utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
2277
2278     ###########################################################################
2279
2280     def force_reject(self, reject_files):
2281         """
2282         Forcefully move files from the current directory to the
2283         reject directory.  If any file already exists in the reject
2284         directory it will be moved to the morgue to make way for
2285         the new file.
2286
2287         @type reject_files: dict
2288         @param reject_files: file dictionary
2289
2290         """
2291
2292         cnf = Config()
2293
2294         for file_entry in reject_files:
2295             # Skip any files which don't exist or which we don't have permission to copy.
2296             if os.access(file_entry, os.R_OK) == 0:
2297                 continue
2298
2299             dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
2300
2301             try:
2302                 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
2303             except OSError, e:
2304                 # File exists?  Let's find a new name by adding a number
2305                 if e.errno == errno.EEXIST:
2306                     try:
2307                         dest_file = utils.find_next_free(dest_file, 255)
2308                     except NoFreeFilenameError:
2309                         # Something's either gone badly Pete Tong, or
2310                         # someone is trying to exploit us.
2311                         utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))
2312                         return
2313
2314                     # Make sure we really got it
2315                     try:
2316                         dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2317                     except OSError, e:
2318                         # Likewise
2319                         utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
2320                         return
2321                 else:
2322                     raise
2323             # If we got here, we own the destination file, so we can
2324             # safely overwrite it.
2325             utils.move(file_entry, dest_file, 1, perms=0660)
2326             os.close(dest_fd)
2327
2328     ###########################################################################
2329     def do_reject (self, manual=0, reject_message="", notes=""):
2330         """
2331         Reject an upload. If called without a reject message or C{manual} is
2332         true, spawn an editor so the user can write one.
2333
2334         @type manual: bool
2335         @param manual: manual or automated rejection
2336
2337         @type reject_message: string
2338         @param reject_message: A reject message
2339
2340         @return: 0
2341
2342         """
2343         # If we weren't given a manual rejection message, spawn an
2344         # editor so the user can add one in...
2345         if manual and not reject_message:
2346             (fd, temp_filename) = utils.temp_filename()
2347             temp_file = os.fdopen(fd, 'w')
2348             if len(notes) > 0:
2349                 for note in notes:
2350                     temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
2351                                     % (note.author, note.version, note.notedate, note.comment))
2352             temp_file.close()
2353             editor = os.environ.get("EDITOR","vi")
2354             answer = 'E'
2355             while answer == 'E':
2356                 os.system("%s %s" % (editor, temp_filename))
2357                 temp_fh = utils.open_file(temp_filename)
2358                 reject_message = "".join(temp_fh.readlines())
2359                 temp_fh.close()
2360                 print "Reject message:"
2361                 print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
2362                 prompt = "[R]eject, Edit, Abandon, Quit ?"
2363                 answer = "XXX"
2364                 while prompt.find(answer) == -1:
2365                     answer = utils.our_raw_input(prompt)
2366                     m = re_default_answer.search(prompt)
2367                     if answer == "":
2368                         answer = m.group(1)
2369                     answer = answer[:1].upper()
2370             os.unlink(temp_filename)
2371             if answer == 'A':
2372                 return 1
2373             elif answer == 'Q':
2374                 sys.exit(0)
2375
2376         print "Rejecting.\n"
2377
2378         cnf = Config()
2379
2380         reason_filename = self.pkg.changes_file[:-8] + ".reason"
2381         reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2382
2383         # Move all the files into the reject directory
2384         reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2385         self.force_reject(reject_files)
2386
2387         # If we fail here someone is probably trying to exploit the race
2388         # so let's just raise an exception ...
2389         if os.path.exists(reason_filename):
2390             os.unlink(reason_filename)
2391         reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2392
2393         rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
2394
2395         self.update_subst()
2396         if not manual:
2397             self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2398             self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2399             self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2400             os.write(reason_fd, reject_message)
2401             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2402         else:
2403             # Build up the rejection email
2404             user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2405             self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2406             self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2407             self.Subst["__REJECT_MESSAGE__"] = ""
2408             self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2409             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2410             # Write the rejection email out as the <foo>.reason file
2411             os.write(reason_fd, reject_mail_message)
2412
2413         del self.Subst["__REJECTOR_ADDRESS__"]
2414         del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2415         del self.Subst["__CC__"]
2416
2417         os.close(reason_fd)
2418
2419         # Send the rejection mail
2420         utils.send_mail(reject_mail_message)
2421
2422         if self.logger:
2423             self.logger.log(["rejected", self.pkg.changes_file])
2424
2425         return 0
2426
2427     ################################################################################
2428     def in_override_p(self, package, component, suite, binary_type, filename, session):
2429         """
2430         Check if a package already has override entries in the DB
2431
2432         @type package: string
2433         @param package: package name
2434
2435         @type component: string
2436         @param component: database id of the component
2437
2438         @type suite: int
2439         @param suite: database id of the suite
2440
2441         @type binary_type: string
2442         @param binary_type: type of the package
2443
2444         @type filename: string
2445         @param filename: filename we check
2446
2447         @return: the database result. But noone cares anyway.
2448
2449         """
2450
2451         cnf = Config()
2452
2453         if binary_type == "": # must be source
2454             file_type = "dsc"
2455         else:
2456             file_type = binary_type
2457
2458         # Override suite name; used for example with proposed-updates
2459         oldsuite = get_suite(suite, session)
2460         if (not oldsuite is None) and oldsuite.overridesuite:
2461             suite = oldsuite.overridesuite
2462
2463         result = get_override(package, suite, component, file_type, session)
2464
2465         # If checking for a source package fall back on the binary override type
2466         if file_type == "dsc" and len(result) < 1:
2467             result = get_override(package, suite, component, ['deb', 'udeb'], session)
2468
2469         # Remember the section and priority so we can check them later if appropriate
2470         if len(result) > 0:
2471             result = result[0]
2472             self.pkg.files[filename]["override section"] = result.section.section
2473             self.pkg.files[filename]["override priority"] = result.priority.priority
2474             return result
2475
2476         return None
2477
2478     ################################################################################
2479     def get_anyversion(self, sv_list, suite):
2480         """
2481         @type sv_list: list
2482         @param sv_list: list of (suite, version) tuples to check
2483
2484         @type suite: string
2485         @param suite: suite name
2486
2487         Description: TODO
2488         """
2489         Cnf = Config()
2490         anyversion = None
2491         anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
2492         for (s, v) in sv_list:
2493             if s in [ x.lower() for x in anysuite ]:
2494                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2495                     anyversion = v
2496
2497         return anyversion
2498
2499     ################################################################################
2500
2501     def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2502         """
2503         @type sv_list: list
2504         @param sv_list: list of (suite, version) tuples to check
2505
2506         @type filename: string
2507         @param filename: XXX
2508
2509         @type new_version: string
2510         @param new_version: XXX
2511
2512         Ensure versions are newer than existing packages in target
2513         suites and that cross-suite version checking rules as
2514         set out in the conf file are satisfied.
2515         """
2516
2517         cnf = Config()
2518
2519         # Check versions for each target suite
2520         for target_suite in self.pkg.changes["distribution"].keys():
2521             must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
2522             must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
2523
2524             # Enforce "must be newer than target suite" even if conffile omits it
2525             if target_suite not in must_be_newer_than:
2526                 must_be_newer_than.append(target_suite)
2527
2528             for (suite, existent_version) in sv_list:
2529                 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
2530
2531                 if suite in must_be_newer_than and sourceful and vercmp < 1:
2532                     self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2533
2534                 if suite in must_be_older_than and vercmp > -1:
2535                     cansave = 0
2536
2537                     if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2538                         # we really use the other suite, ignoring the conflicting one ...
2539                         addsuite = self.pkg.changes["distribution-version"][suite]
2540
2541                         add_version = self.get_anyversion(sv_list, addsuite)
2542                         target_version = self.get_anyversion(sv_list, target_suite)
2543
2544                         if not add_version:
2545                             # not add_version can only happen if we map to a suite
2546                             # that doesn't enhance the suite we're propup'ing from.
2547                             # so "propup-ver x a b c; map a d" is a problem only if
2548                             # d doesn't enhance a.
2549                             #
2550                             # i think we could always propagate in this case, rather
2551                             # than complaining. either way, this isn't a REJECT issue
2552                             #
2553                             # And - we really should complain to the dorks who configured dak
2554                             self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite))
2555                             self.pkg.changes.setdefault("propdistribution", {})
2556                             self.pkg.changes["propdistribution"][addsuite] = 1
2557                             cansave = 1
2558                         elif not target_version:
2559                             # not targets_version is true when the package is NEW
2560                             # we could just stick with the "...old version..." REJECT
2561                             # for this, I think.
2562                             self.rejects.append("Won't propogate NEW packages.")
2563                         elif apt_pkg.VersionCompare(new_version, add_version) < 0:
2564                             # propogation would be redundant. no need to reject though.
2565                             self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2566                             cansave = 1
2567                         elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2568                              apt_pkg.VersionCompare(add_version, target_version) >= 0:
2569                             # propogate!!
2570                             self.warnings.append("Propogating upload to %s" % (addsuite))
2571                             self.pkg.changes.setdefault("propdistribution", {})
2572                             self.pkg.changes["propdistribution"][addsuite] = 1
2573                             cansave = 1
2574
2575                     if not cansave:
2576                         self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2577
2578     ################################################################################
2579     def check_binary_against_db(self, filename, session):
2580         # Ensure version is sane
2581         self.cross_suite_version_check( \
2582             get_suite_version_by_package(self.pkg.files[filename]["package"], \
2583                 self.pkg.files[filename]["architecture"], session),
2584             filename, self.pkg.files[filename]["version"], sourceful=False)
2585
2586         # Check for any existing copies of the file
2587         q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2588         q = q.filter_by(version=self.pkg.files[filename]["version"])
2589         q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2590
2591         if q.count() > 0:
2592             self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
2593
2594     ################################################################################
2595
2596     def check_source_against_db(self, filename, session):
2597         source = self.pkg.dsc.get("source")
2598         version = self.pkg.dsc.get("version")
2599
2600         # Ensure version is sane
2601         self.cross_suite_version_check( \
2602             get_suite_version_by_source(source, session), filename, version,
2603             sourceful=True)
2604
2605     ################################################################################
2606     def check_dsc_against_db(self, filename, session):
2607         """
2608
2609         @warning: NB: this function can remove entries from the 'files' index [if
2610          the orig tarball is a duplicate of the one in the archive]; if
2611          you're iterating over 'files' and call this function as part of
2612          the loop, be sure to add a check to the top of the loop to
2613          ensure you haven't just tried to dereference the deleted entry.
2614
2615         """
2616
2617         Cnf = Config()
2618         self.pkg.orig_files = {} # XXX: do we need to clear it?
2619         orig_files = self.pkg.orig_files
2620
2621         # Try and find all files mentioned in the .dsc.  This has
2622         # to work harder to cope with the multiple possible
2623         # locations of an .orig.tar.gz.
2624         # The ordering on the select is needed to pick the newest orig
2625         # when it exists in multiple places.
2626         for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2627             found = None
2628             if self.pkg.files.has_key(dsc_name):
2629                 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2630                 actual_size = int(self.pkg.files[dsc_name]["size"])
2631                 found = "%s in incoming" % (dsc_name)
2632
2633                 # Check the file does not already exist in the archive
2634                 ql = get_poolfile_like_name(dsc_name, session)
2635
2636                 # Strip out anything that isn't '%s' or '/%s$'
2637                 for i in ql:
2638                     if not i.filename.endswith(dsc_name):
2639                         ql.remove(i)
2640
2641                 # "[dak] has not broken them.  [dak] has fixed a
2642                 # brokenness.  Your crappy hack exploited a bug in
2643                 # the old dinstall.
2644                 #
2645                 # "(Come on!  I thought it was always obvious that
2646                 # one just doesn't release different files with
2647                 # the same name and version.)"
2648                 #                        -- ajk@ on d-devel@l.d.o
2649
2650                 if len(ql) > 0:
2651                     # Ignore exact matches for .orig.tar.gz
2652                     match = 0
2653                     if re_is_orig_source.match(dsc_name):
2654                         for i in ql:
2655                             if self.pkg.files.has_key(dsc_name) and \
2656                                int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2657                                self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2658                                 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2659                                 # TODO: Don't delete the entry, just mark it as not needed
2660                                 # This would fix the stupidity of changing something we often iterate over
2661                                 # whilst we're doing it
2662                                 del self.pkg.files[dsc_name]
2663                                 dsc_entry["files id"] = i.file_id
2664                                 if not orig_files.has_key(dsc_name):
2665                                     orig_files[dsc_name] = {}
2666                                 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2667                                 match = 1
2668
2669                                 # Don't bitch that we couldn't find this file later
2670                                 try:
2671                                     self.later_check_files.remove(dsc_name)
2672                                 except ValueError:
2673                                     pass
2674
2675
2676                     if not match:
2677                         self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2678
2679             elif re_is_orig_source.match(dsc_name):
2680                 # Check in the pool
2681                 ql = get_poolfile_like_name(dsc_name, session)
2682
2683                 # Strip out anything that isn't '%s' or '/%s$'
2684                 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2685                 for i in ql:
2686                     if not i.filename.endswith(dsc_name):
2687                         ql.remove(i)
2688
2689                 if len(ql) > 0:
2690                     # Unfortunately, we may get more than one match here if,
2691                     # for example, the package was in potato but had an -sa
2692                     # upload in woody.  So we need to choose the right one.
2693
2694                     # default to something sane in case we don't match any or have only one
2695                     x = ql[0]
2696
2697                     if len(ql) > 1:
2698                         for i in ql:
2699                             old_file = os.path.join(i.location.path, i.filename)
2700                             old_file_fh = utils.open_file(old_file)
2701                             actual_md5 = apt_pkg.md5sum(old_file_fh)
2702                             old_file_fh.close()
2703                             actual_size = os.stat(old_file)[stat.ST_SIZE]
2704                             if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2705                                 x = i
2706
2707                     old_file = os.path.join(i.location.path, i.filename)
2708                     old_file_fh = utils.open_file(old_file)
2709                     actual_md5 = apt_pkg.md5sum(old_file_fh)
2710                     old_file_fh.close()
2711                     actual_size = os.stat(old_file)[stat.ST_SIZE]
2712                     found = old_file
2713                     suite_type = x.location.archive_type
2714                     # need this for updating dsc_files in install()
2715                     dsc_entry["files id"] = x.file_id
2716                     # See install() in process-accepted...
2717                     if not orig_files.has_key(dsc_name):
2718                         orig_files[dsc_name] = {}
2719                     orig_files[dsc_name]["id"] = x.file_id
2720                     orig_files[dsc_name]["path"] = old_file
2721                     orig_files[dsc_name]["location"] = x.location.location_id
2722                 else:
2723                     # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2724                     # Not there? Check the queue directories...
2725                     for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2726                         if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2727                             continue
2728                         in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2729                         if os.path.exists(in_otherdir):
2730                             in_otherdir_fh = utils.open_file(in_otherdir)
2731                             actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2732                             in_otherdir_fh.close()
2733                             actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2734                             found = in_otherdir
2735                             if not orig_files.has_key(dsc_name):
2736                                 orig_files[dsc_name] = {}
2737                             orig_files[dsc_name]["path"] = in_otherdir
2738
2739                     if not found:
2740                         self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2741                         continue
2742             else:
2743                 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2744                 continue
2745             if actual_md5 != dsc_entry["md5sum"]:
2746                 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2747             if actual_size != int(dsc_entry["size"]):
2748                 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2749
2750     ################################################################################
2751     # This is used by process-new and process-holding to recheck a changes file
2752     # at the time we're running.  It mainly wraps various other internal functions
2753     # and is similar to accepted_checks - these should probably be tidied up
2754     # and combined
2755     def recheck(self, session):
2756         cnf = Config()
2757         for f in self.pkg.files.keys():
2758             # The .orig.tar.gz can disappear out from under us is it's a
2759             # duplicate of one in the archive.
2760             if not self.pkg.files.has_key(f):
2761                 continue
2762
2763             entry = self.pkg.files[f]
2764
2765             # Check that the source still exists
2766             if entry["type"] == "deb":
2767                 source_version = entry["source version"]
2768                 source_package = entry["source package"]
2769                 if not self.pkg.changes["architecture"].has_key("source") \
2770                    and not source_exists(source_package, source_version, \
2771                     suites = self.pkg.changes["distribution"].keys(), session = session):
2772                     source_epochless_version = re_no_epoch.sub('', source_version)
2773                     dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2774                     found = False
2775                     for q in ["Embargoed", "Unembargoed", "Newstage"]:
2776                         if cnf.has_key("Dir::Queue::%s" % (q)):
2777                             if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
2778                                 found = True
2779                     if not found:
2780                         self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2781
2782             # Version and file overwrite checks
2783             if entry["type"] == "deb":
2784                 self.check_binary_against_db(f, session)
2785             elif entry["type"] == "dsc":
2786                 self.check_source_against_db(f, session)
2787                 self.check_dsc_against_db(f, session)
2788
2789     ################################################################################
2790     def accepted_checks(self, overwrite_checks, session):
2791         # Recheck anything that relies on the database; since that's not
2792         # frozen between accept and our run time when called from p-a.
2793
2794         # overwrite_checks is set to False when installing to stable/oldstable
2795
2796         propogate={}
2797         nopropogate={}
2798
2799         # Find the .dsc (again)
2800         dsc_filename = None
2801         for f in self.pkg.files.keys():
2802             if self.pkg.files[f]["type"] == "dsc":
2803                 dsc_filename = f
2804
2805         for checkfile in self.pkg.files.keys():
2806             # The .orig.tar.gz can disappear out from under us is it's a
2807             # duplicate of one in the archive.
2808             if not self.pkg.files.has_key(checkfile):
2809                 continue
2810
2811             entry = self.pkg.files[checkfile]
2812
2813             # Check that the source still exists
2814             if entry["type"] == "deb":
2815                 source_version = entry["source version"]
2816                 source_package = entry["source package"]
2817                 if not self.pkg.changes["architecture"].has_key("source") \
2818                    and not source_exists(source_package, source_version, \
2819                     suites = self.pkg.changes["distribution"].keys(), \
2820                     session = session):
2821                     self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2822
2823             # Version and file overwrite checks
2824             if overwrite_checks:
2825                 if entry["type"] == "deb":
2826                     self.check_binary_against_db(checkfile, session)
2827                 elif entry["type"] == "dsc":
2828                     self.check_source_against_db(checkfile, session)
2829                     self.check_dsc_against_db(dsc_filename, session)
2830
2831             # propogate in the case it is in the override tables:
2832             for suite in self.pkg.changes.get("propdistribution", {}).keys():
2833                 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2834                     propogate[suite] = 1
2835                 else:
2836                     nopropogate[suite] = 1
2837
2838         for suite in propogate.keys():
2839             if suite in nopropogate:
2840                 continue
2841             self.pkg.changes["distribution"][suite] = 1
2842
2843         for checkfile in self.pkg.files.keys():
2844             # Check the package is still in the override tables
2845             for suite in self.pkg.changes["distribution"].keys():
2846                 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2847                     self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2848
2849     ################################################################################
2850     # If any file of an upload has a recent mtime then chances are good
2851     # the file is still being uploaded.
2852
2853     def upload_too_new(self):
2854         cnf = Config()
2855         too_new = False
2856         # Move back to the original directory to get accurate time stamps
2857         cwd = os.getcwd()
2858         os.chdir(self.pkg.directory)
2859         file_list = self.pkg.files.keys()
2860         file_list.extend(self.pkg.dsc_files.keys())
2861         file_list.append(self.pkg.changes_file)
2862         for f in file_list:
2863             try:
2864                 last_modified = time.time()-os.path.getmtime(f)
2865                 if last_modified < int(cnf["Dinstall::SkipTime"]):
2866                     too_new = True
2867                     break
2868             except:
2869                 pass
2870
2871         os.chdir(cwd)
2872         return too_new
2873
2874     def store_changelog(self):
2875
2876         # Skip binary-only upload if it is not a bin-NMU
2877         if not self.pkg.changes['architecture'].has_key('source'):
2878             from daklib.regexes import re_bin_only_nmu
2879             if not re_bin_only_nmu.search(self.pkg.changes['version']):
2880                 return
2881
2882         session = DBConn().session()
2883
2884         # Check if upload already has a changelog entry
2885         query = """SELECT changelog_id FROM changes WHERE source = :source
2886                    AND version = :version AND architecture = :architecture AND changelog_id != 0"""
2887         if session.execute(query, {'source': self.pkg.changes['source'], \
2888                                    'version': self.pkg.changes['version'], \
2889                                    'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
2890             session.commit()
2891             return
2892
2893         # Add current changelog text into changelogs_text table, return created ID
2894         query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
2895         ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]
2896
2897         # Link ID to the upload available in changes table
2898         query = """UPDATE changes SET changelog_id = :id WHERE source = :source
2899                    AND version = :version AND architecture = :architecture"""
2900         session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
2901                                 'version': self.pkg.changes['version'], \
2902                                 'architecture': " ".join(self.pkg.changes['architecture'].keys())})
2903
2904         session.commit()