1 #!/usr/bin/env python
2 # vim:set et sw=4:
3
4 """
5 Queue utility functions for dak
6
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009, 2010  Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
11 """
12
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
17
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
21 # GNU General Public License for more details.
22
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
26
27 ###############################################################################
28
29 import errno
30 import os
31 import stat
32 import sys
33 import time
34 import apt_inst
35 import apt_pkg
36 import utils
37 import commands
38 import shutil
39 import textwrap
40 from types import *
41 from sqlalchemy.sql.expression import desc
42 from sqlalchemy.orm.exc import NoResultFound
43
44 import yaml
45
46 from dak_exceptions import *
47 from changes import *
48 from regexes import *
49 from config import Config
50 from holding import Holding
51 from urgencylog import UrgencyLog
52 from dbconn import *
53 from summarystats import SummaryStats
54 from utils import parse_changes, check_dsc_files
55 from textutils import fix_maintainer
56 from lintian import parse_lintian_output, generate_reject_messages
57
58 # suppress some deprecation warnings in squeeze related to apt_pkg
59 # module
60 import warnings
61 warnings.filterwarnings('ignore', \
62     "apt_pkg.ParseSection\(\) is deprecated. Please see apt_pkg\.TagSection\(\) for the replacement\.", \
63     DeprecationWarning)
64 warnings.filterwarnings('ignore', \
65     "Attribute 'Find' of the 'apt_pkg\.TagSection' object is deprecated, use 'find' instead\.", \
66     DeprecationWarning)
67
68 ###############################################################################
69
70 def get_type(f, session):
71     """
72     Get the file type of C{f}
73
74     @type f: dict
75     @param f: file entry from Changes object
76
77     @type session: SQLA Session
78     @param session: SQL Alchemy session object
79
80     @rtype: string
81     @return: filetype
82
83     """
84     # Determine the type
85     if f.has_key("dbtype"):
86         file_type = f["dbtype"]
87     elif re_source_ext.match(f["type"]):
88         file_type = "dsc"
89     else:
90         file_type = f["type"]
91         utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (file_type))
92
93     # Validate the override type
94     type_id = get_override_type(file_type, session)
95     if type_id is None:
96         utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))
97
98     return file_type
99
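# A minimal usage sketch for get_type() (hypothetical session and file
# entries; values are illustrative, not from a real run):
#
#   >>> get_type({"dbtype": "udeb"}, session)
#   'udeb'
#   >>> get_type({"type": "dsc"}, session)    # matched by re_source_ext
#   'dsc'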
100 ################################################################################
101
102 # Determine what parts in a .changes are NEW
103
104 def determine_new(filename, changes, files, warn=1, session = None):
105     """
106     Determine what parts in a C{changes} file are NEW.
107
108     @type filename: str
109     @param filename: changes filename
110
111     @type changes: Upload.Pkg.changes dict
112     @param changes: Changes dictionary
113
114     @type files: Upload.Pkg.files dict
115     @param files: Files dictionary
116
117     @type warn: bool
118     @param warn: Warn if overrides are added for (old)stable
119
120     @rtype: dict
121     @return: dictionary of NEW components.
122
123     """
124     # TODO: This should all use the database instead of parsing the changes
125     # file again
126     new = {}
127     byhand = {}
128
129     dbchg = get_dbchange(filename, session)
130     if dbchg is None:
131         print "Warning: cannot find changes file in database; won't check byhand"
132
133     # Build up a list of potentially new things
134     for name, f in files.items():
135         # Keep a record of byhand elements
136         if f["section"] == "byhand":
137             byhand[name] = 1
138             continue
139
140         pkg = f["package"]
141         priority = f["priority"]
142         section = f["section"]
143         file_type = get_type(f, session)
144         component = f["component"]
145
146         if file_type == "dsc":
147             priority = "source"
148
149         if not new.has_key(pkg):
150             new[pkg] = {}
151             new[pkg]["priority"] = priority
152             new[pkg]["section"] = section
153             new[pkg]["type"] = file_type
154             new[pkg]["component"] = component
155             new[pkg]["files"] = []
156         else:
157             old_type = new[pkg]["type"]
158             if old_type != file_type:
159                 # source gets trumped by deb or udeb
160                 if old_type == "dsc":
161                     new[pkg]["priority"] = priority
162                     new[pkg]["section"] = section
163                     new[pkg]["type"] = file_type
164                     new[pkg]["component"] = component
165
166         new[pkg]["files"].append(name)
167
168         if f.has_key("othercomponents"):
169             new[pkg]["othercomponents"] = f["othercomponents"]
170
171     # Fix up the list of target suites
172     cnf = Config()
173     for suite in changes["suite"].keys():
174         oldsuite = get_suite(suite, session)
175         if not oldsuite:
176             print "WARNING: Invalid suite %s found" % suite
177             continue
178
179         if oldsuite.overridesuite:
180             newsuite = get_suite(oldsuite.overridesuite, session)
181
182             if newsuite:
183                 print "INFORMATION: Using overrides from suite %s instead of suite %s" % (
184                     oldsuite.overridesuite, suite)
185                 del changes["suite"][suite]
186                 changes["suite"][oldsuite.overridesuite] = 1
187             else:
188                 print "WARNING: Told to use overridesuite %s for %s but it doesn't exist.  Bugger" % (
189                     oldsuite.overridesuite, suite)
190
191     # Check for unprocessed byhand files
192     if dbchg is not None:
193         for b in byhand.keys():
194             # Find the file entry in the database
195             found = False
196             for f in dbchg.files:
197                 if f.filename == b:
198                     found = True
199                     # If it's processed, we can ignore it
200                     if f.processed:
201                         del byhand[b]
202                     break
203
204             if not found:
205                 print "Warning: Couldn't find BYHAND item %s in the database; assuming unprocessed" % b
206
207     # Check for new stuff
208     for suite in changes["suite"].keys():
209         for pkg in new.keys():
210             ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
211             if len(ql) > 0:
212                 for file_entry in new[pkg]["files"]:
213                     if files[file_entry].has_key("new"):
214                         del files[file_entry]["new"]
215                 del new[pkg]
216
217     if warn:
218         for s in ['stable', 'oldstable']:
219             if changes["suite"].has_key(s):
220                 print "WARNING: overrides will be added for %s!" % s
221         for pkg in new.keys():
222             if new[pkg].has_key("othercomponents"):
223                 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
224
225     return new, byhand
226
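# A sketch of the return value for a hypothetical upload: determine_new()
# yields a dict of NEW packages plus a dict of byhand files, roughly:
#
#   new, byhand = determine_new("foo_1.0-1_amd64.changes", changes, files,
#                               session=session)
#   # new    -> {'foo': {'priority': 'optional', 'section': 'utils',
#   #                    'type': 'deb', 'component': 'main',
#   #                    'files': ['foo_1.0-1_amd64.deb']}}
#   # byhand -> {'foo_1.0-1_raw-installer.tar.gz': 1}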
227 ################################################################################
228
229 def check_valid(new, session = None):
230     """
231     Check if section and priority for NEW packages exist in database.
232     Additionally does sanity checks:
233       - debian-installer packages have to be udeb (or source)
234       - non debian-installer packages can not be udeb
235       - source priority can only be assigned to dsc file types
236
237     @type new: dict
238     @param new: Dict of new packages with their section, priority and type.
239
240     """
241     for pkg in new.keys():
242         section_name = new[pkg]["section"]
243         priority_name = new[pkg]["priority"]
244         file_type = new[pkg]["type"]
245
246         section = get_section(section_name, session)
247         if section is None:
248             new[pkg]["section id"] = -1
249         else:
250             new[pkg]["section id"] = section.section_id
251
252         priority = get_priority(priority_name, session)
253         if priority is None:
254             new[pkg]["priority id"] = -1
255         else:
256             new[pkg]["priority id"] = priority.priority_id
257
258         # Sanity checks
259         di = section_name.find("debian-installer") != -1
260
261         # If d-i, we must be udeb and vice-versa
262         if     (di and file_type not in ("udeb", "dsc")) or \
263            (not di and file_type == "udeb"):
264             new[pkg]["section id"] = -1
265
266         # If dsc we need to be source and vice-versa
267         if (priority_name == "source" and file_type != "dsc") or \
268            (priority_name != "source" and file_type == "dsc"):
269             new[pkg]["priority id"] = -1
270
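# An illustration of the sanity rules above (hypothetical entry; the actual
# section/priority ids depend on the database):
#
#   new = {'foo-udeb': {'section': 'debian-installer', 'priority': 'optional',
#                       'type': 'deb'}}
#   check_valid(new)
#   # 'foo-udeb' ends up with "section id" == -1, because a debian-installer
#   # section requires a udeb (or dsc) file type.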
271 ###############################################################################
272
273 # Used by Upload.check_timestamps
274 class TarTime(object):
275     def __init__(self, future_cutoff, past_cutoff):
276         self.reset()
277         self.future_cutoff = future_cutoff
278         self.past_cutoff = past_cutoff
279
280     def reset(self):
281         self.future_files = {}
282         self.ancient_files = {}
283
284     def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
285         if MTime > self.future_cutoff:
286             self.future_files[Name] = MTime
287         if MTime < self.past_cutoff:
288             self.ancient_files[Name] = MTime
289
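# TarTime is meant to be fed as the callback of the old apt_inst extraction
# API; a rough sketch (apt_inst.debExtract and the member name here are
# assumptions based on how Upload.check_timestamps would use it):
#
#   tar = TarTime(future_cutoff=time.time() + 24 * 3600, past_cutoff=0)
#   # apt_inst.debExtract(utils.open_file(deb), tar.callback, "control.tar.gz")
#   # tar.future_files / tar.ancient_files then map member names to mtimes.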
290 ###############################################################################
291
292 def prod_maintainer(notes, upload):
293     cnf = Config()
294
295     # Here we prepare an editor and get them ready to prod...
296     (fd, temp_filename) = utils.temp_filename()
297     temp_file = os.fdopen(fd, 'w')
298     for note in notes:
299         temp_file.write(note.comment)
300     temp_file.close()
301     editor = os.environ.get("EDITOR","vi")
302     answer = 'E'
303     while answer == 'E':
304         os.system("%s %s" % (editor, temp_filename))
305         temp_fh = utils.open_file(temp_filename)
306         prod_message = "".join(temp_fh.readlines())
307         temp_fh.close()
308         print "Prod message:"
309         print utils.prefix_multi_line_string(prod_message,"  ",include_blank_lines=1)
310         prompt = "[P]rod, Edit, Abandon, Quit ?"
311         answer = "XXX"
312         while prompt.find(answer) == -1:
313             answer = utils.our_raw_input(prompt)
314             m = re_default_answer.search(prompt)
315             if answer == "":
316                 answer = m.group(1)
317             answer = answer[:1].upper()
318     os.unlink(temp_filename)
319     if answer == 'A':
320         return
321     elif answer == 'Q':
322         end()
323         sys.exit(0)
324     # Otherwise, do the prodding...
325     user_email_address = utils.whoami() + " <%s>" % (
326         cnf["Dinstall::MyAdminAddress"])
327
328     Subst = upload.Subst
329
330     Subst["__FROM_ADDRESS__"] = user_email_address
331     Subst["__PROD_MESSAGE__"] = prod_message
332     Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
333
334     prod_mail_message = utils.TemplateSubst(
335         Subst,cnf["Dir::Templates"]+"/process-new.prod")
336
337     # Send the prod mail
338     utils.send_mail(prod_mail_message)
339
340     print "Sent prodding message"
341
342 ################################################################################
343
344 def edit_note(note, upload, session, trainee=False):
345     # Write the current data to a temporary file
346     (fd, temp_filename) = utils.temp_filename()
347     editor = os.environ.get("EDITOR","vi")
348     answer = 'E'
349     while answer == 'E':
350         os.system("%s %s" % (editor, temp_filename))
351         temp_file = utils.open_file(temp_filename)
352         newnote = temp_file.read().rstrip()
353         temp_file.close()
354         print "New Note:"
355         print utils.prefix_multi_line_string(newnote,"  ")
356         prompt = "[D]one, Edit, Abandon, Quit ?"
357         answer = "XXX"
358         while prompt.find(answer) == -1:
359             answer = utils.our_raw_input(prompt)
360             m = re_default_answer.search(prompt)
361             if answer == "":
362                 answer = m.group(1)
363             answer = answer[:1].upper()
364     os.unlink(temp_filename)
365     if answer == 'A':
366         return
367     elif answer == 'Q':
368         end()
369         sys.exit(0)
370
371     comment = NewComment()
372     comment.package = upload.pkg.changes["source"]
373     comment.version = upload.pkg.changes["version"]
374     comment.comment = newnote
375     comment.author  = utils.whoami()
376     comment.trainee = trainee
377     session.add(comment)
378     session.commit()
379
380 ###############################################################################
381
382 # suite names DMs can upload to
383 dm_suites = ['unstable', 'experimental']
384
385 def get_newest_source(source, session):
386     'returns the newest DBSource object in dm_suites'
387     ## the most recent version of the package uploaded to unstable or
388     ## experimental includes the field "DM-Upload-Allowed: yes" in the source
389     ## section of its control file
390     q = session.query(DBSource).filter_by(source = source). \
391         filter(DBSource.suites.any(Suite.suite_name.in_(dm_suites))). \
392         order_by(desc('source.version'))
393     return q.first()
394
395 def get_suite_version_by_source(source, session):
396     'returns a list of tuples (suite_name, version) for source package'
397     q = session.query(Suite.suite_name, DBSource.version). \
398         join(Suite.sources).filter_by(source = source)
399     return q.all()
400
401 def get_source_by_package_and_suite(package, suite_name, session):
402     '''
403     returns a DBSource query filtered by DBBinary.package and this package's
404     suite_name
405     '''
406     return session.query(DBSource). \
407         join(DBSource.binaries).filter_by(package = package). \
408         join(DBBinary.suites).filter_by(suite_name = suite_name)
409
410 def get_suite_version_by_package(package, arch_string, session):
411     '''
412     returns a list of tuples (suite_name, version) for binary package and
413     arch_string
414     '''
415     return session.query(Suite.suite_name, DBBinary.version). \
416         join(Suite.binaries).filter_by(package = package). \
417         join(DBBinary.architecture). \
418         filter(Architecture.arch_string.in_([arch_string, 'all'])).all()
419
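# Example use of the query helpers above (hypothetical package names; the
# rows returned obviously depend on the archive database):
#
#   get_newest_source('dpkg', session)
#   # -> DBSource for the highest dpkg version in unstable/experimental
#   get_suite_version_by_package('dpkg', 'amd64', session)
#   # -> [(u'unstable', u'1.15.8'), (u'testing', u'1.15.7')]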
420 class Upload(object):
421     """
422     Everything that has to do with processing an upload.
423
424     """
425     def __init__(self):
426         self.logger = None
427         self.pkg = Changes()
428         self.reset()
429
430     ###########################################################################
431
432     def reset (self):
433         """ Reset a number of internal variables."""
434
435         # Initialize the substitution template map
436         cnf = Config()
437         self.Subst = {}
438         self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
439         self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
440         self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
441         self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
442
443         self.rejects = []
444         self.warnings = []
445         self.notes = []
446
447         self.later_check_files = []
448
449         self.pkg.reset()
450
451     def package_info(self):
452         """
453         Format various messages from this Upload to send to the maintainer.
454         """
455
456         msgs = (
457             ('Reject Reasons', self.rejects),
458             ('Warnings', self.warnings),
459             ('Notes', self.notes),
460         )
461
462         msg = ''
463         for title, messages in msgs:
464             if messages:
465                 msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
466         msg += '\n\n'
467
468         return msg
469
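    # A rough sketch of package_info() output (contents depend on what has
    # been collected in self.rejects/warnings/notes for this upload):
    #
    #     Reject Reasons:
    #     foo_1.0-1.dsc: invalid version number '1.0~'.
    #
    #     Warnings:
    #     Ignoring squeeze as a target suite.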
470     ###########################################################################
471     def update_subst(self):
472         """ Set up the per-package template substitution mappings """
473
474         cnf = Config()
475
476         # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
477         if not self.pkg.changes.has_key("architecture") or not \
478            isinstance(self.pkg.changes["architecture"], dict):
479             self.pkg.changes["architecture"] = { "Unknown" : "" }
480
481         # and maintainer2047 may not exist.
482         if not self.pkg.changes.has_key("maintainer2047"):
483             self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]
484
485         self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
486         self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
487         self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")
488
489         # For source uploads the Changed-By field wins; otherwise Maintainer wins.
490         if self.pkg.changes["architecture"].has_key("source") and \
491            self.pkg.changes["changedby822"] != "" and \
492            (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
493
494             self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
495             self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
496             self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
497         else:
498             self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
499             self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
500             self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")
501
502         # Process policy doesn't set the fingerprint field and I don't want to make it
503         # do it for now as I don't want to have to deal with the case where we accepted
504         # the package into PU-NEW, but the fingerprint has gone away from the keyring in
505         # the meantime so the package will be remarked as rejectable.  Urgh.
506         # TODO: Fix this properly
507         if self.pkg.changes.has_key('fingerprint'):
508             session = DBConn().session()
509             fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
510             if fpr and self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
511                 if self.pkg.changes.has_key("sponsoremail"):
512                     self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
513             session.close()
514
515         if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
516             self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
517
518         # Apply any global override of the Maintainer field
519         if cnf.get("Dinstall::OverrideMaintainer"):
520             self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
521             self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]
522
523         self.Subst["__REJECT_MESSAGE__"] = self.package_info()
524         self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
525         self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
526         self.Subst["__SUITE__"] = ", ".join(self.pkg.changes["distribution"])
527
528     ###########################################################################
529     def load_changes(self, filename):
530         """
531         Load a changes file and set up a dictionary around it. Also checks for
532         mandatory fields within.
533
534         @type filename: string
535         @param filename: Changes filename, full path.
536
537         @rtype: boolean
538         @return: whether the changes file was valid or not.  We may want to
539                  reject even if this is True (see what gets put in self.rejects).
540                  This is simply to prevent us even trying things later which will
541                  fail because we couldn't properly parse the file.
542         """
543         Cnf = Config()
544         self.pkg.changes_file = filename
545
546         # Parse the .changes field into a dictionary
547         try:
548             self.pkg.changes.update(parse_changes(filename))
549         except CantOpenError:
550             self.rejects.append("%s: can't read file." % (filename))
551             return False
552         except ParseChangesError, line:
553             self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
554             return False
555         except ChangesUnicodeError:
556             self.rejects.append("%s: changes file not proper utf-8" % (filename))
557             return False
558
559         # Parse the Files field from the .changes into another dictionary
560         try:
561             self.pkg.files.update(utils.build_file_list(self.pkg.changes))
562         except ParseChangesError, line:
563             self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
564             return False
565         except UnknownFormatError, format:
566             self.rejects.append("%s: unknown format '%s'." % (filename, format))
567             return False
568
569         # Check for mandatory fields
570         for i in ("distribution", "source", "binary", "architecture",
571                   "version", "maintainer", "files", "changes", "description"):
572             if not self.pkg.changes.has_key(i):
573                 # Avoid undefined errors later
574                 self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
575                 return False
576
577         # Strip a source version in brackets from the source field
578         if re_strip_srcver.search(self.pkg.changes["source"]):
579             self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])
580
581         # Ensure the source field is a valid package name.
582         if not re_valid_pkg_name.match(self.pkg.changes["source"]):
583             self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))
584
585         # Split multi-value fields into a lower-level dictionary
586         for i in ("architecture", "distribution", "binary", "closes"):
587             o = self.pkg.changes.get(i, "")
588             if o != "":
589                 del self.pkg.changes[i]
590
591             self.pkg.changes[i] = {}
592
593             for j in o.split():
594                 self.pkg.changes[i][j] = 1
595
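        # For example (hypothetical values): an Architecture field of
        # "source amd64" becomes {'source': 1, 'amd64': 1}, and a Closes
        # field of "123456 654321" becomes {'123456': 1, '654321': 1}.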
596         # Fix the Maintainer: field to be RFC822/2047 compatible
597         try:
598             (self.pkg.changes["maintainer822"],
599              self.pkg.changes["maintainer2047"],
600              self.pkg.changes["maintainername"],
601              self.pkg.changes["maintaineremail"]) = \
602                    fix_maintainer (self.pkg.changes["maintainer"])
603         except ParseMaintError, msg:
604             self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
605                    % (filename, self.pkg.changes["maintainer"], msg))
606
607         # ...likewise for the Changed-By: field if it exists.
608         try:
609             (self.pkg.changes["changedby822"],
610              self.pkg.changes["changedby2047"],
611              self.pkg.changes["changedbyname"],
612              self.pkg.changes["changedbyemail"]) = \
613                    fix_maintainer (self.pkg.changes.get("changed-by", ""))
614         except ParseMaintError, msg:
615             self.pkg.changes["changedby822"] = ""
616             self.pkg.changes["changedby2047"] = ""
617             self.pkg.changes["changedbyname"] = ""
618             self.pkg.changes["changedbyemail"] = ""
619
620             self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
621                    % (filename, self.pkg.changes["changed-by"], msg))
622
623         # Ensure all the values in Closes: are numbers
624         if self.pkg.changes.has_key("closes"):
625             for i in self.pkg.changes["closes"].keys():
626                 if re_isanum.match (i) == None:
627                     self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))
628
629         # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
630         self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
631         self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])
632
633         # Check the .changes is non-empty
634         if not self.pkg.files:
635             self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
636             return False
637
638         # Changes was syntactically valid even if we'll reject
639         return True
640
641     ###########################################################################
642
643     def check_distributions(self):
644         "Check and map the Distribution field"
645
646         Cnf = Config()
647
648         # Handle suite mappings
649         for m in Cnf.ValueList("SuiteMappings"):
650             args = m.split()
651             mtype = args[0]
652             if mtype == "map" or mtype == "silent-map":
653                 (source, dest) = args[1:3]
654                 if self.pkg.changes["distribution"].has_key(source):
655                     del self.pkg.changes["distribution"][source]
656                     self.pkg.changes["distribution"][dest] = 1
657                     if mtype != "silent-map":
658                         self.notes.append("Mapping %s to %s." % (source, dest))
659                 if self.pkg.changes.has_key("distribution-version"):
660                     if self.pkg.changes["distribution-version"].has_key(source):
661                         self.pkg.changes["distribution-version"][source]=dest
662             elif mtype == "map-unreleased":
663                 (source, dest) = args[1:3]
664                 if self.pkg.changes["distribution"].has_key(source):
665                     for arch in self.pkg.changes["architecture"].keys():
666                         if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
667                             self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
668                             del self.pkg.changes["distribution"][source]
669                             self.pkg.changes["distribution"][dest] = 1
670                             break
671             elif mtype == "ignore":
672                 suite = args[1]
673                 if self.pkg.changes["distribution"].has_key(suite):
674                     del self.pkg.changes["distribution"][suite]
675                     self.warnings.append("Ignoring %s as a target suite." % (suite))
676             elif mtype == "reject":
677                 suite = args[1]
678                 if self.pkg.changes["distribution"].has_key(suite):
679                     self.rejects.append("Uploads to %s are not accepted." % (suite))
680             elif mtype == "propup-version":
681                 # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
682                 #
683                 # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
684                 if self.pkg.changes["distribution"].has_key(args[1]):
685                     self.pkg.changes.setdefault("distribution-version", {})
686                     for suite in args[2:]:
687                         self.pkg.changes["distribution-version"][suite] = suite
688
689         # Ensure there is (still) a target distribution
690         if len(self.pkg.changes["distribution"].keys()) < 1:
691             self.rejects.append("No valid distribution remaining.")
692
693         # Ensure target distributions exist
694         for suite in self.pkg.changes["distribution"].keys():
695             if not Cnf.has_key("Suite::%s" % (suite)):
696                 self.rejects.append("Unknown distribution `%s'." % (suite))
697
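    # The SuiteMappings entries handled above look roughly like this
    # (illustrative values, not a verbatim dak.conf excerpt):
    #
    #   SuiteMappings
    #   {
    #     "map stable proposed-updates";
    #     "silent-map stable-security proposed-updates";
    #     "propup-version stable-security testing";
    #     "ignore testing";
    #   };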
698     ###########################################################################
699
700     def binary_file_checks(self, f, session):
701         cnf = Config()
702         entry = self.pkg.files[f]
703
704         # Extract package control information
705         deb_file = utils.open_file(f)
706         try:
707             control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
708         except:
709             self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
710             deb_file.close()
711             # Can't continue, none of the checks on control would work.
712             return
713
714         # Check for mandatory "Description:"
715         deb_file.seek(0)
716         try:
717             apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
718         except:
719             self.rejects.append("%s: Missing Description in binary package" % (f))
720             return
721
722         deb_file.close()
723
724         # Check for mandatory fields
725         for field in [ "Package", "Architecture", "Version" ]:
726             if control.Find(field) == None:
727                 # Can't continue
728                 self.rejects.append("%s: No %s field in control." % (f, field))
729                 return
730
731         # Ensure the package name matches the one given in the .changes
732         if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
733             self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
734
735         # Validate the package field
736         package = control.Find("Package")
737         if not re_valid_pkg_name.match(package):
738             self.rejects.append("%s: invalid package name '%s'." % (f, package))
739
740         # Validate the version field
741         version = control.Find("Version")
742         if not re_valid_version.match(version):
743             self.rejects.append("%s: invalid version number '%s'." % (f, version))
744
745         # Ensure the architecture of the .deb is one we know about.
746         default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
747         architecture = control.Find("Architecture")
748         upload_suite = self.pkg.changes["distribution"].keys()[0]
749
750         if      architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session = session)] \
751             and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session = session)]:
752             self.rejects.append("Unknown architecture '%s'." % (architecture))
753
754         # Ensure the architecture of the .deb is one of the ones
755         # listed in the .changes.
756         if not self.pkg.changes["architecture"].has_key(architecture):
757             self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
758
759         # Sanity-check the Depends field
760         depends = control.Find("Depends")
761         if depends == '':
762             self.rejects.append("%s: Depends field is empty." % (f))
763
764         # Sanity-check the Provides field
765         provides = control.Find("Provides")
766         if provides:
767             provide = re_spacestrip.sub('', provides)
768             if provide == '':
769                 self.rejects.append("%s: Provides field is empty." % (f))
770             prov_list = provide.split(",")
771             for prov in prov_list:
772                 if not re_valid_pkg_name.match(prov):
773                     self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
774
775         # If there is a Built-Using field, we need to check we can find the
776         # exact source version
777         built_using = control.Find("Built-Using")
778         if built_using:
779             try:
780                 entry["built-using"] = []
781                 for dep in apt_pkg.parse_depends(built_using):
782                     bu_s, bu_v, bu_e = dep[0]
783                     # Check that it's an exact match dependency and we have
784                     # some form of version
785                     if bu_e != "=" or len(bu_v) < 1:
786                         self.rejects.append("%s: Built-Using contains non strict dependency (%s %s %s)" % (f, bu_s, bu_e, bu_v))
787                     else:
788                         # Find the source id for this version
789                         bu_so = get_sources_from_name(bu_s, version=bu_v, session = session)
790                         if len(bu_so) != 1:
791                             self.rejects.append("%s: Built-Using (%s = %s): Cannot find source package" % (f, bu_s, bu_v))
792                         else:
793                             entry["built-using"].append( (bu_so[0].source, bu_so[0].version, ) )
794
795             except ValueError, e:
796                 self.rejects.append("%s: Cannot parse Built-Using field: %s" % (f, str(e)))
797
798
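        # For reference (hypothetical field value): apt_pkg.parse_depends on
        # "gcc-4.6 (= 4.6.0-11)" yields [[('gcc-4.6', '4.6.0-11', '=')]], so
        # dep[0] unpacks to (bu_s, bu_v, bu_e) = ('gcc-4.6', '4.6.0-11', '=').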
799         # Check the section & priority match those given in the .changes (non-fatal)
800         if     control.Find("Section") and entry["section"] != "" \
801            and entry["section"] != control.Find("Section"):
802             self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
803                                 (f, control.Find("Section", ""), entry["section"]))
804         if control.Find("Priority") and entry["priority"] != "" \
805            and entry["priority"] != control.Find("Priority"):
806             self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
807                                 (f, control.Find("Priority", ""), entry["priority"]))
808
809         entry["package"] = package
810         entry["architecture"] = architecture
811         entry["version"] = version
812         entry["maintainer"] = control.Find("Maintainer", "")
813
814         if f.endswith(".udeb"):
815             self.pkg.files[f]["dbtype"] = "udeb"
816         elif f.endswith(".deb"):
817             self.pkg.files[f]["dbtype"] = "deb"
818         else:
819             self.rejects.append("%s is neither a .deb nor a .udeb." % (f))
820
821         entry["source"] = control.Find("Source", entry["package"])
822
823         # Get the source version
824         source = entry["source"]
825         source_version = ""
826
827         if source.find("(") != -1:
828             m = re_extract_src_version.match(source)
829             source = m.group(1)
830             source_version = m.group(2)
831
832         if not source_version:
833             source_version = self.pkg.files[f]["version"]
834
835         entry["source package"] = source
836         entry["source version"] = source_version
837
838         # Ensure the filename matches the contents of the .deb
839         m = re_isadeb.match(f)
840
841         #  package name
842         file_package = m.group(1)
843         if entry["package"] != file_package:
844             self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
845                                 (f, file_package, entry["dbtype"], entry["package"]))
846         epochless_version = re_no_epoch.sub('', control.Find("Version"))
847
848         #  version
849         file_version = m.group(2)
850         if epochless_version != file_version:
851             self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
852                                 (f, file_version, entry["dbtype"], epochless_version))
853
854         #  architecture
855         file_architecture = m.group(3)
856         if entry["architecture"] != file_architecture:
857             self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
858                                 (f, file_architecture, entry["dbtype"], entry["architecture"]))
859
860         # Check for existent source
861         source_version = entry["source version"]
862         source_package = entry["source package"]
863         if self.pkg.changes["architecture"].has_key("source"):
864             if source_version != self.pkg.changes["version"]:
865                 self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
866                                     (source_version, f, self.pkg.changes["version"]))
867         else:
868             # Check in the SQL database
869             if not source_exists(source_package, source_version, suites = \
870                 self.pkg.changes["distribution"].keys(), session = session):
871                 # Check in one of the other directories
872                 source_epochless_version = re_no_epoch.sub('', source_version)
873                 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
874                 if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
875                     entry["byhand"] = 1
876                 elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
877                     entry["new"] = 1
878                 else:
879                     dsc_file_exists = False
880                     for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
881                         if cnf.has_key("Dir::Queue::%s" % (myq)):
882                             if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
883                                 dsc_file_exists = True
884                                 break
885
886                     if not dsc_file_exists:
887                         self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
888
889         # Check the version and for file overwrites
890         self.check_binary_against_db(f, session)
891
892     def source_file_checks(self, f, session):
893         entry = self.pkg.files[f]
894
895         m = re_issource.match(f)
896         if not m:
897             return
898
899         entry["package"] = m.group(1)
900         entry["version"] = m.group(2)
901         entry["type"] = m.group(3)
902
903         # Ensure the source package name matches the Source field in the .changes
904         if self.pkg.changes["source"] != entry["package"]:
905             self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
906
907         # Ensure the source version matches the version in the .changes file
908         if re_is_orig_source.match(f):
909             changes_version = self.pkg.changes["chopversion2"]
910         else:
911             changes_version = self.pkg.changes["chopversion"]
912
913         if changes_version != entry["version"]:
914             self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))
915
916         # Ensure the .changes lists source in the Architecture field
917         if not self.pkg.changes["architecture"].has_key("source"):
918             self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))
919
920         # Check the signature of a .dsc file
921         if entry["type"] == "dsc":
922             # check_signature returns either:
923             #  (None, [list, of, rejects]) or (signature, [])
924             (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
925             for j in rejects:
926                 self.rejects.append(j)
927
928         entry["architecture"] = "source"
929
930     def per_suite_file_checks(self, f, suite, session):
931         cnf = Config()
932         entry = self.pkg.files[f]
933
934         # Skip byhand
935         if entry.has_key("byhand"):
936             return
937
938         # Check we have fields we need to do these checks
939         oktogo = True
940         for m in ['component', 'package', 'priority', 'size', 'md5sum']:
941             if not entry.has_key(m):
942                 self.rejects.append("file '%s' does not have field %s set" % (f, m))
943                 oktogo = False
944
945         if not oktogo:
946             return
947
948         # Handle component mappings
949         for m in cnf.ValueList("ComponentMappings"):
950             (source, dest) = m.split()
951             if entry["component"] == source:
952                 entry["original component"] = source
953                 entry["component"] = dest
954
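        # ComponentMappings entries are plain "source dest" pairs, e.g.
        # (illustrative): "non-US/main main" remaps the old non-US component.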
955         # Ensure the component is valid for the target suite
956         if cnf.has_key("Suite::%s::Components" % (suite)) and \
957            entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
958             self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
959             return
960
961         # Validate the component
962         if not get_component(entry["component"], session):
963             self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
964             return
965
966         # See if the package is NEW
967         if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
968             entry["new"] = 1
969
970         # Validate the priority
971         if entry["priority"].find('/') != -1:
972             self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))
973
974         # Determine the location
975         location = cnf["Dir::Pool"]
976         l = get_location(location, entry["component"], session=session)
977         if l is None:
978             self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
979             entry["location id"] = -1
980         else:
981             entry["location id"] = l.location_id
982
983         # Check the md5sum & size against existing files (if any)
984         entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])
985
986         found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
987                                          entry["size"], entry["md5sum"], entry["location id"])
988
989         if found is None:
990             self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
991         elif found is False and poolfile is not None:
992             self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
993         else:
994             if poolfile is None:
995                 entry["files id"] = None
996             else:
997                 entry["files id"] = poolfile.file_id
998
999         # Check for packages that have moved from one component to another
1000         entry['suite'] = suite
1001         arch_list = [entry["architecture"], 'all']
1002         component = get_component_by_package_suite(self.pkg.files[f]['package'], \
1003             [suite], arch_list = arch_list, session = session)
1004         if component is not None:
1005             entry["othercomponents"] = component
1006
1007     def check_files(self, action=True):
1008         file_keys = self.pkg.files.keys()
1009         holding = Holding()
1010         cnf = Config()
1011
1012         if action:
1013             cwd = os.getcwd()
1014             os.chdir(self.pkg.directory)
1015             for f in file_keys:
1016                 ret = holding.copy_to_holding(f)
1017                 if ret is not None:
1018                     self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)
1019
1020             os.chdir(cwd)
1021
1022         # check we already know the changes file
1023         # [NB: this check must be done post-suite mapping]
1024         base_filename = os.path.basename(self.pkg.changes_file)
1025
1026         session = DBConn().session()
1027
1028         try:
1029             dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
1030             # if in the pool or in a queue other than unchecked, reject
1031             if (dbc.in_queue is None) \
1032                    or (dbc.in_queue is not None
1033                        and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
1034                 self.rejects.append("%s file already known to dak" % base_filename)
1035         except NoResultFound, e:
1036             # not known, good
1037             pass
1038
1039         has_binaries = False
1040         has_source = False
1041
1042         for f, entry in self.pkg.files.items():
1043             # Ensure the file does not already exist in one of the accepted directories
1044             for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
1045                 if not cnf.has_key("Dir::Queue::%s" % (d)): continue
1046                 if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
1047                     self.rejects.append("%s file already exists in the %s directory." % (f, d))
1048
1049             if not re_taint_free.match(f):
1050                 self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))
1051
1052             # Check the file is readable
1053             if os.access(f, os.R_OK) == 0:
1054                 # When running in -n, copy_to_holding() won't have
1055                 # generated the reject_message, so we need to.
1056                 if action:
1057                     if os.path.exists(f):
1058                         self.rejects.append("Can't read `%s'. [permission denied]" % (f))
1059                     else:
1060                         # Don't directly reject, mark to check later to deal with orig's
1061                         # we can find in the pool
1062                         self.later_check_files.append(f)
1063                 entry["type"] = "unreadable"
1064                 continue
1065
1066             # If it's byhand skip remaining checks
1067             if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
1068                 entry["byhand"] = 1
1069                 entry["type"] = "byhand"
1070
1071             # Checks for a binary package...
1072             elif re_isadeb.match(f):
1073                 has_binaries = True
1074                 entry["type"] = "deb"
1075
1076                 # This routine appends to self.rejects/warnings as appropriate
1077                 self.binary_file_checks(f, session)
1078
1079             # Checks for a source package...
1080             elif re_issource.match(f):
1081                 has_source = True
1082
1083                 # This routine appends to self.rejects/warnings as appropriate
1084                 self.source_file_checks(f, session)
1085
1086             # Not a binary or source package?  Assume byhand...
1087             else:
1088                 entry["byhand"] = 1
1089                 entry["type"] = "byhand"
1090
1091             # Per-suite file checks
1092             entry["oldfiles"] = {}
1093             for suite in self.pkg.changes["distribution"].keys():
1094                 self.per_suite_file_checks(f, suite, session)
1095
1096         session.close()
1097
1098         # If the .changes file says it has source, it must have source.
1099         if self.pkg.changes["architecture"].has_key("source"):
1100             if not has_source:
1101                 self.rejects.append("no source found and Architecture line in changes mentions source.")
1102
1103             if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
1104                 self.rejects.append("source only uploads are not supported.")
1105
1106     ###########################################################################
1107     def check_dsc(self, action=True, session=None):
1108         """Returns bool indicating whether or not the source changes are valid"""
1109         # Ensure there is source to check
1110         if not self.pkg.changes["architecture"].has_key("source"):
1111             return True
1112
1113         # Find the .dsc
1114         dsc_filename = None
1115         for f, entry in self.pkg.files.items():
1116             if entry["type"] == "dsc":
1117                 if dsc_filename:
1118                     self.rejects.append("cannot process a .changes file with multiple .dsc's.")
1119                     return False
1120                 else:
1121                     dsc_filename = f
1122
1123         # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1124         if not dsc_filename:
1125             self.rejects.append("source uploads must contain a dsc file")
1126             return False
1127
1128         # Parse the .dsc file
1129         try:
1130             self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1, dsc_file=1))
1131         except CantOpenError:
1132             # if not -n copy_to_holding() will have done this for us...
1133             if not action:
1134                 self.rejects.append("%s: can't read file." % (dsc_filename))
1135         except ParseChangesError, line:
1136             self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
1137         except InvalidDscError, line:
1138             self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
1139         except ChangesUnicodeError:
1140             self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))
1141
1142         # Build up the file list of files mentioned by the .dsc
1143         try:
1144             self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
1145         except NoFilesFieldError:
1146             self.rejects.append("%s: no Files: field." % (dsc_filename))
1147             return False
1148         except UnknownFormatError, format:
1149             self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
1150             return False
1151         except ParseChangesError, line:
1152             self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
1153             return False
1154
1155         # Enforce mandatory fields
1156         for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
1157             if not self.pkg.dsc.has_key(i):
1158                 self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
1159                 return False
1160
1161         # Validate the source and version fields
1162         if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
1163             self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
1164         if not re_valid_version.match(self.pkg.dsc["version"]):
1165             self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))
1166
1167         # Only a limited list of source formats are allowed in each suite
1168         for dist in self.pkg.changes["distribution"].keys():
1169             allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
1170             if self.pkg.dsc["format"] not in allowed:
1171                 self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))
1172
1173         # Validate the Maintainer field
1174         try:
1175             # We ignore the return value
1176             fix_maintainer(self.pkg.dsc["maintainer"])
1177         except ParseMaintError, msg:
1178             self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
1179                                  % (dsc_filename, self.pkg.dsc["maintainer"], msg))
1180
1181         # Validate the build-depends field(s)
1182         for field_name in [ "build-depends", "build-depends-indep" ]:
1183             field = self.pkg.dsc.get(field_name)
1184             if field:
1185                 # Have apt try to parse them...
1186                 try:
1187                     apt_pkg.ParseSrcDepends(field)
1188                 except:
1189                     self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))
1190
1191         # Ensure the version number in the .dsc matches the version number in the .changes
1192         epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1193         changes_version = self.pkg.files[dsc_filename]["version"]
1194
1195         if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
1196             self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
1197
1198         # Ensure the Files field contain only what's expected
1199         self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))
1200
1201         # Ensure source is newer than existing source in target suites
1202         session = DBConn().session()
1203         self.check_source_against_db(dsc_filename, session)
1204         self.check_dsc_against_db(dsc_filename, session)
1205
1206         dbchg = get_dbchange(self.pkg.changes_file, session)
1207
1208         # Finally, check if we're missing any files
1209         for f in self.later_check_files:
1210             print 'XXX: %s' % f
1211             # Check if we've already processed this file if we have a dbchg object
1212             ok = False
1213             if dbchg:
1214                 for pf in dbchg.files:
1215                     if pf.filename == f and pf.processed:
1216                         self.notes.append('%s was already processed so we can go ahead' % f)
1217                         ok = True
1218                         del self.pkg.files[f]
1219             if not ok:
1220                 self.rejects.append("Could not find file %s referenced in changes" % f)
1221
1222         session.close()
1223
1224         return True
1225
1226     ###########################################################################
1227
1228     def get_changelog_versions(self, source_dir):
1229         """Extracts the source package and (optionally) grabs the
1230         version history out of debian/changelog for the BTS."""
1231
1232         cnf = Config()
1233
1234         # Find the .dsc (again)
1235         dsc_filename = None
1236         for f in self.pkg.files.keys():
1237             if self.pkg.files[f]["type"] == "dsc":
1238                 dsc_filename = f
1239
1240         # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1241         if not dsc_filename:
1242             return
1243
1244         # Create a symlink mirror of the source files in our temporary directory
1245         for f in self.pkg.files.keys():
1246             m = re_issource.match(f)
1247             if m:
1248                 src = os.path.join(source_dir, f)
1249                 # If a file is missing for whatever reason, give up.
1250                 if not os.path.exists(src):
1251                     return
1252                 ftype = m.group(3)
1253                 if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
1254                    self.pkg.orig_files[f].has_key("path"):
1255                     continue
1256                 dest = os.path.join(os.getcwd(), f)
1257                 os.symlink(src, dest)
1258
1259         # If the orig files are not a part of the upload, create symlinks to the
1260         # existing copies.
1261         for orig_file in self.pkg.orig_files.keys():
1262             if not self.pkg.orig_files[orig_file].has_key("path"):
1263                 continue
1264             dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
1265             os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
1266
1267         # Extract the source
1268         cmd = "dpkg-source -sn -x %s" % (dsc_filename)
1269         (result, output) = commands.getstatusoutput(cmd)
1270         if (result != 0):
1271             self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
1272             self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
1273             return
1274
1275         if not cnf.Find("Dir::Queue::BTSVersionTrack"):
1276             return
1277
1278         # Get the upstream version
1279         upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1280         if re_strip_revision.search(upstr_version):
1281             upstr_version = re_strip_revision.sub('', upstr_version)
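        # e.g. a .dsc version of "1:2.5-1" has now been reduced to the
        # upstream version "2.5" (epoch and Debian revision stripped)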
1282
1283         # Ensure the changelog file exists
1284         changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
1285         if not os.path.exists(changelog_filename):
1286             self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
1287             return
1288
1289         # Parse the changelog
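        # re_changelog_versions is expected to match the heading line of each
        # entry in the standard Debian changelog format, e.g.:
        #   dak (1.0-1) unstable; urgency=low
        # and only those heading lines are kept for the BTS version history.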
1290         self.pkg.dsc["bts changelog"] = ""
1291         changelog_file = utils.open_file(changelog_filename)
1292         for line in changelog_file.readlines():
1293             m = re_changelog_versions.match(line)
1294             if m:
1295                 self.pkg.dsc["bts changelog"] += line
1296         changelog_file.close()
1297
1298         # Check we found at least one revision in the changelog
1299         if not self.pkg.dsc["bts changelog"]:
1300             self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1301
1302     def check_source(self):
1303         # Bail out if there's no source: there is nothing to
1304         # extract a changelog from in that case
1305         if not self.pkg.changes["architecture"].has_key("source"):
1306             return
1307
1308         tmpdir = utils.temp_dirname()
1309
1310         # Move into the temporary directory
1311         cwd = os.getcwd()
1312         os.chdir(tmpdir)
1313
1314         # Get the changelog version history
1315         self.get_changelog_versions(cwd)
1316
1317         # Move back and cleanup the temporary tree
1318         os.chdir(cwd)
1319
1320         try:
1321             shutil.rmtree(tmpdir)
1322         except OSError, e:
1323             if e.errno != errno.EACCES:
1324                 # Unexpected failure (not a permissions problem); give up.
1325                 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1326
1327             self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1328             # We probably have u-r or u-w directories so chmod everything
1329             # and try again.
1330             cmd = "chmod -R u+rwx %s" % (tmpdir)
1331             result = os.system(cmd)
1332             if result != 0:
1333                 utils.fubar("'%s' failed with result %s." % (cmd, result))
1334             shutil.rmtree(tmpdir)
1335         except Exception, e:
1336             print "unexpected error removing %s: %s" % (tmpdir, e)
1337             utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1338
1339     ###########################################################################
1340     def ensure_hashes(self):
1341         # Make sure we recognise the format of the Files: field in the .changes
1342         format = self.pkg.changes.get("format", "0.0").split(".", 1)
1343         if len(format) == 2:
1344             format = int(format[0]), int(format[1])
1345         else:
1346             format = int(float(format[0])), 0
1347
1348         # We need to deal with the original changes blob, as the fields we need
1349         # might not be in the changes dict serialised into the .dak anymore.
1350         orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1351
1352         # Copy the checksums over to the current changes dict.  This will keep
1353         # the existing modifications to it intact.
1354         for field in orig_changes:
1355             if field.startswith('checksums-'):
1356                 self.pkg.changes[field] = orig_changes[field]
1357
1358         # Check for unsupported hashes
1359         for j in utils.check_hash_fields(".changes", self.pkg.changes):
1360             self.rejects.append(j)
1361
1362         for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1363             self.rejects.append(j)
1364
1365         # If the .changes format predates the version a hash field first appeared
1366         # in, we calculate that hash ourselves rather than requiring it in the file
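        # (utils.known_hashes is assumed to hold (hashname, hashfunc, version)
        # tuples, along the lines of ("sha256", apt_pkg.sha256sum, (1, 8));
        # illustrative only, not verified against utils.py)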
1367         for hashname, hashfunc, version in utils.known_hashes:
1368             # TODO: Move _ensure_changes_hash into this class
1369             for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1370                 self.rejects.append(j)
1371             if "source" in self.pkg.changes["architecture"]:
1372                 # TODO: Move _ensure_dsc_hash into this class
1373                 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1374                     self.rejects.append(j)
1375
1376     def check_hashes(self):
1377         for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1378             self.rejects.append(m)
1379
1380         for m in utils.check_size(".changes", self.pkg.files):
1381             self.rejects.append(m)
1382
1383         for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1384             self.rejects.append(m)
1385
1386         for m in utils.check_size(".dsc", self.pkg.dsc_files):
1387             self.rejects.append(m)
1388
1389         self.ensure_hashes()
1390
1391     ###########################################################################
1392
1393     def ensure_orig(self, target_dir='.', session=None):
1394         """
1395         Ensures that all orig files mentioned in the .dsc are present
1396         in target_dir. If they do not exist, they are symlinked into place.
1397
1398         A list containing the symlinks that were created is returned (so they
1399         can be removed).
1400         """
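        # A minimal usage sketch (check_lintian below is the real caller):
        #
        #   symlinked = self.ensure_orig()
        #   try:
        #       pass  # run whatever needs the orig tarballs in place
        #   finally:
        #       for link in symlinked:
        #           os.unlink(link)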
1401
1402         symlinked = []
1403         cnf = Config()
1404
1405         for filename, entry in self.pkg.dsc_files.iteritems():
1406             if not re_is_orig_source.match(filename):
1407                 # File is not an orig; ignore
1408                 continue
1409
1410             if os.path.exists(filename):
1411                 # File exists, no need to continue
1412                 continue
1413
1414             def symlink_if_valid(path):
1415                 f = utils.open_file(path)
1416                 md5sum = apt_pkg.md5sum(f)
1417                 f.close()
1418
1419                 fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
1420                 expected = (int(entry['size']), entry['md5sum'])
1421
1422                 if fingerprint != expected:
1423                     return False
1424
1425                 dest = os.path.join(target_dir, filename)
1426
1427                 os.symlink(path, dest)
1428                 symlinked.append(dest)
1429
1430                 return True
1431
1432             session_ = session
1433             if session is None:
1434                 session_ = DBConn().session()
1435
1436             found = False
1437
1438             # Look in the pool
1439             for poolfile in get_poolfile_like_name(filename, session_):
1440                 poolfile_path = os.path.join(
1441                     poolfile.location.path, poolfile.filename
1442                 )
1443
1444                 if symlink_if_valid(poolfile_path):
1445                     found = True
1446                     break
1447
1448             if session is None:
1449                 session_.close()
1450
1451             if found:
1452                 continue
1453
1454             # Look in some other queues for the file
1455             queues = ('New', 'Byhand', 'ProposedUpdates',
1456                 'OldProposedUpdates', 'Embargoed', 'Unembargoed')
1457
1458             for queue in queues:
1459                 if not cnf.get('Dir::Queue::%s' % queue):
1460                     continue
1461
1462                 queuefile_path = os.path.join(
1463                     cnf['Dir::Queue::%s' % queue], filename
1464                 )
1465
1466                 if not os.path.exists(queuefile_path):
1467                     # Does not exist in this queue
1468                     continue
1469
1470                 if symlink_if_valid(queuefile_path):
1471                     break
1472
1473         return symlinked
1474
1475     ###########################################################################
1476
1477     def check_lintian(self):
1478         """
1479         Extends self.rejects by checking the output of lintian against tags
1480         specified in Dinstall::LintianTags.
1481         """
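        # The tag file is YAML keyed by severity group; only the top-level
        # "lintian" key is relied upon below, so the group names here are
        # purely illustrative:
        #
        #   lintian:
        #     fatal:
        #       - some-lintian-tag
        #     nonfatal:
        #       - another-lintian-tag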
1482
1483         cnf = Config()
1484
1485         # Don't reject binary uploads
1486         if not self.pkg.changes['architecture'].has_key('source'):
1487             return
1488
1489         # Only check some distributions
1490         for dist in ('unstable', 'experimental'):
1491             if dist in self.pkg.changes['distribution']:
1492                 break
1493         else:
1494             return
1495
1496         # If we do not have a tagfile, don't do anything
1497         tagfile = cnf.get("Dinstall::LintianTags")
1498         if tagfile is None:
1499             return
1500
1501         # Parse the yaml file
1502         sourcefile = open(tagfile, 'r')
1503         sourcecontent = sourcefile.read()
1504         sourcefile.close()
1505
1506         try:
1507             lintiantags = yaml.safe_load(sourcecontent)['lintian']
1508         except yaml.YAMLError, msg:
1509             utils.fubar("Cannot read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
1510             return
1511
1512         # Try and find all orig mentioned in the .dsc
1513         symlinked = self.ensure_orig()
1514
1515         # Setup the input file for lintian
1516         fd, temp_filename = utils.temp_filename()
1517         temptagfile = os.fdopen(fd, 'w')
1518         for tags in lintiantags.values():
1519             temptagfile.writelines(['%s\n' % x for x in tags])
1520         temptagfile.close()
1521
1522         try:
1523             cmd = "lintian --show-overrides --tags-from-file %s %s" % \
1524                 (temp_filename, self.pkg.changes_file)
1525
1526             result, output = commands.getstatusoutput(cmd)
1527         finally:
1528             # Remove our tempfile and any symlinks we created
1529             os.unlink(temp_filename)
1530
1531             for symlink in symlinked:
1532                 os.unlink(symlink)
1533
1534         if result == 2:
1535             utils.warn("lintian failed for %s [return code: %s]." % \
1536                 (self.pkg.changes_file, result))
1537             utils.warn(utils.prefix_multi_line_string(output, \
1538                 " [possible output:] "))
1539
1540         def log(*txt):
1541             if self.logger:
1542                 self.logger.log(
1543                     [self.pkg.changes_file, "check_lintian"] + list(txt)
1544                 )
1545
1546         # Generate messages
1547         parsed_tags = parse_lintian_output(output)
1548         self.rejects.extend(
1549             generate_reject_messages(parsed_tags, lintiantags, log=log)
1550         )
1551
1552     ###########################################################################
1553     def check_urgency(self):
1554         cnf = Config()
1555         if self.pkg.changes["architecture"].has_key("source"):
1556             if not self.pkg.changes.has_key("urgency"):
1557                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1558             self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1559             if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1560                 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1561                                      (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1562                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1563
1564     ###########################################################################
1565
1566     # Sanity check the time stamps of files inside debs.
1567     # [Files in the near future cause ugly warnings and extreme time
1568     #  travel can cause errors on extraction]
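    # For example, with a hypothetical Dinstall::FutureTimeTravelGrace of
    # 28800 (8 hours) and Dinstall::PastCutoffYear of "1984", any tarball
    # member stamped after now+8h or before 1984-01-01 gets rejected below.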
1569
1570     def check_timestamps(self):
1571         Cnf = Config()
1572
1573         future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1574         past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1575         tar = TarTime(future_cutoff, past_cutoff)
1576
1577         for filename, entry in self.pkg.files.items():
1578             if entry["type"] == "deb":
1579                 tar.reset()
1580                 try:
1581                     deb_file = utils.open_file(filename)
1582                     apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
1583                     deb_file.seek(0)
1584                     try:
1585                         apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
1586                     except SystemError, e:
1587                         # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
1588                         if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
1589                             raise
1590                         deb_file.seek(0)
1591                         apt_inst.debExtract(deb_file, tar.callback, "data.tar.bz2")
1592
1593                     deb_file.close()
1594
1595                     future_files = tar.future_files.keys()
1596                     if future_files:
1597                         num_future_files = len(future_files)
1598                         future_file = future_files[0]
1599                         future_date = tar.future_files[future_file]
1600                         self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1601                                % (filename, num_future_files, future_file, time.ctime(future_date)))
1602
1603                     ancient_files = tar.ancient_files.keys()
1604                     if ancient_files:
1605                         num_ancient_files = len(ancient_files)
1606                         ancient_file = ancient_files[0]
1607                         ancient_date = tar.ancient_files[ancient_file]
1608                         self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1609                                % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1610                 except Exception, e:
1611                     self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, type(e).__name__, e))
1612
1613     def check_if_upload_is_sponsored(self, uid_email, uid_name):
1614         if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1615             sponsored = False
1616         elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1617             sponsored = False
1618             if uid_name == "":
1619                 sponsored = True
1620         else:
1621             sponsored = True
1622             if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1623                 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1624                 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1625                     self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1626                         self.pkg.changes["sponsoremail"] = uid_email
1627
1628         return sponsored
1629
1630
1631     ###########################################################################
1632     # check_signed_by_key checks
1633     ###########################################################################
1634
1635     def check_signed_by_key(self):
1636         """Ensure the .changes is signed by an authorized uploader."""
1637         session = DBConn().session()
1638
1639         # First of all we check that the person has proper upload permissions
1640         # and that this upload isn't blocked
1641         fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1642
1643         if fpr is None:
1644             self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1645             return
1646
1647         # TODO: Check that import-keyring adds UIDs properly
1648         if not fpr.uid:
1649             self.rejects.append("Cannot find uid for fingerprint %s.  Please contact ftpmaster@debian.org" % fpr.fingerprint)
1650             return
1651
1652         # Check that the fingerprint which uploaded has permission to do so
1653         self.check_upload_permissions(fpr, session)
1654
1655         # Check that this package is not in a transition
1656         self.check_transition(session)
1657
1658         session.close()
1659
1660
1661     def check_upload_permissions(self, fpr, session):
1662         # Check any one-off upload blocks
1663         self.check_upload_blocks(fpr, session)
1664
1665         # Check source upload permissions.
1666         # DM is a special case unfortunately, so we check it first
1667         # (keys with no source access get more access than DMs in one
1668         #  way; DMs can only upload for their packages whether source
1669         #  or binary, whereas keys with no access might be able to
1670         #  upload some binaries)
1671         if fpr.source_acl.access_level == 'dm':
1672             self.check_dm_upload(fpr, session)
1673         else:
1674             # Check source-based permissions for other types
1675             if self.pkg.changes["architecture"].has_key("source") and \
1676                 fpr.source_acl.access_level is None:
1677                 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1678                 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1679                 self.rejects.append(rej)
1680                 return
1681             # If not a DM, we allow full upload rights
1682             uid_email = "%s@debian.org" % (fpr.uid.uid)
1683             self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1684
1685
1686         # Check binary upload permissions
1687         # By this point we know that DMs can't have got here unless they
1688         # are allowed to deal with the package concerned so just apply
1689         # normal checks
1690         if fpr.binary_acl.access_level == 'full':
1691             return
1692
1693         # Otherwise we're in the map case
1694         tmparches = self.pkg.changes["architecture"].copy()
1695         tmparches.pop('source', None)
1696
1697         for bam in fpr.binary_acl_map:
1698             tmparches.pop(bam.architecture.arch_string, None)
1699
1700         if len(tmparches.keys()) > 0:
1701             if fpr.binary_reject:
1702                 rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1703                 rej += "\narchitectures involved are: " + ",".join(tmparches.keys())
1704                 self.rejects.append(rej)
1705             else:
1706                 # TODO: This is where we'll implement reject vs throw away binaries later
1707                 rej = "Uhm.  I'm meant to throw away the binaries now but that's not implemented yet"
1708                 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1709                 rej += "\nFingerprint: %s" % (fpr.fingerprint)
1710                 self.rejects.append(rej)
1711
1712
1713     def check_upload_blocks(self, fpr, session):
1714         """Check whether any upload blocks apply to this source, source
1715            version, uid / fpr combination"""
1716
1717         def block_rej_template(fb):
1718             rej = 'Manual upload block in place for package %s' % fb.source
1719             if fb.version is not None:
1720                 rej += ', version %s' % fb.version
1721             return rej
1722
1723         for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1724             # version is None if the block applies to all versions
1725             if fb.version is None or fb.version == self.pkg.changes['version']:
1726                 # Check both fpr and uid - either is enough to cause a reject
1727                 if fb.fpr is not None:
1728                     if fb.fpr.fingerprint == fpr.fingerprint:
1729                         self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1730                 if fb.uid is not None:
1731                     if fb.uid == fpr.uid:
1732                         self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1733
1734
1735     def check_dm_upload(self, fpr, session):
1736         # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
1737         ## none of the uploaded packages are NEW
1738         rej = False
1739         for f in self.pkg.files.keys():
1740             if self.pkg.files[f].has_key("byhand"):
1741                 self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
1742                 rej = True
1743             if self.pkg.files[f].has_key("new"):
1744                 self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
1745                 rej = True
1746
1747         if rej:
1748             return
1749
1750         r = get_newest_source(self.pkg.changes["source"], session)
1751
1752         if r is None:
1753             rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
1754             self.rejects.append(rej)
1755             return
1756
1757         if not r.dm_upload_allowed:
1758             rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
1759             self.rejects.append(rej)
1760             return
1761
1762         ## the Maintainer: field of the uploaded .changes file corresponds with
1763         ## the owner of the key used (ie, non-developer maintainers may not sponsor
1764         ## uploads)
1765         if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
1766             self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
1767
1768         ## the most recent version of the package uploaded to unstable or
1769         ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
1770         ## non-developer maintainers cannot NMU or hijack packages)
1771
1772         # srcuploaders includes the maintainer
1773         accept = False
1774         for sup in r.srcuploaders:
1775             (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
1776             # Eww - I hope we never have two people with the same name in Debian
1777             if email == fpr.uid.uid or name == fpr.uid.name:
1778                 accept = True
1779                 break
1780
1781         if not accept:
1782             self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
1783             return
1784
1785         ## none of the packages are being taken over from other source packages
1786         for b in self.pkg.changes["binary"].keys():
1787             for suite in self.pkg.changes["distribution"].keys():
1788                 for s in get_source_by_package_and_suite(b, suite, session):
1789                     if s.source != self.pkg.changes["source"]:
1790                         self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s.source, suite))
1791
1792
1793
1794     def check_transition(self, session):
1795         cnf = Config()
1796
1797         sourcepkg = self.pkg.changes["source"]
1798
1799         # No sourceful upload -> no need to do anything else, direct return
1800         # We also work with unstable uploads, not experimental or those going to some
1801         # proposed-updates queue
1802         if "source" not in self.pkg.changes["architecture"] or \
1803            "unstable" not in self.pkg.changes["distribution"]:
1804             return
1805
1806         # Also, only check if there is a file defined (and existent) with
1807         # checks.
1808         transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1809         if transpath == "" or not os.path.exists(transpath):
1810             return
1811
1812         # Parse the yaml file
1813         sourcefile = open(transpath, 'r')
1814         sourcecontent = sourcefile.read()
1815         try:
1816             transitions = yaml.safe_load(sourcecontent)
1817         except yaml.YAMLError, msg:
1818             # This shouldn't happen, as there is a wrapper to edit the file
1819             # which checks it, but we'd rather be safe than end up rejecting
1820             # everything.
1821             utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1822             return
1823
1824         # Now look through all defined transitions
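        # Each transition entry is expected to look roughly like this
        # (field names taken from the lookups below, values invented):
        #
        #   libfoo2:
        #     reason: "library transition"
        #     source: libfoo
        #     new: 2.0-1
        #     rm: "Some Release-Team Member"
        #     packages:
        #       - bar
        #       - baz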
1825         for trans in transitions:
1826             t = transitions[trans]
1827             source = t["source"]
1828             expected = t["new"]
1829
1830             # Will be None if nothing is in testing.
1831             current = get_source_in_suite(source, "testing", session)
1832             if current is not None:
1833                 compare = apt_pkg.VersionCompare(current.version, expected)
1834
1835             if current is None or compare < 0:
1836                 # This is still valid, the current version in testing is older than
1837                 # the new version we wait for, or there is none in testing yet
1838
1839                 # Check if the source we look at is affected by this.
1840                 if sourcepkg in t['packages']:
1841                     # The source is affected, lets reject it.
1842
1843                     rejectmsg = "%s: part of the %s transition.\n\n" % (
1844                         sourcepkg, trans)
1845
1846                     if current is not None:
1847                         currentlymsg = "at version %s" % (current.version)
1848                     else:
1849                         currentlymsg = "not present in testing"
1850
1851                     rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1852
1853                     rejectmsg += "\n".join(textwrap.wrap("""Your package
1854 is part of a testing transition designed to get %s migrated (it is
1855 currently %s, we need version %s).  This transition is managed by the
1856 Release Team, and %s is the Release-Team member responsible for it.
1857 Please mail debian-release@lists.debian.org or contact %s directly if you
1858 need further assistance.  You might want to upload to experimental until this
1859 transition is done."""
1860                             % (source, currentlymsg, expected, t["rm"], t["rm"])))
1861
1862                     self.rejects.append(rejectmsg)
1863                     return
1864
1865     ###########################################################################
1866     # End check_signed_by_key checks
1867     ###########################################################################
1868
1869     def build_summaries(self):
1870         """ Build a summary of changes the upload introduces. """
1871
1872         (byhand, new, summary, override_summary) = self.pkg.file_summary()
1873
1874         short_summary = summary
1875
1876         # This is for direport's benefit...
1877         f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1878
1879         if byhand or new:
1880             summary += "Changes: " + f
1881
1882         summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1883
1884         summary += self.announce(short_summary, 0)
1885
1886         return (summary, short_summary)
1887
1888     ###########################################################################
1889
1890     def close_bugs(self, summary, action):
1891         """
1892         Send mail to close bugs as instructed by the closes field in the changes file.
1893         Also add a line to summary if any work was done.
1894
1895         @type summary: string
1896         @param summary: summary text, as given by L{build_summaries}
1897
1898         @type action: bool
1899         @param action: If set to false, no real action will be done.
1900
1901         @rtype: string
1902         @return: summary. If action was taken, extended by the list of closed bugs.
1903
1904         """
1905
1906         template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1907
1908         bugs = self.pkg.changes["closes"].keys()
1909
1910         if not bugs:
1911             return summary
1912
1913         bugs.sort()
1914         summary += "Closing bugs: "
1915         for bug in bugs:
1916             summary += "%s " % (bug)
1917             if action:
1918                 self.update_subst()
1919                 self.Subst["__BUG_NUMBER__"] = bug
1920                 if self.pkg.changes["distribution"].has_key("stable"):
1921                     self.Subst["__STABLE_WARNING__"] = """
1922 Note that this package is not part of the released stable Debian
1923 distribution.  It may have dependencies on other unreleased software,
1924 or other instabilities.  Please take care if you wish to install it.
1925 The update will eventually make its way into the next released Debian
1926 distribution."""
1927                 else:
1928                     self.Subst["__STABLE_WARNING__"] = ""
1929                 mail_message = utils.TemplateSubst(self.Subst, template)
1930                 utils.send_mail(mail_message)
1931
1932                 # Clear up after ourselves
1933                 del self.Subst["__BUG_NUMBER__"]
1934                 del self.Subst["__STABLE_WARNING__"]
1935
1936         if action and self.logger:
1937             self.logger.log(["closing bugs"] + bugs)
1938
1939         summary += "\n"
1940
1941         return summary
1942
1943     ###########################################################################
1944
1945     def announce(self, short_summary, action):
1946         """
1947         Send an announce mail about a new upload.
1948
1949         @type short_summary: string
1950         @param short_summary: Short summary text to include in the mail
1951
1952         @type action: bool
1953         @param action: If set to false, no real action will be done.
1954
1955         @rtype: string
1956         @return: Text string describing the action taken.
1957
1958         """
1959
1960         cnf = Config()
1961         announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
1962
1963         # Only do announcements for source uploads with a recent dpkg-dev installed
1964         if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
1965            self.pkg.changes["architecture"].has_key("source"):
1966             return ""
1967
1968         lists_done = {}
1969         summary = ""
1970
1971         self.Subst["__SHORT_SUMMARY__"] = short_summary
1972
1973         for dist in self.pkg.changes["distribution"].keys():
1974             suite = get_suite(dist)
1975             if suite is None: continue
1976             announce_list = suite.announce
1977             if announce_list == "" or lists_done.has_key(announce_list):
1978                 continue
1979
1980             lists_done[announce_list] = 1
1981             summary += "Announcing to %s\n" % (announce_list)
1982
1983             if action:
1984                 self.update_subst()
1985                 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
1986                 if cnf.get("Dinstall::TrackingServer") and \
1987                    self.pkg.changes["architecture"].has_key("source"):
1988                     trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
1989                     self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
1990
1991                 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
1992                 utils.send_mail(mail_message)
1993
1994                 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
1995
1996         if cnf.FindB("Dinstall::CloseBugs"):
1997             summary = self.close_bugs(summary, action)
1998
1999         del self.Subst["__SHORT_SUMMARY__"]
2000
2001         return summary
2002
2003     ###########################################################################
2004     @session_wrapper
2005     def accept (self, summary, short_summary, session=None):
2006         """
2007         Accept an upload.
2008
2009         This moves all files referenced from the .changes into the pool,
2010         sends the accepted mail, announces to lists, closes bugs and
2011         also checks for override disparities. If enabled it will write out
2012         the version history for the BTS Version Tracking and will finally call
2013         L{queue_build}.
2014
2015         @type summary: string
2016         @param summary: Summary text
2017
2018         @type short_summary: string
2019         @param short_summary: Short summary
2020         """
2021
2022         cnf = Config()
2023         stats = SummaryStats()
2024
2025         print "Installing."
2026         self.logger.log(["installing changes", self.pkg.changes_file])
2027
2028         poolfiles = []
2029
2030         # Add the .dsc file to the DB first
2031         for newfile, entry in self.pkg.files.items():
2032             if entry["type"] == "dsc":
2033                 source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
2034                 for j in pfs:
2035                     poolfiles.append(j)
2036
2037         # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
2038         for newfile, entry in self.pkg.files.items():
2039             if entry["type"] == "deb":
2040                 poolfiles.append(add_deb_to_db(self, newfile, session))
2041
2042         # If this is a sourceful diff only upload that is moving
2043         # cross-component we need to copy the .orig files into the new
2044         # component too for the same reasons as above.
2045         # XXX: mhy: I think this should be in add_dsc_to_db
2046         if self.pkg.changes["architecture"].has_key("source"):
2047             for orig_file in self.pkg.orig_files.keys():
2048                 if not self.pkg.orig_files[orig_file].has_key("id"):
2049                     continue # Skip if it's not in the pool
2050                 orig_file_id = self.pkg.orig_files[orig_file]["id"]
2051                 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
2052                     continue # Skip if the location didn't change
2053
2054                 # Do the move
2055                 oldf = get_poolfile_by_id(orig_file_id, session)
2056                 old_filename = os.path.join(oldf.location.path, oldf.filename)
2057                 old_dat = {'size': oldf.filesize,   'md5sum': oldf.md5sum,
2058                            'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
2059
2060                 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
2061
2062                 # TODO: Care about size/md5sum collisions etc
2063                 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
2064
2065                 # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
2066                 if newf is None:
2067                     utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
2068                     newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
2069
2070                     session.flush()
2071
2072                     # Don't reference the old file from this changes
2073                     for p in poolfiles:
2074                         if p.file_id == oldf.file_id:
2075                             poolfiles.remove(p)
2076
2077                     poolfiles.append(newf)
2078
2079                     # Fix up the DSC references
2080                     toremove = []
2081
2082                     for df in source.srcfiles:
2083                         if df.poolfile.file_id == oldf.file_id:
2084                             # Add a new DSC entry and mark the old one for deletion
2085                             # Don't do it in the loop so we don't change the thing we're iterating over
2086                             newdscf = DSCFile()
2087                             newdscf.source_id = source.source_id
2088                             newdscf.poolfile_id = newf.file_id
2089                             session.add(newdscf)
2090
2091                             toremove.append(df)
2092
2093                     for df in toremove:
2094                         session.delete(df)
2095
2096                     # Flush our changes
2097                     session.flush()
2098
2099                     # Make sure that our source object is up-to-date
2100                     session.expire(source)
2101
2102         # Add changelog information to the database
2103         self.store_changelog()
2104
2105         # Install the files into the pool
2106         for newfile, entry in self.pkg.files.items():
2107             destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
2108             utils.move(newfile, destination)
2109             self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
2110             stats.accept_bytes += float(entry["size"])
2111
2112         # Copy the .changes file across for suites which need it.
2113         copy_changes = dict([(x.copychanges, '')
2114                              for x in session.query(Suite).filter(Suite.suite_name.in_(self.pkg.changes["distribution"].keys())).all()
2115                              if x.copychanges is not None])
2116
2117         for dest in copy_changes.keys():
2118             utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
2119
2120         # We're done - commit the database changes
2121         session.commit()
2122         # Our SQL session will automatically start a new transaction after
2123         # the last commit
2124
2125         # Move the .changes into the 'done' directory
2126         utils.move(self.pkg.changes_file,
2127                    os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))
2128
2129         if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
2130             UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
2131
2132         self.update_subst()
2133         self.Subst["__SUMMARY__"] = summary
2134         mail_message = utils.TemplateSubst(self.Subst,
2135                                            os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
2136         utils.send_mail(mail_message)
2137         self.announce(short_summary, 1)
2138
2139         ## Helper stuff for DebBugs Version Tracking
2140         if cnf.Find("Dir::Queue::BTSVersionTrack"):
2141             if self.pkg.changes["architecture"].has_key("source"):
2142                 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2143                 version_history = os.fdopen(fd, 'w')
2144                 version_history.write(self.pkg.dsc["bts changelog"])
2145                 version_history.close()
2146                 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2147                                       self.pkg.changes_file[:-8]+".versions")
2148                 os.rename(temp_filename, filename)
2149                 os.chmod(filename, 0644)
2150
2151             # Write out the binary -> source mapping.
2152             (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2153             debinfo = os.fdopen(fd, 'w')
2154             for name, entry in sorted(self.pkg.files.items()):
2155                 if entry["type"] == "deb":
2156                     line = " ".join([entry["package"], entry["version"],
2157                                      entry["architecture"], entry["source package"],
2158                                      entry["source version"]])
2159                     debinfo.write(line+"\n")
2160             debinfo.close()
2161             filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2162                                   self.pkg.changes_file[:-8]+".debinfo")
2163             os.rename(temp_filename, filename)
2164             os.chmod(filename, 0644)
2165
2166         session.commit()
2167
2168         # Set up our copy queues (e.g. buildd queues)
2169         for suite_name in self.pkg.changes["distribution"].keys():
2170             suite = get_suite(suite_name, session)
2171             for q in suite.copy_queues:
2172                 for f in poolfiles:
2173                     q.add_file_from_pool(f)
2174
2175         session.commit()
2176
2177         # Finally...
2178         stats.accept_count += 1
2179
2180     def check_override(self):
2181         """
2182         Checks override entries for validity. Mails "Override disparity" warnings,
2183         if that feature is enabled.
2184
2185         Abandons the check if
2186           - override disparity checks are disabled
2187           - mail sending is disabled
2188         """
2189
2190         cnf = Config()
2191
2192         # Abandon the check if override disparity checks have been disabled
2193         if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
2194             return
2195
2196         summary = self.pkg.check_override()
2197
2198         if summary == "":
2199             return
2200
2201         overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2202
2203         self.update_subst()
2204         self.Subst["__SUMMARY__"] = summary
2205         mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2206         utils.send_mail(mail_message)
2207         del self.Subst["__SUMMARY__"]
2208
2209     ###########################################################################
2210
2211     def remove(self, from_dir=None):
2212         """
2213         Used (for instance) in p-u to remove the package from unchecked
2214
2215         Also removes the package from holding area.
2216         """
2217         if from_dir is None:
2218             from_dir = self.pkg.directory
2219         h = Holding()
2220
2221         for f in self.pkg.files.keys():
2222             os.unlink(os.path.join(from_dir, f))
2223             if os.path.exists(os.path.join(h.holding_dir, f)):
2224                 os.unlink(os.path.join(h.holding_dir, f))
2225
2226         os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2227         if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2228             os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2229
2230     ###########################################################################
2231
2232     def move_to_queue (self, queue):
2233         """
2234         Move files to a destination queue using the permissions in the table
2235         """
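        # queue.perms / queue.change_perms hold the mode as an octal string
        # (e.g. "0664"; example value only), which int(x, 8) converts to the
        # numeric mode that utils.move() applies.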
2236         h = Holding()
2237         utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2238                    queue.path, perms=int(queue.change_perms, 8))
2239         for f in self.pkg.files.keys():
2240             utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
2241
2242     ###########################################################################
2243
2244     def force_reject(self, reject_files):
2245         """
2246         Forcefully move files from the current directory to the
2247         reject directory.  If any file already exists in the reject
2248         directory it will be moved to the morgue to make way for
2249         the new file.
2250
2251         @type reject_files: dict
2252         @param reject_files: file dictionary
2253
2254         """
2255
2256         cnf = Config()
2257
2258         for file_entry in reject_files:
2259             # Skip any files which don't exist or which we don't have permission to copy.
2260             if not os.access(file_entry, os.R_OK):
2261                 continue
2262
2263             dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
2264
2265             try:
2266                 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
2267             except OSError, e:
2268                 # File exists?  Let's find a new name by adding a number
2269                 if e.errno == errno.EEXIST:
2270                     try:
2271                         dest_file = utils.find_next_free(dest_file, 255)
2272                     except NoFreeFilenameError:
2273                         # Something's either gone badly Pete Tong, or
2274                         # someone is trying to exploit us.
2275                         utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))
2276                         return
2277
2278                     # Make sure we really got it
2279                     try:
2280                         dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2281                     except OSError, e:
2282                         # Likewise
2283                         utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
2284                         return
2285                 else:
2286                     raise
2287             # If we got here, we own the destination file, so we can
2288             # safely overwrite it.
2289             utils.move(file_entry, dest_file, 1, perms=0660)
2290             os.close(dest_fd)
2291
2292     ###########################################################################
2293     def do_reject (self, manual=0, reject_message="", notes=""):
2294         """
2295         Reject an upload. If C{manual} is true and no reject message was
2296         given, spawn an editor so the user can write one.
2297
2298         @type manual: bool
2299         @param manual: manual or automated rejection
2300
2301         @type reject_message: string
2302         @param reject_message: A reject message
2303
2304         @return: 0 on rejection, 1 if the rejection was abandoned
2305
2306         """
2307         # If we weren't given a manual rejection message, spawn an
2308         # editor so the user can add one in...
2309         if manual and not reject_message:
2310             (fd, temp_filename) = utils.temp_filename()
2311             temp_file = os.fdopen(fd, 'w')
2312             if len(notes) > 0:
2313                 for note in notes:
2314                     temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
2315                                     % (note.author, note.version, note.notedate, note.comment))
2316             temp_file.close()
2317             editor = os.environ.get("EDITOR","vi")
2318             answer = 'E'
2319             while answer == 'E':
2320                 os.system("%s %s" % (editor, temp_filename))
2321                 temp_fh = utils.open_file(temp_filename)
2322                 reject_message = "".join(temp_fh.readlines())
2323                 temp_fh.close()
2324                 print "Reject message:"
2325                 print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
2326                 prompt = "[R]eject, Edit, Abandon, Quit ?"
2327                 answer = "XXX"
2328                 while prompt.find(answer) == -1:
2329                     answer = utils.our_raw_input(prompt)
2330                     m = re_default_answer.search(prompt)
2331                     if answer == "":
2332                         answer = m.group(1)
2333                     answer = answer[:1].upper()
2334             os.unlink(temp_filename)
2335             if answer == 'A':
2336                 return 1
2337             elif answer == 'Q':
2338                 sys.exit(0)
2339
2340         print "Rejecting.\n"
2341
2342         cnf = Config()
2343
2344         reason_filename = self.pkg.changes_file[:-8] + ".reason"
2345         reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2346
2347         # Move all the files into the reject directory
2348         reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2349         self.force_reject(reject_files)
2350
2351         # If we fail here someone is probably trying to exploit the race
2352         # so let's just raise an exception ...
2353         if os.path.exists(reason_filename):
2354             os.unlink(reason_filename)
2355         reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2356
2357         rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
2358
2359         self.update_subst()
2360         if not manual:
2361             self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2362             self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2363             self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2364             os.write(reason_fd, reject_message)
2365             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2366         else:
2367             # Build up the rejection email
2368             user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2369             self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2370             self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2371             self.Subst["__REJECT_MESSAGE__"] = ""
2372             self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2373             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2374             # Write the rejection email out as the <foo>.reason file
2375             os.write(reason_fd, reject_mail_message)
2376
2377         del self.Subst["__REJECTOR_ADDRESS__"]
2378         del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2379         del self.Subst["__CC__"]
2380
2381         os.close(reason_fd)
2382
2383         # Send the rejection mail
2384         utils.send_mail(reject_mail_message)
2385
2386         if self.logger:
2387             self.logger.log(["rejected", self.pkg.changes_file])
2388
2389         return 0
2390
2391     ################################################################################
2392     def in_override_p(self, package, component, suite, binary_type, filename, session):
2393         """
2394         Check if a package already has override entries in the DB
2395
2396         @type package: string
2397         @param package: package name
2398
2399         @type component: string
2400         @param component: component name
2401
2402         @type suite: string
2403         @param suite: suite name
2404
2405         @type binary_type: string
2406         @param binary_type: type of the package
2407
2408         @type filename: string
2409         @param filename: filename we check
2410
2411         @return: the database result. But no one cares anyway.
2412
2413         """
2414
2415         cnf = Config()
2416
2417         if binary_type == "": # must be source
2418             file_type = "dsc"
2419         else:
2420             file_type = binary_type
2421
2422         # Override suite name; used for example with proposed-updates
2423         oldsuite = get_suite(suite, session)
2424         if oldsuite is not None and oldsuite.overridesuite:
2425             suite = oldsuite.overridesuite
2426
2427         result = get_override(package, suite, component, file_type, session)
2428
2429         # If checking for a source package fall back on the binary override type
2430         if file_type == "dsc" and len(result) < 1:
2431             result = get_override(package, suite, component, ['deb', 'udeb'], session)
2432
2433         # Remember the section and priority so we can check them later if appropriate
2434         if len(result) > 0:
2435             result = result[0]
2436             self.pkg.files[filename]["override section"] = result.section.section
2437             self.pkg.files[filename]["override priority"] = result.priority.priority
2438             return result
2439
2440         return None
2441
2442     ################################################################################
2443     def get_anyversion(self, sv_list, suite):
2444         """
2445         @type sv_list: list
2446         @param sv_list: list of (suite, version) tuples to check
2447
2448         @type suite: string
2449         @param suite: suite name
2450
2451         Description: Returns the highest version in sv_list found in the given suite or any suite it enhances.
2452         """
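        # A worked example (invented data): with
        #   sv_list = [("unstable", "1.0-1"), ("experimental", "1.1-1")]
        # and no Enhances entry configured for "unstable",
        # get_anyversion(sv_list, "unstable") returns "1.0-1".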
2453         Cnf = Config()
2454         anyversion = None
2455         anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
2456         for (s, v) in sv_list:
2457             if s in [ x.lower() for x in anysuite ]:
2458                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2459                     anyversion = v
2460
2461         return anyversion
2462
2463     ################################################################################
2464
2465     def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2466         """
2467         @type sv_list: list
2468         @param sv_list: list of (suite, version) tuples to check
2469
2470         @type filename: string
2471         @param filename: filename of the uploaded package (used in reject messages)
2472
2473         @type new_version: string
2474         @param new_version: version of the uploaded package
2475
2476         Ensure versions are newer than existing packages in target
2477         suites and that cross-suite version checking rules as
2478         set out in the conf file are satisfied.
2479         """
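        # The conf rules this enforces look roughly like the following
        # (apt.conf style; suite names and values invented):
        #
        #   Suite::unstable::VersionChecks {
        #     MustBeNewerThan { stable; testing; };
        #     MustBeOlderThan { experimental; };
        #   };
        #
        # i.e. an upload targeted at unstable is rejected if its version is
        # <= the one in stable/testing, or >= the one in experimental
        # (unless the propagation logic below can save it).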
2480
2481         cnf = Config()
2482
2483         # Check versions for each target suite
2484         for target_suite in self.pkg.changes["distribution"].keys():
2485             must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
2486             must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
2487
2488             # Enforce "must be newer than target suite" even if conffile omits it
2489             if target_suite not in must_be_newer_than:
2490                 must_be_newer_than.append(target_suite)
2491
2492             for (suite, existent_version) in sv_list:
2493                 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
2494
2495                 if suite in must_be_newer_than and sourceful and vercmp < 1:
2496                     self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2497
2498                 if suite in must_be_older_than and vercmp > -1:
2499                     cansave = 0
2500
2501                     if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2502                         # we really use the other suite, ignoring the conflicting one ...
2503                         addsuite = self.pkg.changes["distribution-version"][suite]
2504
2505                         add_version = self.get_anyversion(sv_list, addsuite)
2506                         target_version = self.get_anyversion(sv_list, target_suite)
2507
2508                         if not add_version:
2509                             # not add_version can only happen if we map to a suite
2510                             # that doesn't enhance the suite we're propup'ing from.
2511                             # so "propup-ver x a b c; map a d" is a problem only if
2512                             # d doesn't enhance a.
2513                             #
2514                             # i think we could always propagate in this case, rather
2515                             # than complaining. either way, this isn't a REJECT issue
2516                             #
2517                             # And - we really should complain to the dorks who configured dak
                            self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyway" % (suite, addsuite))
2519                             self.pkg.changes.setdefault("propdistribution", {})
2520                             self.pkg.changes["propdistribution"][addsuite] = 1
2521                             cansave = 1
2522                         elif not target_version:
                            # not target_version is true when the package is NEW
                            # we could just stick with the "...old version..." REJECT
                            # for this, I think.
                            self.rejects.append("Won't propagate NEW packages.")
                        elif apt_pkg.VersionCompare(new_version, add_version) < 0:
                            # propagation would be redundant. no need to reject though.
                            self.warnings.append("ignoring version conflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
                            cansave = 1
                        elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
                             apt_pkg.VersionCompare(add_version, target_version) >= 0:
                            # propagate!!
                            self.warnings.append("Propagating upload to %s" % (addsuite))
2535                             self.pkg.changes.setdefault("propdistribution", {})
2536                             self.pkg.changes["propdistribution"][addsuite] = 1
2537                             cansave = 1
2538
2539                     if not cansave:
2540                         self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2541
2542     ################################################################################
2543     def check_binary_against_db(self, filename, session):
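        """
        Reject the upload if the binary's version violates the cross-suite
        version rules for its target suites, or if the same
        package/version/architecture already exists in the archive.
        """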
2544         # Ensure version is sane
2545         self.cross_suite_version_check( \
2546             get_suite_version_by_package(self.pkg.files[filename]["package"], \
2547                 self.pkg.files[filename]["architecture"], session),
2548             filename, self.pkg.files[filename]["version"], sourceful=False)
2549
2550         # Check for any existing copies of the file
2551         q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2552         q = q.filter_by(version=self.pkg.files[filename]["version"])
2553         q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2554
2555         if q.count() > 0:
2556             self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
2557
2558     ################################################################################
2559
2560     def check_source_against_db(self, filename, session):
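        """
        Reject the upload if the source version violates the cross-suite
        version rules for its target suites (sourceful upload).
        """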
2561         source = self.pkg.dsc.get("source")
2562         version = self.pkg.dsc.get("version")
2563
2564         # Ensure version is sane
2565         self.cross_suite_version_check( \
2566             get_suite_version_by_source(source, session), filename, version,
2567             sourceful=True)
2568
2569     ################################################################################
2570     def check_dsc_against_db(self, filename, session):
2571         """
2572
2573         @warning: NB: this function can remove entries from the 'files' index [if
2574          the orig tarball is a duplicate of the one in the archive]; if
2575          you're iterating over 'files' and call this function as part of
2576          the loop, be sure to add a check to the top of the loop to
2577          ensure you haven't just tried to dereference the deleted entry.
2578
2579         """
2580
2581         Cnf = Config()
2582         self.pkg.orig_files = {} # XXX: do we need to clear it?
2583         orig_files = self.pkg.orig_files
2584
2585         # Try and find all files mentioned in the .dsc.  This has
2586         # to work harder to cope with the multiple possible
2587         # locations of an .orig.tar.gz.
2588         # The ordering on the select is needed to pick the newest orig
2589         # when it exists in multiple places.
2590         for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2591             found = None
2592             if self.pkg.files.has_key(dsc_name):
2593                 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2594                 actual_size = int(self.pkg.files[dsc_name]["size"])
2595                 found = "%s in incoming" % (dsc_name)
2596
2597                 # Check the file does not already exist in the archive
2598                 ql = get_poolfile_like_name(dsc_name, session)
2599
                # Keep only pool files whose name ends with dsc_name.
                # (Removing entries from a list while iterating over it
                # would skip elements.)
                ql = [i for i in ql if i.filename.endswith(dsc_name)]
2604
2605                 # "[dak] has not broken them.  [dak] has fixed a
2606                 # brokenness.  Your crappy hack exploited a bug in
                # the old dinstall."
2608                 #
2609                 # "(Come on!  I thought it was always obvious that
2610                 # one just doesn't release different files with
2611                 # the same name and version.)"
2612                 #                        -- ajk@ on d-devel@l.d.o
2613
2614                 if len(ql) > 0:
2615                     # Ignore exact matches for .orig.tar.gz
2616                     match = 0
2617                     if re_is_orig_source.match(dsc_name):
2618                         for i in ql:
2619                             if self.pkg.files.has_key(dsc_name) and \
2620                                int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2621                                self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2622                                 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2623                                 # TODO: Don't delete the entry, just mark it as not needed
2624                                 # This would fix the stupidity of changing something we often iterate over
2625                                 # whilst we're doing it
2626                                 del self.pkg.files[dsc_name]
2627                                 dsc_entry["files id"] = i.file_id
2628                                 if not orig_files.has_key(dsc_name):
2629                                     orig_files[dsc_name] = {}
2630                                 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2631                                 match = 1
2632
2633                                 # Don't bitch that we couldn't find this file later
2634                                 try:
2635                                     self.later_check_files.remove(dsc_name)
2636                                 except ValueError:
2637                                     pass
2638
2639
2640                     if not match:
2641                         self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2642
2643             elif re_is_orig_source.match(dsc_name):
2644                 # Check in the pool
2645                 ql = get_poolfile_like_name(dsc_name, session)
2646
                # Keep only pool files whose name ends with dsc_name
                # (again avoiding removal from a list while iterating over it).
                # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
                ql = [i for i in ql if i.filename.endswith(dsc_name)]
2652
2653                 if len(ql) > 0:
2654                     # Unfortunately, we may get more than one match here if,
2655                     # for example, the package was in potato but had an -sa
2656                     # upload in woody.  So we need to choose the right one.
2657
2658                     # default to something sane in case we don't match any or have only one
2659                     x = ql[0]
2660
2661                     if len(ql) > 1:
2662                         for i in ql:
2663                             old_file = os.path.join(i.location.path, i.filename)
2664                             old_file_fh = utils.open_file(old_file)
2665                             actual_md5 = apt_pkg.md5sum(old_file_fh)
2666                             old_file_fh.close()
2667                             actual_size = os.stat(old_file)[stat.ST_SIZE]
2668                             if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2669                                 x = i
2670
                    old_file = os.path.join(x.location.path, x.filename)   # use the chosen match, not the loop variable
2672                     old_file_fh = utils.open_file(old_file)
2673                     actual_md5 = apt_pkg.md5sum(old_file_fh)
2674                     old_file_fh.close()
2675                     actual_size = os.stat(old_file)[stat.ST_SIZE]
2676                     found = old_file
2677                     suite_type = x.location.archive_type
2678                     # need this for updating dsc_files in install()
2679                     dsc_entry["files id"] = x.file_id
2680                     # See install() in process-accepted...
2681                     if not orig_files.has_key(dsc_name):
2682                         orig_files[dsc_name] = {}
2683                     orig_files[dsc_name]["id"] = x.file_id
2684                     orig_files[dsc_name]["path"] = old_file
2685                     orig_files[dsc_name]["location"] = x.location.location_id
2686                 else:
2687                     # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2688                     # Not there? Check the queue directories...
2689                     for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2690                         if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2691                             continue
2692                         in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2693                         if os.path.exists(in_otherdir):
2694                             in_otherdir_fh = utils.open_file(in_otherdir)
2695                             actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2696                             in_otherdir_fh.close()
2697                             actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2698                             found = in_otherdir
2699                             if not orig_files.has_key(dsc_name):
2700                                 orig_files[dsc_name] = {}
2701                             orig_files[dsc_name]["path"] = in_otherdir
2702
2703                     if not found:
2704                         self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2705                         continue
2706             else:
2707                 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2708                 continue
2709             if actual_md5 != dsc_entry["md5sum"]:
2710                 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2711             if actual_size != int(dsc_entry["size"]):
2712                 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2713
2714     ################################################################################
2715     # This is used by process-new and process-holding to recheck a changes file
2716     # at the time we're running.  It mainly wraps various other internal functions
2717     # and is similar to accepted_checks - these should probably be tidied up
2718     # and combined
2719     def recheck(self, session):
2720         cnf = Config()
2721         for f in self.pkg.files.keys():
            # The .orig.tar.gz can disappear out from under us if it's a
2723             # duplicate of one in the archive.
2724             if not self.pkg.files.has_key(f):
2725                 continue
2726
2727             entry = self.pkg.files[f]
2728
2729             # Check that the source still exists
2730             if entry["type"] == "deb":
2731                 source_version = entry["source version"]
2732                 source_package = entry["source package"]
2733                 if not self.pkg.changes["architecture"].has_key("source") \
2734                    and not source_exists(source_package, source_version, \
2735                     suites = self.pkg.changes["distribution"].keys(), session = session):
2736                     source_epochless_version = re_no_epoch.sub('', source_version)
2737                     dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2738                     found = False
2739                     for q in ["Embargoed", "Unembargoed", "Newstage"]:
2740                         if cnf.has_key("Dir::Queue::%s" % (q)):
2741                             if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
2742                                 found = True
2743                     if not found:
2744                         self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2745
2746             # Version and file overwrite checks
2747             if entry["type"] == "deb":
2748                 self.check_binary_against_db(f, session)
2749             elif entry["type"] == "dsc":
2750                 self.check_source_against_db(f, session)
2751                 self.check_dsc_against_db(f, session)
2752
2753     ################################################################################
2754     def accepted_checks(self, overwrite_checks, session):
2755         # Recheck anything that relies on the database; since that's not
2756         # frozen between accept and our run time when called from p-a.
2757
2758         # overwrite_checks is set to False when installing to stable/oldstable
2759
        propagate = {}
        nopropagate = {}
2762
2763         # Find the .dsc (again)
2764         dsc_filename = None
2765         for f in self.pkg.files.keys():
2766             if self.pkg.files[f]["type"] == "dsc":
2767                 dsc_filename = f
2768
2769         for checkfile in self.pkg.files.keys():
            # The .orig.tar.gz can disappear out from under us if it's a
2771             # duplicate of one in the archive.
2772             if not self.pkg.files.has_key(checkfile):
2773                 continue
2774
2775             entry = self.pkg.files[checkfile]
2776
2777             # Check that the source still exists
2778             if entry["type"] == "deb":
2779                 source_version = entry["source version"]
2780                 source_package = entry["source package"]
2781                 if not self.pkg.changes["architecture"].has_key("source") \
2782                    and not source_exists(source_package, source_version, \
2783                     suites = self.pkg.changes["distribution"].keys(), \
2784                     session = session):
2785                     self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2786
2787             # Version and file overwrite checks
2788             if overwrite_checks:
2789                 if entry["type"] == "deb":
2790                     self.check_binary_against_db(checkfile, session)
2791                 elif entry["type"] == "dsc":
2792                     self.check_source_against_db(checkfile, session)
2793                     self.check_dsc_against_db(dsc_filename, session)
2794
            # propagate if the package is in the override tables:
            for suite in self.pkg.changes.get("propdistribution", {}).keys():
                if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
                    propagate[suite] = 1
                else:
                    nopropagate[suite] = 1
2801
        for suite in propagate.keys():
            if suite in nopropagate:
2804                 continue
2805             self.pkg.changes["distribution"][suite] = 1
2806
        for checkfile in self.pkg.files.keys():
            # Re-bind entry for this file; 'entry' would otherwise be stale
            # from the last iteration of the loop above.
            entry = self.pkg.files[checkfile]
            # Check the package is still in the override tables
            for suite in self.pkg.changes["distribution"].keys():
                if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2811                     self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2812
2813     ################################################################################
2814     # If any file of an upload has a recent mtime then chances are good
2815     # the file is still being uploaded.
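    # (Dinstall::SkipTime is in seconds: a file modified more recently than
    # that many seconds ago makes the whole upload count as too new.)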
2816
2817     def upload_too_new(self):
2818         cnf = Config()
2819         too_new = False
2820         # Move back to the original directory to get accurate time stamps
2821         cwd = os.getcwd()
2822         os.chdir(self.pkg.directory)
2823         file_list = self.pkg.files.keys()
2824         file_list.extend(self.pkg.dsc_files.keys())
2825         file_list.append(self.pkg.changes_file)
2826         for f in file_list:
2827             try:
2828                 last_modified = time.time()-os.path.getmtime(f)
2829                 if last_modified < int(cnf["Dinstall::SkipTime"]):
2830                     too_new = True
2831                     break
            except OSError:
                # The file vanished out from under us; ignore it
                pass
2834
2835         os.chdir(cwd)
2836         return too_new
2837
2838     def store_changelog(self):
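        """
        Store the changelog text of this upload in the changelogs_text
        table and link it from the changes table, unless an entry already
        exists for this source/version/architecture.
        """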
2839
2840         # Skip binary-only upload if it is not a bin-NMU
2841         if not self.pkg.changes['architecture'].has_key('source'):
2842             from daklib.regexes import re_bin_only_nmu
2843             if not re_bin_only_nmu.search(self.pkg.changes['version']):
2844                 return
2845
2846         session = DBConn().session()
2847
2848         # Check if upload already has a changelog entry
2849         query = """SELECT changelog_id FROM changes WHERE source = :source
2850                    AND version = :version AND architecture = :architecture AND changelog_id != 0"""
2851         if session.execute(query, {'source': self.pkg.changes['source'], \
2852                                    'version': self.pkg.changes['version'], \
2853                                    'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
2854             session.commit()
2855             return
2856
2857         # Add current changelog text into changelogs_text table, return created ID
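        # (INSERT ... RETURNING is PostgreSQL-specific, which is fine here:
        # dak runs on PostgreSQL.)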
2858         query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
2859         ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]
2860
2861         # Link ID to the upload available in changes table
2862         query = """UPDATE changes SET changelog_id = :id WHERE source = :source
2863                    AND version = :version AND architecture = :architecture"""
2864         session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
2865                                 'version': self.pkg.changes['version'], \
2866                                 'architecture': " ".join(self.pkg.changes['architecture'].keys())})
2867
2868         session.commit()