#!/usr/bin/env python
# vim:set et sw=4:

"""
Queue utility functions for dak

@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2001 - 2006 James Troup <james@nocrew.org>
@copyright: 2009, 2010  Joerg Jaspert <joerg@debian.org>
@license: GNU General Public License version 2 or later
"""

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

###############################################################################

import errno
import os
import stat
import sys
import time
import apt_inst
import apt_pkg
import utils
import commands
import shutil
import textwrap
from types import *
from sqlalchemy.sql.expression import desc
from sqlalchemy.orm.exc import NoResultFound

import yaml

from dak_exceptions import *
from changes import *
from regexes import *
from config import Config
from holding import Holding
from urgencylog import UrgencyLog
from dbconn import *
from summarystats import SummaryStats
from utils import parse_changes, check_dsc_files
from textutils import fix_maintainer
from binary import Binary
from lintian import parse_lintian_output, generate_reject_messages

# suppress some deprecation warnings in squeeze related to apt_pkg
# module
import warnings
warnings.filterwarnings('ignore', \
    "apt_pkg.ParseSection\(\) is deprecated. Please see apt_pkg\.TagSection\(\) for the replacement\.", \
    DeprecationWarning)
warnings.filterwarnings('ignore', \
    "Attribute 'Find' of the 'apt_pkg\.TagSection' object is deprecated, use 'find' instead\.", \
    DeprecationWarning)

###############################################################################

def get_type(f, session):
    """
    Get the file type of C{f}

    @type f: dict
    @param f: file entry from Changes object

    @type session: SQLA Session
    @param session: SQL Alchemy session object

    @rtype: string
    @return: filetype

    """
    # Determine the type
    if f.has_key("dbtype"):
        file_type = f["dbtype"]
    elif re_source_ext.match(f["type"]):
        file_type = "dsc"
    else:
        file_type = f["type"]
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (file_type))

    # Validate the override type
    type_id = get_override_type(file_type, session)
    if type_id is None:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))

    return file_type

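# Illustrative use, assuming a hypothetical file entry shaped like the ones
# the Changes object builds (not output from a real run):
#
#     f = {"dbtype": "deb", "package": "foo", "type": "deb"}
#     get_type(f, session)   # -> "deb"
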
################################################################################

# Determine what parts in a .changes are NEW

def determine_new(filename, changes, files, warn=1, session = None):
    """
    Determine what parts in a C{changes} file are NEW.

    @type filename: str
    @param filename: changes filename

    @type changes: Upload.Pkg.changes dict
    @param changes: Changes dictionary

    @type files: Upload.Pkg.files dict
    @param files: Files dictionary

    @type warn: bool
    @param warn: Warn if overrides are added for (old)stable

    @rtype: tuple
    @return: (dictionary of NEW components, dictionary of BYHAND files)

    """
    # TODO: This should all use the database instead of parsing the changes
    # file again
    new = {}
    byhand = {}

    dbchg = get_dbchange(filename, session)
    if dbchg is None:
        print "Warning: cannot find changes file in database; won't check byhand"

    # Build up a list of potentially new things
    for name, f in files.items():
        # Keep a record of byhand elements
        if f["section"] == "byhand":
            byhand[name] = 1
            continue

        pkg = f["package"]
        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f, session)
        component = f["component"]

        if file_type == "dsc":
            priority = "source"

        if not new.has_key(pkg):
            new[pkg] = {}
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        else:
            old_type = new[pkg]["type"]
            if old_type != file_type:
                # source gets trumped by deb or udeb
                if old_type == "dsc":
                    new[pkg]["priority"] = priority
                    new[pkg]["section"] = section
                    new[pkg]["type"] = file_type
                    new[pkg]["component"] = component

        new[pkg]["files"].append(name)

        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]

    # Fix up the list of target suites
    cnf = Config()
    for suite in changes["suite"].keys():
        oldsuite = get_suite(suite, session)
        if not oldsuite:
            print "WARNING: Invalid suite %s found" % suite
            continue

        if oldsuite.overridesuite:
            newsuite = get_suite(oldsuite.overridesuite, session)

            if newsuite:
                print "INFORMATION: Using overrides from suite %s instead of suite %s" % (
                    oldsuite.overridesuite, suite)
                del changes["suite"][suite]
                changes["suite"][oldsuite.overridesuite] = 1
            else:
                print "WARNING: Told to use overridesuite %s for %s but it doesn't exist.  Bugger" % (
                    oldsuite.overridesuite, suite)

    # Check for unprocessed byhand files
    if dbchg is not None:
        for b in byhand.keys():
            # Find the file entry in the database
            found = False
            for f in dbchg.files:
                if f.filename == b:
                    found = True
                    # If it's processed, we can ignore it
                    if f.processed:
                        del byhand[b]
                    break

            if not found:
                print "Warning: Couldn't find BYHAND item %s in the database; assuming unprocessed" % (b)

    # Check for new stuff
    for suite in changes["suite"].keys():
        for pkg in new.keys():
            ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
            if len(ql) > 0:
                for file_entry in new[pkg]["files"]:
                    if files[file_entry].has_key("new"):
                        del files[file_entry]["new"]
                del new[pkg]

    if warn:
        for s in ['stable', 'oldstable']:
            if changes["suite"].has_key(s):
                print "WARNING: overrides will be added for %s!" % s
        for pkg in new.keys():
            if new[pkg].has_key("othercomponents"):
                print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])

    return new, byhand

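# For illustration only, a returned `new` dict might look like (hypothetical
# values, keyed by package name):
#
#     {"foo": {"priority": "optional", "section": "utils", "type": "deb",
#              "component": "main", "files": ["foo_1.0_amd64.deb"]}}
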
################################################################################

def check_valid(new, session = None):
    """
    Check if section and priority for NEW packages exist in the database.
    Additionally performs sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages cannot be udeb
      - source priority can only be assigned to dsc file types

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.

    """
    for pkg in new.keys():
        section_name = new[pkg]["section"]
        priority_name = new[pkg]["priority"]
        file_type = new[pkg]["type"]

        section = get_section(section_name, session)
        if section is None:
            new[pkg]["section id"] = -1
        else:
            new[pkg]["section id"] = section.section_id

        priority = get_priority(priority_name, session)
        if priority is None:
            new[pkg]["priority id"] = -1
        else:
            new[pkg]["priority id"] = priority.priority_id

        # Sanity checks
        di = section_name.find("debian-installer") != -1

        # If d-i, we must be udeb and vice-versa
        if     (di and file_type not in ("udeb", "dsc")) or \
           (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1

        # If dsc we need to be source and vice-versa
        if (priority_name == "source" and file_type != "dsc") or \
           (priority_name != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1

###############################################################################

# Used by Upload.check_timestamps
class TarTime(object):
    def __init__(self, future_cutoff, past_cutoff):
        self.reset()
        self.future_cutoff = future_cutoff
        self.past_cutoff = past_cutoff

    def reset(self):
        self.future_files = {}
        self.ancient_files = {}

    def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
        if MTime > self.future_cutoff:
            self.future_files[Name] = MTime
        if MTime < self.past_cutoff:
            self.ancient_files[Name] = MTime

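# Sketch of intended use, assuming the old python-apt extraction callback API
# used elsewhere in dak (e.g. from a check_timestamps-style caller):
#
#     tar = TarTime(future_cutoff, past_cutoff)
#     apt_inst.debExtract(deb_fh, tar.callback, "control.tar.gz")
#     if tar.future_files or tar.ancient_files:
#         pass  # reject: timestamps fall outside the allowed window
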
###############################################################################

def prod_maintainer(notes, upload):
    cnf = Config()

    # Here we prepare an editor and get them ready to prod...
    (fd, temp_filename) = utils.temp_filename()
    temp_file = os.fdopen(fd, 'w')
    for note in notes:
        temp_file.write(note.comment)
    temp_file.close()
    editor = os.environ.get("EDITOR","vi")
    answer = 'E'
    while answer == 'E':
        os.system("%s %s" % (editor, temp_filename))
        temp_fh = utils.open_file(temp_filename)
        prod_message = "".join(temp_fh.readlines())
        temp_fh.close()
        print "Prod message:"
        print utils.prefix_multi_line_string(prod_message,"  ",include_blank_lines=1)
        prompt = "[P]rod, Edit, Abandon, Quit ?"
        answer = "XXX"
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            m = re_default_answer.search(prompt)
            if answer == "":
                answer = m.group(1)
            answer = answer[:1].upper()
    os.unlink(temp_filename)
    if answer == 'A':
        return
    elif answer == 'Q':
        end()
        sys.exit(0)
    # Otherwise, do the prodding...
    user_email_address = utils.whoami() + " <%s>" % (
        cnf["Dinstall::MyAdminAddress"])

    Subst = upload.Subst

    Subst["__FROM_ADDRESS__"] = user_email_address
    Subst["__PROD_MESSAGE__"] = prod_message
    Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]

    prod_mail_message = utils.TemplateSubst(
        Subst,cnf["Dir::Templates"]+"/process-new.prod")

    # Send the prod mail
    utils.send_mail(prod_mail_message)

    print "Sent prodding message"

################################################################################

def edit_note(note, upload, session, trainee=False):
    # Write the current data to a temporary file
    (fd, temp_filename) = utils.temp_filename()
    editor = os.environ.get("EDITOR","vi")
    answer = 'E'
    while answer == 'E':
        os.system("%s %s" % (editor, temp_filename))
        temp_file = utils.open_file(temp_filename)
        newnote = temp_file.read().rstrip()
        temp_file.close()
        print "New Note:"
        print utils.prefix_multi_line_string(newnote,"  ")
        prompt = "[D]one, Edit, Abandon, Quit ?"
        answer = "XXX"
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            m = re_default_answer.search(prompt)
            if answer == "":
                answer = m.group(1)
            answer = answer[:1].upper()
    os.unlink(temp_filename)
    if answer == 'A':
        return
    elif answer == 'Q':
        end()
        sys.exit(0)

    comment = NewComment()
    comment.package = upload.pkg.changes["source"]
    comment.version = upload.pkg.changes["version"]
    comment.comment = newnote
    comment.author  = utils.whoami()
    comment.trainee = trainee
    session.add(comment)
    session.commit()

###############################################################################

# suite names DMs can upload to
dm_suites = ['unstable', 'experimental']

def get_newest_source(source, session):
    'returns the newest DBSource object in dm_suites'
    ## the most recent version of the package uploaded to unstable or
    ## experimental includes the field "DM-Upload-Allowed: yes" in the source
    ## section of its control file
    q = session.query(DBSource).filter_by(source = source). \
        filter(DBSource.suites.any(Suite.suite_name.in_(dm_suites))). \
        order_by(desc('source.version'))
    return q.first()

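# Illustrative call (hypothetical source package name):
#
#     src = get_newest_source('hello', session)
#     if src is not None:
#         print src.version   # newest version uploaded to unstable/experimental
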
def get_suite_version_by_source(source, session):
    'returns a list of tuples (suite_name, version) for source package'
    q = session.query(Suite.suite_name, DBSource.version). \
        join(Suite.sources).filter_by(source = source)
    return q.all()

def get_source_by_package_and_suite(package, suite_name, session):
    '''
    returns a DBSource query filtered by DBBinary.package and this package's
    suite_name
    '''
    return session.query(DBSource). \
        join(DBSource.binaries).filter_by(package = package). \
        join(DBBinary.suites).filter_by(suite_name = suite_name)

def get_suite_version_by_package(package, arch_string, session):
    '''
    returns a list of tuples (suite_name, version) for binary package and
    arch_string
    '''
    return session.query(Suite.suite_name, DBBinary.version). \
        join(Suite.binaries).filter_by(package = package). \
        join(DBBinary.architecture). \
        filter(Architecture.arch_string.in_([arch_string, 'all'])).all()

class Upload(object):
    """
    Everything that has to do with processing an upload.

    """
    def __init__(self):
        self.logger = None
        self.pkg = Changes()
        self.reset()

    ###########################################################################

    def reset (self):
        """ Reset a number of internal variables."""

        # Initialize the substitution template map
        cnf = Config()
        self.Subst = {}
        self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
        self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
        self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
        self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]

        self.rejects = []
        self.warnings = []
        self.notes = []

        self.later_check_files = []

        self.pkg.reset()

    def package_info(self):
        """
        Format various messages from this Upload to send to the maintainer.
        """

        msgs = (
            ('Reject Reasons', self.rejects),
            ('Warnings', self.warnings),
            ('Notes', self.notes),
        )

        msg = ''
        for title, messages in msgs:
            if messages:
                msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
        msg += '\n\n'

        return msg

    ###########################################################################
    def update_subst(self):
        """ Set up the per-package template substitution mappings """

        cnf = Config()

        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not self.pkg.changes.has_key("architecture") or not \
           isinstance(self.pkg.changes["architecture"], dict):
            self.pkg.changes["architecture"] = { "Unknown" : "" }

        # and maintainer2047 may not exist.
        if not self.pkg.changes.has_key("maintainer2047"):
            self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]

        self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
        self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if self.pkg.changes["architecture"].has_key("source") and \
           self.pkg.changes["changedby822"] != "" and \
           (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):

            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
        else:
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")

        # Process policy doesn't set the fingerprint field and I don't want to make it
        # do it for now as I don't want to have to deal with the case where we accepted
        # the package into PU-NEW, but the fingerprint has gone away from the keyring in
        # the meantime so the package will be remarked as rejectable.  Urgh.
        # TODO: Fix this properly
        if self.pkg.changes.has_key('fingerprint'):
            session = DBConn().session()
            fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
            if fpr and self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
                if self.pkg.changes.has_key("sponsoremail"):
                    self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
            session.close()

        if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
            self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if cnf.get("Dinstall::OverrideMaintainer"):
            self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
            self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]

        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
        self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
        self.Subst["__SUITE__"] = ", ".join(self.pkg.changes["distribution"])

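    # The mapping built above is later fed to utils.TemplateSubst together with
    # a template file, e.g. (template name as used by prod_maintainer above):
    #
    #     mail = utils.TemplateSubst(self.Subst,
    #                                cnf["Dir::Templates"] + "/process-new.prod")
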
    ###########################################################################
    def load_changes(self, filename):
        """
        Load a changes file and set up a dictionary around it. Also checks for
        mandatory fields within.

        @type filename: string
        @param filename: Changes filename, full path.

        @rtype: boolean
        @return: whether the changes file was valid or not.  We may want to
                 reject even if this is True (see what gets put in self.rejects).
                 This is simply to prevent us even trying things later which will
                 fail because we couldn't properly parse the file.
        """
        Cnf = Config()
        self.pkg.changes_file = filename

        # Parse the .changes file into a dictionary
        try:
            self.pkg.changes.update(parse_changes(filename))
        except CantOpenError:
            self.rejects.append("%s: can't read file." % (filename))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except ChangesUnicodeError:
            self.rejects.append("%s: changes file not proper utf-8" % (filename))
            return False

        # Parse the Files field from the .changes into another dictionary
        try:
            self.pkg.files.update(utils.build_file_list(self.pkg.changes))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (filename, format))
            return False

        # Check for mandatory fields
        for i in ("distribution", "source", "binary", "architecture",
                  "version", "maintainer", "files", "changes", "description"):
            if not self.pkg.changes.has_key(i):
                # Avoid undefined errors later
                self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
                return False

        # Strip a source version in brackets from the source field
        if re_strip_srcver.search(self.pkg.changes["source"]):
            self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])

        # Ensure the source field is a valid package name.
        if not re_valid_pkg_name.match(self.pkg.changes["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))

        # Split multi-value fields into a lower-level dictionary
        for i in ("architecture", "distribution", "binary", "closes"):
            o = self.pkg.changes.get(i, "")
            if o != "":
                del self.pkg.changes[i]

            self.pkg.changes[i] = {}

            for j in o.split():
                self.pkg.changes[i][j] = 1
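        # e.g. "Architecture: source amd64" becomes {"source": 1, "amd64": 1}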

        # Fix the Maintainer: field to be RFC822/2047 compatible
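        # e.g. "John Doe <jdoe@example.org>" (hypothetical) yields
        # maintainer822/maintainer2047 forms plus name and email parts;
        # the 2047 form is RFC2047-encoded if the name is non-ASCII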
        try:
            (self.pkg.changes["maintainer822"],
             self.pkg.changes["maintainer2047"],
             self.pkg.changes["maintainername"],
             self.pkg.changes["maintaineremail"]) = \
                   fix_maintainer (self.pkg.changes["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["maintainer"], msg))

        # ...likewise for the Changed-By: field if it exists.
        try:
            (self.pkg.changes["changedby822"],
             self.pkg.changes["changedby2047"],
             self.pkg.changes["changedbyname"],
             self.pkg.changes["changedbyemail"]) = \
                   fix_maintainer (self.pkg.changes.get("changed-by", ""))
        except ParseMaintError, msg:
            self.pkg.changes["changedby822"] = ""
            self.pkg.changes["changedby2047"] = ""
            self.pkg.changes["changedbyname"] = ""
            self.pkg.changes["changedbyemail"] = ""

            self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["changed-by"], msg))

        # Ensure all the values in Closes: are numbers
        if self.pkg.changes.has_key("closes"):
            for i in self.pkg.changes["closes"].keys():
                if re_isanum.match (i) == None:
                    self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))

        # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
        self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
        self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])
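        # e.g. version "1:2.3-4" -> chopversion "2.3-4" -> chopversion2 "2.3"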

        # Check the .changes is non-empty
        if not self.pkg.files:
            self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
            return False

        # Changes was syntactically valid even if we'll reject
        return True

    ###########################################################################

    def check_distributions(self):
        "Check and map the Distribution field"

        Cnf = Config()

        # Handle suite mappings
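        # (illustrative) entries take forms like "map stable proposed-updates",
        # "ignore some-suite" or "propup-version testing testing-proposed-updates";
        # the first token selects the mapping type handled below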
        for m in Cnf.ValueList("SuiteMappings"):
            args = m.split()
            mtype = args[0]
            if mtype == "map" or mtype == "silent-map":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    del self.pkg.changes["distribution"][source]
                    self.pkg.changes["distribution"][dest] = 1
                    if mtype != "silent-map":
                        self.notes.append("Mapping %s to %s." % (source, dest))
                if self.pkg.changes.has_key("distribution-version"):
                    if self.pkg.changes["distribution-version"].has_key(source):
                        self.pkg.changes["distribution-version"][source]=dest
            elif mtype == "map-unreleased":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    for arch in self.pkg.changes["architecture"].keys():
                        if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
                            self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
                            del self.pkg.changes["distribution"][source]
                            self.pkg.changes["distribution"][dest] = 1
                            break
            elif mtype == "ignore":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    del self.pkg.changes["distribution"][suite]
                    self.warnings.append("Ignoring %s as a target suite." % (suite))
            elif mtype == "reject":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    self.rejects.append("Uploads to %s are not accepted." % (suite))
            elif mtype == "propup-version":
                # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
                #
                # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
                if self.pkg.changes["distribution"].has_key(args[1]):
                    self.pkg.changes.setdefault("distribution-version", {})
                    for suite in args[2:]:
                        self.pkg.changes["distribution-version"][suite] = suite

        # Ensure there is (still) a target distribution
        if len(self.pkg.changes["distribution"].keys()) < 1:
            self.rejects.append("No valid distribution remaining.")

        # Ensure target distributions exist
        for suite in self.pkg.changes["distribution"].keys():
            if not Cnf.has_key("Suite::%s" % (suite)):
                self.rejects.append("Unknown distribution `%s'." % (suite))

    ###########################################################################

    def binary_file_checks(self, f, session):
        cnf = Config()
        entry = self.pkg.files[f]

        # Extract package control information
        deb_file = utils.open_file(f)
        try:
            control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
        except:
            self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
            deb_file.close()
            # Can't continue, none of the checks on control would work.
            return

        # Check for mandatory "Description:"
        deb_file.seek(0)
        try:
            apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
        except:
            self.rejects.append("%s: Missing Description in binary package" % (f))
            return

        deb_file.close()

        # Check for mandatory fields
        for field in [ "Package", "Architecture", "Version" ]:
            if control.Find(field) == None:
                # Can't continue
                self.rejects.append("%s: No %s field in control." % (f, field))
                return

        # Ensure the package name matches the one given in the .changes
        if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
            self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))

        # Validate the package field
        package = control.Find("Package")
        if not re_valid_pkg_name.match(package):
            self.rejects.append("%s: invalid package name '%s'." % (f, package))

        # Validate the version field
        version = control.Find("Version")
        if not re_valid_version.match(version):
            self.rejects.append("%s: invalid version number '%s'." % (f, version))

        # Ensure the architecture of the .deb is one we know about.
        default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
        architecture = control.Find("Architecture")
        upload_suite = self.pkg.changes["distribution"].keys()[0]

        if      architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session = session)] \
            and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session = session)]:
            self.rejects.append("Unknown architecture '%s'." % (architecture))

        # Ensure the architecture of the .deb is one of the ones
        # listed in the .changes.
        if not self.pkg.changes["architecture"].has_key(architecture):
            self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))

        # Sanity-check the Depends field
        depends = control.Find("Depends")
        if depends == '':
            self.rejects.append("%s: Depends field is empty." % (f))

        # Sanity-check the Provides field
        provides = control.Find("Provides")
        if provides:
            provide = re_spacestrip.sub('', provides)
            if provide == '':
                self.rejects.append("%s: Provides field is empty." % (f))
            prov_list = provide.split(",")
            for prov in prov_list:
                if not re_valid_pkg_name.match(prov):
                    self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))

        # Check the section & priority match those given in the .changes (non-fatal)
        if     control.Find("Section") and entry["section"] != "" \
           and entry["section"] != control.Find("Section"):
            self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
                                (f, control.Find("Section", ""), entry["section"]))
        if control.Find("Priority") and entry["priority"] != "" \
           and entry["priority"] != control.Find("Priority"):
            self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
                                (f, control.Find("Priority", ""), entry["priority"]))

        entry["package"] = package
        entry["architecture"] = architecture
        entry["version"] = version
        entry["maintainer"] = control.Find("Maintainer", "")

        if f.endswith(".udeb"):
            self.pkg.files[f]["dbtype"] = "udeb"
        elif f.endswith(".deb"):
            self.pkg.files[f]["dbtype"] = "deb"
        else:
            self.rejects.append("%s is neither a .deb nor a .udeb." % (f))

        entry["source"] = control.Find("Source", entry["package"])

        # Get the source version
        source = entry["source"]
        source_version = ""

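        # e.g. a Source field of "foo (1.0-1)" (hypothetical) yields
        # source "foo" and source_version "1.0-1" below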
        if source.find("(") != -1:
            m = re_extract_src_version.match(source)
            source = m.group(1)
            source_version = m.group(2)

        if not source_version:
            source_version = self.pkg.files[f]["version"]

        entry["source package"] = source
        entry["source version"] = source_version

        # Ensure the filename matches the contents of the .deb
        m = re_isadeb.match(f)
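        # e.g. "foo_1.2-3_amd64.deb" -> groups ("foo", "1.2-3", "amd64")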

        #  package name
        file_package = m.group(1)
        if entry["package"] != file_package:
            self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
                                (f, file_package, entry["dbtype"], entry["package"]))
        epochless_version = re_no_epoch.sub('', control.Find("Version"))

        #  version
        file_version = m.group(2)
        if epochless_version != file_version:
            self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
                                (f, file_version, entry["dbtype"], epochless_version))

        #  architecture
        file_architecture = m.group(3)
        if entry["architecture"] != file_architecture:
            self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
                                (f, file_architecture, entry["dbtype"], entry["architecture"]))

        # Check for existent source
        source_version = entry["source version"]
        source_package = entry["source package"]
        if self.pkg.changes["architecture"].has_key("source"):
            if source_version != self.pkg.changes["version"]:
                self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
                                    (source_version, f, self.pkg.changes["version"]))
        else:
            # Check in the SQL database
            if not source_exists(source_package, source_version, suites = \
                self.pkg.changes["distribution"].keys(), session = session):
                # Check in one of the other directories
                source_epochless_version = re_no_epoch.sub('', source_version)
                dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
                if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
                    entry["byhand"] = 1
                elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
                    entry["new"] = 1
                else:
                    dsc_file_exists = False
                    for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
                        if cnf.has_key("Dir::Queue::%s" % (myq)):
                            if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
                                dsc_file_exists = True
                                break

                    if not dsc_file_exists:
                        self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))

        # Check the version and for file overwrites
        self.check_binary_against_db(f, session)

        # Temporarily disable contents generation until we change the table storage layout
        #b = Binary(f)
        #b.scan_package()
        #if len(b.rejects) > 0:
        #    for j in b.rejects:
        #        self.rejects.append(j)

    def source_file_checks(self, f, session):
        entry = self.pkg.files[f]

        m = re_issource.match(f)
        if not m:
            return

        entry["package"] = m.group(1)
        entry["version"] = m.group(2)
        entry["type"] = m.group(3)

        # Ensure the source package name matches the Source field in the .changes
        if self.pkg.changes["source"] != entry["package"]:
            self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))

        # Ensure the source version matches the version in the .changes file
        if re_is_orig_source.match(f):
            changes_version = self.pkg.changes["chopversion2"]
        else:
            changes_version = self.pkg.changes["chopversion"]

        if changes_version != entry["version"]:
            self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))

        # Ensure the .changes lists source in the Architecture field
        if not self.pkg.changes["architecture"].has_key("source"):
            self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))

        # Check the signature of a .dsc file
        if entry["type"] == "dsc":
            # check_signature returns either:
            #  (None, [list, of, rejects]) or (signature, [])
            (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
            for j in rejects:
                self.rejects.append(j)

        entry["architecture"] = "source"

    def per_suite_file_checks(self, f, suite, session):
        cnf = Config()
        entry = self.pkg.files[f]

        # Skip byhand
        if entry.has_key("byhand"):
            return

        # Check we have fields we need to do these checks
        oktogo = True
        for m in ['component', 'package', 'priority', 'size', 'md5sum']:
            if not entry.has_key(m):
                self.rejects.append("file '%s' does not have field %s set" % (f, m))
                oktogo = False

        if not oktogo:
            return

        # Handle component mappings
        for m in cnf.ValueList("ComponentMappings"):
            (source, dest) = m.split()
            if entry["component"] == source:
                entry["original component"] = source
                entry["component"] = dest

        # Ensure the component is valid for the target suite
        if cnf.has_key("Suite::%s::Components" % (suite)) and \
           entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
            self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
            return

        # Validate the component
        if not get_component(entry["component"], session):
            self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
            return

        # See if the package is NEW
        if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
            entry["new"] = 1

        # Validate the priority
        if entry["priority"].find('/') != -1:
            self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))

        # Determine the location
        location = cnf["Dir::Pool"]
        l = get_location(location, entry["component"], session=session)
        if l is None:
            self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
            entry["location id"] = -1
        else:
            entry["location id"] = l.location_id

        # Check the md5sum & size against existing files (if any)
        entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])

        found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
                                         entry["size"], entry["md5sum"], entry["location id"])

        if found is None:
            self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
        elif found is False and poolfile is not None:
            self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
        else:
            if poolfile is None:
                entry["files id"] = None
            else:
                entry["files id"] = poolfile.file_id

        # Check for packages that have moved from one component to another
        entry['suite'] = suite
        arch_list = [entry["architecture"], 'all']
        component = get_component_by_package_suite(self.pkg.files[f]['package'], \
            [suite], arch_list = arch_list, session = session)
        if component is not None:
            entry["othercomponents"] = component

    def check_files(self, action=True):
        file_keys = self.pkg.files.keys()
        holding = Holding()
        cnf = Config()

        if action:
            cwd = os.getcwd()
            os.chdir(self.pkg.directory)
            for f in file_keys:
                ret = holding.copy_to_holding(f)
                if ret is not None:
                    self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)

            os.chdir(cwd)

        # check we already know the changes file
        # [NB: this check must be done post-suite mapping]
        base_filename = os.path.basename(self.pkg.changes_file)

        session = DBConn().session()

        try:
            dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
            # if in the pool or in a queue other than unchecked, reject
            if (dbc.in_queue is None) \
                   or (dbc.in_queue is not None
                       and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
                self.rejects.append("%s file already known to dak" % base_filename)
        except NoResultFound, e:
            # not known, good
            pass

        has_binaries = False
        has_source = False

        for f, entry in self.pkg.files.items():
            # Ensure the file does not already exist in one of the accepted directories
            for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                if not cnf.has_key("Dir::Queue::%s" % (d)): continue
                if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
                    self.rejects.append("%s file already exists in the %s directory." % (f, d))

            if not re_taint_free.match(f):
                self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))

            # Check the file is readable
            if os.access(f, os.R_OK) == 0:
                # When running in -n, copy_to_holding() won't have
                # generated the reject_message, so we need to.
                if action:
                    if os.path.exists(f):
                        self.rejects.append("Can't read `%s'. [permission denied]" % (f))
                    else:
                        # Don't directly reject, mark to check later to deal with orig's
                        # we can find in the pool
                        self.later_check_files.append(f)
                entry["type"] = "unreadable"
                continue

            # If it's byhand skip remaining checks
            if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Checks for a binary package...
            elif re_isadeb.match(f):
                has_binaries = True
                entry["type"] = "deb"

                # This routine appends to self.rejects/warnings as appropriate
                self.binary_file_checks(f, session)

            # Checks for a source package...
            elif re_issource.match(f):
                has_source = True

                # This routine appends to self.rejects/warnings as appropriate
                self.source_file_checks(f, session)

            # Not a binary or source package?  Assume byhand...
            else:
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Per-suite file checks
            entry["oldfiles"] = {}
            for suite in self.pkg.changes["distribution"].keys():
                self.per_suite_file_checks(f, suite, session)

        session.close()

        # If the .changes file says it has source, it must have source.
        if self.pkg.changes["architecture"].has_key("source"):
            if not has_source:
                self.rejects.append("no source found and Architecture line in changes mentions source.")

            if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
                self.rejects.append("source only uploads are not supported.")

    ###########################################################################
    def check_dsc(self, action=True, session=None):
        """Returns bool indicating whether or not the source changes are valid"""
        # Ensure there is source to check
        if not self.pkg.changes["architecture"].has_key("source"):
            return True

        # Find the .dsc
        dsc_filename = None
        for f, entry in self.pkg.files.items():
            if entry["type"] == "dsc":
                if dsc_filename:
                    self.rejects.append("can not process a .changes file with multiple .dsc's.")
                    return False
                else:
                    dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            self.rejects.append("source uploads must contain a dsc file")
            return False

        # Parse the .dsc file
        try:
            self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1, dsc_file=1))
        except CantOpenError:
            # if not -n copy_to_holding() will have done this for us...
            if not action:
                self.rejects.append("%s: can't read file." % (dsc_filename))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
        except InvalidDscError, line:
            self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
        except ChangesUnicodeError:
            self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))

        # Build up the file list of files mentioned by the .dsc
        try:
            self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
        except NoFilesFieldError:
            self.rejects.append("%s: no Files: field." % (dsc_filename))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
            return False

        # Enforce mandatory fields
        for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
            if not self.pkg.dsc.has_key(i):
                self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
                return False

        # Validate the source and version fields
        if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
        if not re_valid_version.match(self.pkg.dsc["version"]):
            self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))

        # Only a limited list of source formats are allowed in each suite
        for dist in self.pkg.changes["distribution"].keys():
            allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
            if self.pkg.dsc["format"] not in allowed:
                self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))

        # Validate the Maintainer field
        try:
            # We ignore the return value
            fix_maintainer(self.pkg.dsc["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                                 % (dsc_filename, self.pkg.dsc["maintainer"], msg))

        # Validate the build-depends field(s)
        for field_name in [ "build-depends", "build-depends-indep" ]:
            field = self.pkg.dsc.get(field_name)
            if field:
                # Have apt try to parse them...
                try:
                    apt_pkg.ParseSrcDepends(field)
                except:
                    self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))

        # Ensure the version number in the .dsc matches the version number in the .changes
        epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        changes_version = self.pkg.files[dsc_filename]["version"]

        if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
            self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))

        # Ensure the Files field contains only what's expected
        self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))

        # Ensure source is newer than existing source in target suites
        session = DBConn().session()
        self.check_source_against_db(dsc_filename, session)
        self.check_dsc_against_db(dsc_filename, session)

        dbchg = get_dbchange(self.pkg.changes_file, session)

        # Finally, check if we're missing any files
        for f in self.later_check_files:
            print 'XXX: %s' % f
            # Check if we've already processed this file if we have a dbchg object
            ok = False
            if dbchg:
                for pf in dbchg.files:
                    if pf.filename == f and pf.processed:
                        self.notes.append('%s was already processed so we can go ahead' % f)
                        ok = True
                        del self.pkg.files[f]
            if not ok:
                self.rejects.append("Could not find file %s referenced in changes" % f)

        session.close()

        return True

    ###########################################################################

    def get_changelog_versions(self, source_dir):
        """Extracts the source package and (optionally) grabs the
        version history out of debian/changelog for the BTS."""
1215
1216         cnf = Config()
1217
1218         # Find the .dsc (again)
1219         dsc_filename = None
1220         for f in self.pkg.files.keys():
1221             if self.pkg.files[f]["type"] == "dsc":
1222                 dsc_filename = f
1223
1224         # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1225         if not dsc_filename:
1226             return
1227
1228         # Create a symlink mirror of the source files in our temporary directory
1229         for f in self.pkg.files.keys():
1230             m = re_issource.match(f)
1231             if m:
1232                 src = os.path.join(source_dir, f)
1233                 # If a file is missing for whatever reason, give up.
1234                 if not os.path.exists(src):
1235                     return
1236                 ftype = m.group(3)
1237                 if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
1238                    self.pkg.orig_files[f].has_key("path"):
1239                     continue
1240                 dest = os.path.join(os.getcwd(), f)
1241                 os.symlink(src, dest)
1242
1243         # If the orig files are not a part of the upload, create symlinks to the
1244         # existing copies.
1245         for orig_file in self.pkg.orig_files.keys():
1246             if not self.pkg.orig_files[orig_file].has_key("path"):
1247                 continue
1248             dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
1249             os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
1250
1251         # Extract the source
1252         cmd = "dpkg-source -sn -x %s" % (dsc_filename)
1253         (result, output) = commands.getstatusoutput(cmd)
1254         if (result != 0):
1255             self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
1256             self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
1257             return
1258
1259         if not cnf.Find("Dir::Queue::BTSVersionTrack"):
1260             return
1261
1262         # Get the upstream version
1263         upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1264         if re_strip_revision.search(upstr_version):
1265             upstr_version = re_strip_revision.sub('', upstr_version)
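             # e.g. "1:2.30-4" -> "2.30-4" (epoch stripped) -> "2.30"
             # (Debian revision stripped); version string is illustrative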
1266
1267         # Ensure the changelog file exists
1268         changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
1269         if not os.path.exists(changelog_filename):
1270             self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
1271             return
1272
1273         # Parse the changelog
1274         self.pkg.dsc["bts changelog"] = ""
1275         changelog_file = utils.open_file(changelog_filename)
1276         for line in changelog_file.readlines():
1277             m = re_changelog_versions.match(line)
1278             if m:
1279                 self.pkg.dsc["bts changelog"] += line
1280         changelog_file.close()
1281
1282         # Check we found at least one revision in the changelog
1283         if not self.pkg.dsc["bts changelog"]:
1284             self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1285
1286     def check_source(self):
1287         # Bail out if there's no source
1289         if not self.pkg.changes["architecture"].has_key("source"):
1290             return
1291
1292         tmpdir = utils.temp_dirname()
1293
1294         # Move into the temporary directory
1295         cwd = os.getcwd()
1296         os.chdir(tmpdir)
1297
1298         # Get the changelog version history
1299         self.get_changelog_versions(cwd)
1300
1301         # Move back and cleanup the temporary tree
1302         os.chdir(cwd)
1303
1304         try:
1305             shutil.rmtree(tmpdir)
1306         except OSError, e:
1307             if e.errno != errno.EACCES:
1308                 # Not a permissions problem, so give up
1309                 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1310
1311             self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1312             # We probably have u-r or u-w directories so chmod everything
1313             # and try again.
1314             cmd = "chmod -R u+rwx %s" % (tmpdir)
1315             result = os.system(cmd)
1316             if result != 0:
1317                 utils.fubar("'%s' failed with result %s." % (cmd, result))
1318             shutil.rmtree(tmpdir)
1319         except Exception, e:
1320             # Report the unexpected failure, including the exception itself
1321             utils.fubar("%s: couldn't remove tmp dir for source tree: %s" % (self.pkg.dsc["source"], e))
1322
1323     ###########################################################################
1324     def ensure_hashes(self):
1325         # Make sure we recognise the format of the Files: field in the .changes
1326         format = self.pkg.changes.get("format", "0.0").split(".", 1)
1327         if len(format) == 2:
1328             format = int(format[0]), int(format[1])
1329         else:
1330             format = int(float(format[0])), 0
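             # Illustrative parsing (example values assumed, not from a real
             # upload): "1.8" -> (1, 8); a bare "1" -> (1, 0)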
1331
1332         # We need to deal with the original changes blob, as the fields we need
1333         # might not be in the changes dict serialised into the .dak anymore.
1334         orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1335
1336         # Copy the checksums over to the current changes dict.  This will keep
1337         # the existing modifications to it intact.
1338         for field in orig_changes:
1339             if field.startswith('checksums-'):
1340                 self.pkg.changes[field] = orig_changes[field]
1341
1342         # Check for unsupported hashes
1343         for j in utils.check_hash_fields(".changes", self.pkg.changes):
1344             self.rejects.append(j)
1345
1346         for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1347             self.rejects.append(j)
1348
1349         # If the changes format predates a given hash, we calculate that
1350         # hash ourselves rather than requiring it in the changes file
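             # utils.known_hashes is assumed to yield tuples of the form
             # (hashname, hashfunc, (major, minor)) -- a sketch of its shape
             # inferred from the loop below, e.g. ("sha1", apt_pkg.sha1sum, (1, 8))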
1351         for hashname, hashfunc, version in utils.known_hashes:
1352             # TODO: Move _ensure_changes_hash into this class
1353             for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1354                 self.rejects.append(j)
1355             if "source" in self.pkg.changes["architecture"]:
1356                 # TODO: Move _ensure_dsc_hash into this class
1357                 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1358                     self.rejects.append(j)
1359
1360     def check_hashes(self):
1361         for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1362             self.rejects.append(m)
1363
1364         for m in utils.check_size(".changes", self.pkg.files):
1365             self.rejects.append(m)
1366
1367         for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1368             self.rejects.append(m)
1369
1370         for m in utils.check_size(".dsc", self.pkg.dsc_files):
1371             self.rejects.append(m)
1372
1373         self.ensure_hashes()
1374
1375     ###########################################################################
1376
1377     def ensure_orig(self, target_dir='.', session=None):
1378         """
1379         Ensures that all orig files mentioned in the changes file are present
1380         in target_dir. If they do not exist, they are symlinked into place.
1381
1382         A list containing the symlinks that were created is returned (so
1383         they can be removed later).
1384         """
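             # Minimal usage sketch (illustrative; 'upload' is a hypothetical
             # Upload instance):
             #   symlinked = upload.ensure_orig(target_dir='.')
             #   ... run checks that need the orig tarballs ...
             #   for lnk in symlinked:
             #       os.unlink(lnk)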
1385
1386         symlinked = []
1387         cnf = Config()
1388
1389         for filename, entry in self.pkg.dsc_files.iteritems():
1390             if not re_is_orig_source.match(filename):
1391                 # File is not an orig; ignore
1392                 continue
1393
1394             if os.path.exists(filename):
1395                 # File exists, no need to continue
1396                 continue
1397
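                 # Helper: symlink path into target_dir only if its size and
                 # md5sum match the .dsc entry, recording the link so the
                 # caller can undo it later.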
1398             def symlink_if_valid(path):
1399                 f = utils.open_file(path)
1400                 md5sum = apt_pkg.md5sum(f)
1401                 f.close()
1402
1403                 fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
1404                 expected = (int(entry['size']), entry['md5sum'])
1405
1406                 if fingerprint != expected:
1407                     return False
1408
1409                 dest = os.path.join(target_dir, filename)
1410
1411                 os.symlink(path, dest)
1412                 symlinked.append(dest)
1413
1414                 return True
1415
1416             session_ = session
1417             if session is None:
1418                 session_ = DBConn().session()
1419
1420             found = False
1421
1422             # Look in the pool
1423             for poolfile in get_poolfile_like_name(filename, session_):
1424                 poolfile_path = os.path.join(
1425                     poolfile.location.path, poolfile.filename
1426                 )
1427
1428                 if symlink_if_valid(poolfile_path):
1429                     found = True
1430                     break
1431
1432             if session is None:
1433                 session_.close()
1434
1435             if found:
1436                 continue
1437
1438             # Look in some other queues for the file
1439             queues = ('New', 'Byhand', 'ProposedUpdates',
1440                 'OldProposedUpdates', 'Embargoed', 'Unembargoed')
1441
1442             for queue in queues:
1443                 if not cnf.get('Dir::Queue::%s' % queue):
1444                     continue
1445
1446                 queuefile_path = os.path.join(
1447                     cnf['Dir::Queue::%s' % queue], filename
1448                 )
1449
1450                 if not os.path.exists(queuefile_path):
1451                     # Does not exist in this queue
1452                     continue
1453
1454                 if symlink_if_valid(queuefile_path):
1455                     break
1456
1457         return symlinked
1458
1459     ###########################################################################
1460
1461     def check_lintian(self):
1462         """
1463         Extends self.rejects by checking the output of lintian against tags
1464         specified in Dinstall::LintianTags.
1465         """
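             # The tag file is expected to map categories to tag lists under a
             # top-level 'lintian' key; an illustrative sketch (category and
             # tag names are assumptions):
             #
             #   lintian:
             #     fatal:
             #       - binary-with-bad-dynamic-table
             #     nonfatal:
             #       - debian-control-file-is-a-symlink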
1466
1467         cnf = Config()
1468
1469         # Don't reject binary uploads
1470         if not self.pkg.changes['architecture'].has_key('source'):
1471             return
1472
1473         # Only check some distributions
1474         for dist in ('unstable', 'experimental'):
1475             if dist in self.pkg.changes['distribution']:
1476                 break
1477         else:
1478             return
1479
1480         # If we do not have a tagfile, don't do anything
1481         tagfile = cnf.get("Dinstall::LintianTags")
1482         if tagfile is None:
1483             return
1484
1485         # Parse the yaml file
1486         sourcefile = file(tagfile, 'r')
1487         sourcecontent = sourcefile.read()
1488         sourcefile.close()
1489
1490         try:
1491             lintiantags = yaml.load(sourcecontent)['lintian']
1492         except yaml.YAMLError, msg:
1493             utils.fubar("Cannot read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
1494             return
1495
1496         # Try and find all orig mentioned in the .dsc
1497         symlinked = self.ensure_orig()
1498
1499         # Setup the input file for lintian
1500         fd, temp_filename = utils.temp_filename()
1501         temptagfile = os.fdopen(fd, 'w')
1502         for tags in lintiantags.values():
1503             temptagfile.writelines(['%s\n' % x for x in tags])
1504         temptagfile.close()
1505
1506         try:
1507             cmd = "lintian --show-overrides --tags-from-file %s %s" % \
1508                 (temp_filename, self.pkg.changes_file)
1509
1510             result, output = commands.getstatusoutput(cmd)
1511         finally:
1512             # Remove our tempfile and any symlinks we created
1513             os.unlink(temp_filename)
1514
1515             for symlink in symlinked:
1516                 os.unlink(symlink)
1517
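             # lintian's exit status 2 is taken to signal an internal lintian
             # failure rather than tag findings (an assumption based on
             # lintian's documented exit codes)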
1518         if result == 2:
1519             utils.warn("lintian failed for %s [return code: %s]." % \
1520                 (self.pkg.changes_file, result))
1521             utils.warn(utils.prefix_multi_line_string(output, \
1522                 " [possible output:] "))
1523
1524         def log(*txt):
1525             if self.logger:
1526                 self.logger.log(
1527                     [self.pkg.changes_file, "check_lintian"] + list(txt)
1528                 )
1529
1530         # Generate messages
1531         parsed_tags = parse_lintian_output(output)
1532         self.rejects.extend(
1533             generate_reject_messages(parsed_tags, lintiantags, log=log)
1534         )
1535
1536     ###########################################################################
1537     def check_urgency(self):
1538         cnf = Config()
1539         if self.pkg.changes["architecture"].has_key("source"):
1540             if not self.pkg.changes.has_key("urgency"):
1541                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1542             self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1543             if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1544                 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1545                                      (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1546                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1547
1548     ###########################################################################
1549
1550     # Sanity check the time stamps of files inside debs.
1551     # [Files in the near future cause ugly warnings and extreme time
1552     #  travel can cause errors on extraction]
1553
1554     def check_timestamps(self):
1555         Cnf = Config()
1556
1557         future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1558         past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1559         tar = TarTime(future_cutoff, past_cutoff)
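             # e.g. with PastCutoffYear "1984" anything before 1984-01-01 is
             # ancient, and FutureTimeTravelGrace is a grace period in seconds
             # added to "now" ("1984" is an illustrative value)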
1560
1561         for filename, entry in self.pkg.files.items():
1562             if entry["type"] == "deb":
1563                 tar.reset()
1564                 try:
1565                     deb_file = utils.open_file(filename)
1566                     apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
1567                     deb_file.seek(0)
1568                     try:
1569                         apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
1570                     except SystemError, e:
1571                         # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
1572                         if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
1573                             raise
1574                         deb_file.seek(0)
1575                         apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")
1576
1577                     deb_file.close()
1578
1579                     future_files = tar.future_files.keys()
1580                     if future_files:
1581                         num_future_files = len(future_files)
1582                         future_file = future_files[0]
1583                         future_date = tar.future_files[future_file]
1584                         self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1585                                % (filename, num_future_files, future_file, time.ctime(future_date)))
1586
1587                     ancient_files = tar.ancient_files.keys()
1588                     if ancient_files:
1589                         num_ancient_files = len(ancient_files)
1590                         ancient_file = ancient_files[0]
1591                         ancient_date = tar.ancient_files[ancient_file]
1592                         self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1593                                % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1594                 except:
1595                     self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1596
1597     def check_if_upload_is_sponsored(self, uid_email, uid_name):
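             # An upload counts as sponsored when the signing key belongs to
             # neither the Maintainer nor the Changed-By party; an empty uid
             # name is conservatively treated as sponsored.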
1598         if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1599             sponsored = False
1600         elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1601             sponsored = False
1602             if uid_name == "":
1603                 sponsored = True
1604         else:
1605             sponsored = True
1606             if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1607                 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1608                 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1609                     self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1610                         self.pkg.changes["sponsoremail"] = uid_email
1611
1612         return sponsored
1613
1614
1615     ###########################################################################
1616     # check_signed_by_key checks
1617     ###########################################################################
1618
1619     def check_signed_by_key(self):
1620         """Ensure the .changes is signed by an authorized uploader."""
1621         session = DBConn().session()
1622
1623         # First of all we check that the person has proper upload permissions
1624         # and that this upload isn't blocked
1625         fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1626
1627         if fpr is None:
1628             self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1629             return
1630
1631         # TODO: Check that import-keyring adds UIDs properly
1632         if not fpr.uid:
1633             self.rejects.append("Cannot find uid for fingerprint %s.  Please contact ftpmaster@debian.org" % fpr.fingerprint)
1634             return
1635
1636         # Check that the fingerprint which uploaded has permission to do so
1637         self.check_upload_permissions(fpr, session)
1638
1639         # Check that this package is not in a transition
1640         self.check_transition(session)
1641
1642         session.close()
1643
1644
1645     def check_upload_permissions(self, fpr, session):
1646         # Check any one-off upload blocks
1647         self.check_upload_blocks(fpr, session)
1648
1649         # Start with DM as a special case
1650         # DM is a special case unfortunately, so we check it first
1651         # (keys with no source access get more access than DMs in one
1652         #  way; DMs can only upload for their packages whether source
1653         #  or binary, whereas keys with no access might be able to
1654         #  upload some binaries)
1655         if fpr.source_acl.access_level == 'dm':
1656             self.check_dm_upload(fpr, session)
1657         else:
1658             # Check source-based permissions for other types
1659             if self.pkg.changes["architecture"].has_key("source") and \
1660                 fpr.source_acl.access_level is None:
1661                 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1662                 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1663                 self.rejects.append(rej)
1664                 return
1665             # If not a DM, we allow full upload rights
1666             uid_email = "%s@debian.org" % (fpr.uid.uid)
1667             self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1668
1669
1670         # Check binary upload permissions
1671         # By this point we know that DMs can't have got here unless they
1672         # are allowed to deal with the package concerned so just apply
1673         # normal checks
1674         if fpr.binary_acl.access_level == 'full':
1675             return
1676
1677         # Otherwise we're in the map case
1678         tmparches = self.pkg.changes["architecture"].copy()
1679         tmparches.pop('source', None)
1680
1681         for bam in fpr.binary_acl_map:
1682             tmparches.pop(bam.architecture.arch_string, None)
1683
1684         if len(tmparches.keys()) > 0:
1685             if fpr.binary_reject:
1686                 rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1687                 rej += "\narchitectures involved are: %s" % ",".join(tmparches.keys())
1688                 self.rejects.append(rej)
1689             else:
1690                 # TODO: This is where we'll implement reject vs throw away binaries later
1691                 rej = "Uhm.  I'm meant to throw away the binaries now but that's not implemented yet"
1692                 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1693                 rej += "\nFingerprint: %s" % (fpr.fingerprint)
1694                 self.rejects.append(rej)
1695
1696
1697     def check_upload_blocks(self, fpr, session):
1698         """Check whether any upload blocks apply to this source, source
1699            version, uid / fpr combination"""
1700
1701         def block_rej_template(fb):
1702             rej = 'Manual upload block in place for package %s' % fb.source
1703             if fb.version is not None:
1704                 rej += ', version %s' % fb.version
1705             return rej
1706
1707         for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1708             # version is None if the block applies to all versions
1709             if fb.version is None or fb.version == self.pkg.changes['version']:
1710                 # Check both fpr and uid - either is enough to cause a reject
1711                 if fb.fpr is not None:
1712                     if fb.fpr.fingerprint == fpr.fingerprint:
1713                         self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1714                 if fb.uid is not None:
1715                     if fb.uid == fpr.uid:
1716                         self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1717
1718
1719     def check_dm_upload(self, fpr, session):
1720         # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
1721         ## none of the uploaded packages are NEW
1722         rej = False
1723         for f in self.pkg.files.keys():
1724             if self.pkg.files[f].has_key("byhand"):
1725                 self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
1726                 rej = True
1727             if self.pkg.files[f].has_key("new"):
1728                 self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
1729                 rej = True
1730
1731         if rej:
1732             return
1733
1734         r = get_newest_source(self.pkg.changes["source"], session)
1735
1736         if r is None:
1737             rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
1738             self.rejects.append(rej)
1739             return
1740
1741         if not r.dm_upload_allowed:
1742             rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
1743             self.rejects.append(rej)
1744             return
1745
1746         ## the Maintainer: field of the uploaded .changes file corresponds with
1747         ## the owner of the key used (ie, non-developer maintainers may not sponsor
1748         ## uploads)
1749         if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
1750             self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
1751
1752         ## the most recent version of the package uploaded to unstable or
1753         ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
1754         ## non-developer maintainers cannot NMU or hijack packages)
1755
1756         # srcuploaders includes the maintainer
1757         accept = False
1758         for sup in r.srcuploaders:
1759             (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
1760             # Eww - I hope we never have two people with the same name in Debian
1761             if email == fpr.uid.uid or name == fpr.uid.name:
1762                 accept = True
1763                 break
1764
1765         if not accept:
1766             self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
1767             return
1768
1769         ## none of the packages are being taken over from other source packages
1770         for b in self.pkg.changes["binary"].keys():
1771             for suite in self.pkg.changes["distribution"].keys():
1772                 for s in get_source_by_package_and_suite(b, suite, session):
1773                     if s.source != self.pkg.changes["source"]:
1774                         self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s.source, suite))
1775
1776
1777
1778     def check_transition(self, session):
1779         cnf = Config()
1780
1781         sourcepkg = self.pkg.changes["source"]
1782
1783         # No sourceful upload -> no need to do anything else, direct return
1784         # We also work with unstable uploads, not experimental or those going to some
1785         # proposed-updates queue
1786         if "source" not in self.pkg.changes["architecture"] or \
1787            "unstable" not in self.pkg.changes["distribution"]:
1788             return
1789
1790         # Also only check if there is a file defined (and existing) with
1791         # checks.
1792         transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1793         if transpath == "" or not os.path.exists(transpath):
1794             return
1795
1796         # Parse the yaml file
1797         sourcefile = file(transpath, 'r')
1798         sourcecontent = sourcefile.read()
1799         try:
1800             transitions = yaml.load(sourcecontent)
1801         except yaml.YAMLError, msg:
1802             # This shouldn't happen, there is a wrapper to edit the file which
1803             # checks it, but we prefer to be safe rather than end up rejecting
1804             # everything.
1805             utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1806             return
1807
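             # Illustrative shape of the transitions file (field names match
             # the lookups below; package, version and person names are made
             # up):
             #
             #   gnome3:
             #     reason: "GNOME 3 transition"
             #     source: gnome-desktop
             #     new: 3.0.0-1
             #     rm: "Some Release-Team Member"
             #     packages:
             #       - gnome-shell
             #       - mutter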
1808         # Now look through all defined transitions
1809         for trans in transitions:
1810             t = transitions[trans]
1811             source = t["source"]
1812             expected = t["new"]
1813
1814             # Will be None if nothing is in testing.
1815             current = get_source_in_suite(source, "testing", session)
1816             if current is not None:
1817                 compare = apt_pkg.VersionCompare(current.version, expected)
1818
1819             if current is None or compare < 0:
1820                 # This is still valid, the current version in testing is older than
1821                 # the new version we wait for, or there is none in testing yet
1822
1823                 # Check if the source we look at is affected by this.
1824                 if sourcepkg in t['packages']:
1825                     # The source is affected, let's reject it.
1826
1827                     rejectmsg = "%s: part of the %s transition.\n\n" % (
1828                         sourcepkg, trans)
1829
1830                     if current is not None:
1831                         currentlymsg = "at version %s" % (current.version)
1832                     else:
1833                         currentlymsg = "not present in testing"
1834
1835                     rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1836
1837                     rejectmsg += "\n".join(textwrap.wrap("""Your package
1838 is part of a testing transition designed to get %s migrated (it is
1839 currently %s, we need version %s).  This transition is managed by the
1840 Release Team, and %s is the Release-Team member responsible for it.
1841 Please mail debian-release@lists.debian.org or contact %s directly if you
1842 need further assistance.  You might want to upload to experimental until this
1843 transition is done."""
1844                             % (source, currentlymsg, expected, t["rm"], t["rm"])))
1845
1846                     self.rejects.append(rejectmsg)
1847                     return
1848
1849     ###########################################################################
1850     # End check_signed_by_key checks
1851     ###########################################################################
1852
1853     def build_summaries(self):
1854         """ Build a summary of changes the upload introduces. """
1855
1856         (byhand, new, summary, override_summary) = self.pkg.file_summary()
1857
1858         short_summary = summary
1859
1860         # This is for direport's benefit...
1861         f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1862
1863         if byhand or new:
1864             summary += "Changes: " + f
1865
1866         summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1867
1868         summary += self.announce(short_summary, 0)
1869
1870         return (summary, short_summary)
1871
1872     ###########################################################################
1873
1874     def close_bugs(self, summary, action):
1875         """
1876         Send mail to close bugs as instructed by the closes field in the changes file.
1877         Also add a line to summary if any work was done.
1878
1879         @type summary: string
1880         @param summary: summary text, as given by L{build_summaries}
1881
1882         @type action: bool
1883         @param action: If set to false, no real action will be taken.
1884
1885         @rtype: string
1886         @return: summary. If action was taken, extended by the list of closed bugs.
1887
1888         """
1889
1890         template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1891
1892         bugs = self.pkg.changes["closes"].keys()
1893
1894         if not bugs:
1895             return summary
1896
1897         bugs.sort()
1898         summary += "Closing bugs: "
1899         for bug in bugs:
1900             summary += "%s " % (bug)
1901             if action:
1902                 self.update_subst()
1903                 self.Subst["__BUG_NUMBER__"] = bug
1904                 if self.pkg.changes["distribution"].has_key("stable"):
1905                     self.Subst["__STABLE_WARNING__"] = """
1906 Note that this package is not part of the released stable Debian
1907 distribution.  It may have dependencies on other unreleased software,
1908 or other instabilities.  Please take care if you wish to install it.
1909 The update will eventually make its way into the next released Debian
1910 distribution."""
1911                 else:
1912                     self.Subst["__STABLE_WARNING__"] = ""
1913                 mail_message = utils.TemplateSubst(self.Subst, template)
1914                 utils.send_mail(mail_message)
1915
1916                 # Clear up after ourselves
1917                 del self.Subst["__BUG_NUMBER__"]
1918                 del self.Subst["__STABLE_WARNING__"]
1919
1920         if action and self.logger:
1921             self.logger.log(["closing bugs"] + bugs)
1922
1923         summary += "\n"
1924
1925         return summary
1926
1927     ###########################################################################
1928
1929     def announce(self, short_summary, action):
1930         """
1931         Send an announce mail about a new upload.
1932
1933         @type short_summary: string
1934         @param short_summary: Short summary text to include in the mail
1935
1936         @type action: bool
1937         @param action: If set to false, no real action will be taken.
1938
1939         @rtype: string
1940         @return: Text string describing the action taken.
1941
1942         """
1943
1944         cnf = Config()
1945         announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
1946
1947         # Only do announcements for source uploads with a recent dpkg-dev installed
1948         if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
1949            self.pkg.changes["architecture"].has_key("source"):
1950             return ""
1951
1952         lists_done = {}
1953         summary = ""
1954
1955         self.Subst["__SHORT_SUMMARY__"] = short_summary
1956
1957         for dist in self.pkg.changes["distribution"].keys():
1958             suite = get_suite(dist)
1959             if suite is None: continue
1960             announce_list = suite.announce
1961             if announce_list == "" or lists_done.has_key(announce_list):
1962                 continue
1963
1964             lists_done[announce_list] = 1
1965             summary += "Announcing to %s\n" % (announce_list)
1966
1967             if action:
1968                 self.update_subst()
1969                 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
1970                 if cnf.get("Dinstall::TrackingServer") and \
1971                    self.pkg.changes["architecture"].has_key("source"):
1972                     trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
1973                     self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
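                         # e.g. "Bcc: hello@packages.qa.debian.org" when the
                         # TrackingServer is packages.qa.debian.org (an assumed,
                         # illustrative value)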
1974
1975                 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
1976                 utils.send_mail(mail_message)
1977
1978                 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
1979
1980         if cnf.FindB("Dinstall::CloseBugs"):
1981             summary = self.close_bugs(summary, action)
1982
1983         del self.Subst["__SHORT_SUMMARY__"]
1984
1985         return summary
1986
1987     ###########################################################################
1988     @session_wrapper
1989     def accept (self, summary, short_summary, session=None):
1990         """
1991         Accept an upload.
1992
1993         This moves all files referenced from the .changes into the pool,
1994         sends the accepted mail, announces to lists, closes bugs and
1995         also checks for override disparities. If enabled it will write out
1996         the version history for the BTS Version Tracking and will finally set
1997         up the per-suite copy queues (e.g. buildd queues).
1998
1999         @type summary: string
2000         @param summary: Summary text
2001
2002         @type short_summary: string
2003         @param short_summary: Short summary
2004         """
2005
2006         cnf = Config()
2007         stats = SummaryStats()
2008
2009         print "Installing."
2010         self.logger.log(["installing changes", self.pkg.changes_file])
2011
2012         poolfiles = []
2013
2014         # Add the .dsc file to the DB first
2015         for newfile, entry in self.pkg.files.items():
2016             if entry["type"] == "dsc":
2017                 source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
2018                 for j in pfs:
2019                     poolfiles.append(j)
2020
2021         # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
2022         for newfile, entry in self.pkg.files.items():
2023             if entry["type"] == "deb":
2024                 poolfiles.append(add_deb_to_db(self, newfile, session))
2025
2026         # If this is a sourceful diff only upload that is moving
2027         # cross-component we need to copy the .orig files into the new
2028         # component too for the same reasons as above.
2029         # XXX: mhy: I think this should be in add_dsc_to_db
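             # utils.poolify() below is assumed to return the component-relative
             # pool directory, e.g. roughly "main/libf/libfoo/" for a source
             # "libfoo" in component "main" (illustrative names)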
2030         if self.pkg.changes["architecture"].has_key("source"):
2031             for orig_file in self.pkg.orig_files.keys():
2032                 if not self.pkg.orig_files[orig_file].has_key("id"):
2033                     continue # Skip if it's not in the pool
2034                 orig_file_id = self.pkg.orig_files[orig_file]["id"]
2035                 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
2036                     continue # Skip if the location didn't change
2037
2038                 # Do the move
2039                 oldf = get_poolfile_by_id(orig_file_id, session)
2040                 old_filename = os.path.join(oldf.location.path, oldf.filename)
2041                 old_dat = {'size': oldf.filesize,   'md5sum': oldf.md5sum,
2042                            'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
2043
2044                 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
2045
2046                 # TODO: Care about size/md5sum collisions etc
2047                 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
2048
2049                 # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
2050                 if newf is None:
2051                     utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
2052                     newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
2053
2054                     session.flush()
2055
2056                     # Don't reference the old file from this changes
2057                     for p in poolfiles:
2058                         if p.file_id == oldf.file_id:
2059                             poolfiles.remove(p)
2060
2061                     poolfiles.append(newf)
2062
2063                     # Fix up the DSC references
2064                     toremove = []
2065
2066                     for df in source.srcfiles:
2067                         if df.poolfile.file_id == oldf.file_id:
2068                             # Add a new DSC entry and mark the old one for deletion
2069                             # Don't do it in the loop so we don't change the thing we're iterating over
2070                             newdscf = DSCFile()
2071                             newdscf.source_id = source.source_id
2072                             newdscf.poolfile_id = newf.file_id
2073                             session.add(newdscf)
2074
2075                             toremove.append(df)
2076
2077                     for df in toremove:
2078                         session.delete(df)
2079
2080                     # Flush our changes
2081                     session.flush()
2082
2083                     # Make sure that our source object is up-to-date
2084                     session.expire(source)
2085
2086         # Add changelog information to the database
2087         self.store_changelog()
2088
2089         # Install the files into the pool
2090         for newfile, entry in self.pkg.files.items():
2091             destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
2092             utils.move(newfile, destination)
2093             self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
2094             stats.accept_bytes += float(entry["size"])
2095
2096         # Copy the .changes file across for suites which need it.
2097         copy_changes = dict([(x.copychanges, '')
2098                              for x in session.query(Suite).filter(Suite.suite_name.in_(self.pkg.changes["distribution"].keys())).all()
2099                              if x.copychanges is not None])
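             # (a dict is used as a set here, to de-duplicate destinations)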
2100
2101         for dest in copy_changes.keys():
2102             utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
2103
2104         # We're done - commit the database changes
2105         session.commit()
2106         # Our SQL session will automatically start a new transaction after
2107         # the last commit
2108
2109         # Move the .changes into the 'done' directory
2110         utils.move(self.pkg.changes_file,
2111                    os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))
2112
2113         if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
2114             UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
2115
2116         self.update_subst()
2117         self.Subst["__SUMMARY__"] = summary
2118         mail_message = utils.TemplateSubst(self.Subst,
2119                                            os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
2120         utils.send_mail(mail_message)
2121         self.announce(short_summary, 1)
2122
2123         ## Helper stuff for DebBugs Version Tracking
2124         if cnf.Find("Dir::Queue::BTSVersionTrack"):
2125             if self.pkg.changes["architecture"].has_key("source"):
2126                 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2127                 version_history = os.fdopen(fd, 'w')
2128                 version_history.write(self.pkg.dsc["bts changelog"])
2129                 version_history.close()
2130                 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2131                                       self.pkg.changes_file[:-8]+".versions")
2132                 os.rename(temp_filename, filename)
2133                 os.chmod(filename, 0644)
2134
2135             # Write out the binary -> source mapping.
2136             (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2137             debinfo = os.fdopen(fd, 'w')
2138             for name, entry in sorted(self.pkg.files.items()):
2139                 if entry["type"] == "deb":
2140                     line = " ".join([entry["package"], entry["version"],
2141                                      entry["architecture"], entry["source package"],
2142                                      entry["source version"]])
2143                     debinfo.write(line+"\n")
2144             debinfo.close()
2145             filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2146                                   self.pkg.changes_file[:-8]+".debinfo")
2147             os.rename(temp_filename, filename)
2148             os.chmod(filename, 0644)
2149
2150         session.commit()
2151
2152         # Set up our copy queues (e.g. buildd queues)
2153         for suite_name in self.pkg.changes["distribution"].keys():
2154             suite = get_suite(suite_name, session)
2155             for q in suite.copy_queues:
2156                 for f in poolfiles:
2157                     q.add_file_from_pool(f)
2158
2159         session.commit()
2160
2161         # Finally...
2162         stats.accept_count += 1
2163
2164     def check_override(self):
2165         """
2166         Checks override entries for validity. Mails "Override disparity" warnings,
2167         if that feature is enabled.
2168
2169         Abandons the check if
2170           - override disparity checks are disabled
2171           - mail sending is disabled
2172         """
2173
2174         cnf = Config()
2175
2176         # Abandon the check if override disparity checks have been disabled
2177         if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
2178             return
2179
2180         summary = self.pkg.check_override()
2181
2182         if summary == "":
2183             return
2184
2185         overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2186
2187         self.update_subst()
2188         self.Subst["__SUMMARY__"] = summary
2189         mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2190         utils.send_mail(mail_message)
2191         del self.Subst["__SUMMARY__"]
2192
2193     ###########################################################################
2194
2195     def remove(self, from_dir=None):
2196         """
2197         Used (for instance) in p-u to remove the package from unchecked
2198
2199         Also removes the package from holding area.
2200         """
2201         if from_dir is None:
2202             from_dir = self.pkg.directory
2203         h = Holding()
2204
2205         for f in self.pkg.files.keys():
2206             os.unlink(os.path.join(from_dir, f))
2207             if os.path.exists(os.path.join(h.holding_dir, f)):
2208                 os.unlink(os.path.join(h.holding_dir, f))
2209
2210         os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2211         if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2212             os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2213
2214     ###########################################################################
2215
2216     def move_to_queue (self, queue):
2217         """
2218         Move files to a destination queue using the permissions in the table
2219         """
2220         h = Holding()
2221         utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2222                    queue.path, perms=int(queue.change_perms, 8))
2223         for f in self.pkg.files.keys():
2224             utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
2225
2226     ###########################################################################
2227
2228     def force_reject(self, reject_files):
2229         """
2230         Forcefully move files from the current directory to the
2231         reject directory.  If any file already exists in the reject
2232         directory it will be moved to the morgue to make way for
2233         the new file.
2234
2235         @type reject_files: list
2236         @param reject_files: list of filenames to move into the reject directory
2237
2238         """
2239
2240         cnf = Config()
2241
2242         for file_entry in reject_files:
2243             # Skip any files which don't exist or which we don't have permission to copy.
2244             if os.access(file_entry, os.R_OK) == 0:
2245                 continue
2246
2247             dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
2248
2249             try:
2250                 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
2251             except OSError, e:
2252                 # File exists?  Let's find a new name by adding a number
2253                 if e.errno == errno.EEXIST:
2254                     try:
2255                         dest_file = utils.find_next_free(dest_file, 255)
2256                     except NoFreeFilenameError:
2257                         # Something's either gone badly Pete Tong, or
2258                         # someone is trying to exploit us.
2259                         utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))
2260                         return
2261
2262                     # Make sure we really got it
2263                     try:
2264                         dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2265                     except OSError, e:
2266                         # Likewise
2267                         utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
2268                         return
2269                 else:
2270                     raise
2271             # If we got here, we own the destination file, so we can
2272             # safely overwrite it.
2273             utils.move(file_entry, dest_file, 1, perms=0660)
2274             os.close(dest_fd)
2275
2276     ###########################################################################
2277     def do_reject (self, manual=0, reject_message="", notes=""):
2278         """
2279         Reject an upload. If C{manual} is true and no reject message was
2280         given, spawn an editor so the user can write one.
2281
2282         @type manual: bool
2283         @param manual: manual or automated rejection
2284
2285         @type reject_message: string
2286         @param reject_message: A reject message
2287
2288         @return: 0 on success; 1 if a manual rejection was abandoned
2289
2290         """
2291         # If we weren't given a manual rejection message, spawn an
2292         # editor so the user can add one in...
2293         if manual and not reject_message:
2294             (fd, temp_filename) = utils.temp_filename()
2295             temp_file = os.fdopen(fd, 'w')
2296             if len(notes) > 0:
2297                 for note in notes:
2298                     temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
2299                                     % (note.author, note.version, note.notedate, note.comment))
2300             temp_file.close()
2301             editor = os.environ.get("EDITOR","vi")
2302             answer = 'E'
2303             while answer == 'E':
2304                 os.system("%s %s" % (editor, temp_filename))
2305                 temp_fh = utils.open_file(temp_filename)
2306                 reject_message = "".join(temp_fh.readlines())
2307                 temp_fh.close()
2308                 print "Reject message:"
2309                 print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
2310                 prompt = "[R]eject, Edit, Abandon, Quit ?"
2311                 answer = "XXX"
2312                 while prompt.find(answer) == -1:
2313                     answer = utils.our_raw_input(prompt)
2314                     m = re_default_answer.search(prompt)
2315                     if answer == "":
2316                         answer = m.group(1)
2317                     answer = answer[:1].upper()
2318             os.unlink(temp_filename)
2319             if answer == 'A':
2320                 return 1
2321             elif answer == 'Q':
2322                 sys.exit(0)
2323
2324         print "Rejecting.\n"
2325
2326         cnf = Config()
2327
2328         reason_filename = self.pkg.changes_file[:-8] + ".reason"
2329         reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2330
2331         # Move all the files into the reject directory
2332         reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2333         self.force_reject(reject_files)
2334
2335         # If we fail here someone is probably trying to exploit the race
2336         # so let's just raise an exception ...
2337         if os.path.exists(reason_filename):
2338             os.unlink(reason_filename)
2339         reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2340
2341         rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
2342
2343         self.update_subst()
2344         if not manual:
2345             self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2346             self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2347             self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2348             os.write(reason_fd, reject_message)
2349             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2350         else:
2351             # Build up the rejection email
2352             user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2353             self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2354             self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2355             self.Subst["__REJECT_MESSAGE__"] = ""
2356             self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2357             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2358             # Write the rejection email out as the <foo>.reason file
2359             os.write(reason_fd, reject_mail_message)
2360
2361         del self.Subst["__REJECTOR_ADDRESS__"]
2362         del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2363         del self.Subst["__CC__"]
2364
2365         os.close(reason_fd)
2366
2367         # Send the rejection mail
2368         utils.send_mail(reject_mail_message)
2369
2370         if self.logger:
2371             self.logger.log(["rejected", self.pkg.changes_file])
2372
2373         return 0
2374
2375     ################################################################################
2376     def in_override_p(self, package, component, suite, binary_type, filename, session):
2377         """
2378         Check if a package already has override entries in the DB
2379
2380         @type package: string
2381         @param package: package name
2382
2383         @type component: string
2384         @param component: name of the component
2385
2386         @type suite: string
2387         @param suite: name of the suite
2388
2389         @type binary_type: string
2390         @param binary_type: type of the package
2391
2392         @type filename: string
2393         @param filename: filename we check
2394
2395         @return: the database result. But no one cares anyway.
2396
2397         """
2398
2399         cnf = Config()
2400
2401         if binary_type == "": # must be source
2402             file_type = "dsc"
2403         else:
2404             file_type = binary_type
2405
2406         # Override suite name; used for example with proposed-updates
2407         oldsuite = get_suite(suite, session)
2408         if (not oldsuite is None) and oldsuite.overridesuite:
2409             suite = oldsuite.overridesuite
2410
2411         result = get_override(package, suite, component, file_type, session)
2412
2413         # If checking for a source package fall back on the binary override type
2414         if file_type == "dsc" and len(result) < 1:
2415             result = get_override(package, suite, component, ['deb', 'udeb'], session)
2416
2417         # Remember the section and priority so we can check them later if appropriate
2418         if len(result) > 0:
2419             result = result[0]
2420             self.pkg.files[filename]["override section"] = result.section.section
2421             self.pkg.files[filename]["override priority"] = result.priority.priority
2422             return result
2423
2424         return None
2425
2426     ################################################################################
2427     def get_anyversion(self, sv_list, suite):
2428         """
2429         @type sv_list: list
2430         @param sv_list: list of (suite, version) tuples to check
2431
2432         @type suite: string
2433         @param suite: suite name
2434
2435         Returns the highest version in sv_list for C{suite} or any suite enhancing it.
2436         """
2437         Cnf = Config()
2438         anyversion = None
2439         anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
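             # e.g. for suite "testing" this might be ["testing",
             # "testing-proposed-updates"] (an illustrative Enhances setting)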
2440         for (s, v) in sv_list:
2441             if s in [ x.lower() for x in anysuite ]:
2442                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2443                     anyversion = v
2444
2445         return anyversion
2446
2447     ################################################################################
2448
2449     def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2450         """
2451         @type sv_list: list
2452         @param sv_list: list of (suite, version) tuples to check
2453
2454         @type filename: string
2455         @param filename: filename being checked (used in reject messages)
2456
2457         @type new_version: string
2458         @param new_version: version of the package being checked
2459
2460         Ensure versions are newer than existing packages in target
2461         suites and that cross-suite version checking rules as
2462         set out in the conf file are satisfied.
2463         """
2464
2465         cnf = Config()
2466
2467         # Check versions for each target suite
2468         for target_suite in self.pkg.changes["distribution"].keys():
2469             must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
2470             must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
2471
2472             # Enforce "must be newer than target suite" even if conffile omits it
2473             if target_suite not in must_be_newer_than:
2474                 must_be_newer_than.append(target_suite)
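                 # Illustrative configuration (suite names and syntax assumed):
                 #   Suite::unstable::VersionChecks::MustBeNewerThan { "stable"; "testing"; };
                 # would reject a sourceful upload to unstable whose version is
                 # not strictly newer than what stable or testing already has.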
2475
2476             for (suite, existent_version) in sv_list:
2477                 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
2478
2479                 if suite in must_be_newer_than and sourceful and vercmp < 1:
2480                     self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2481
2482                 if suite in must_be_older_than and vercmp > -1:
2483                     cansave = 0
2484
2485                     if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2486                         # we really use the other suite, ignoring the conflicting one ...
2487                         addsuite = self.pkg.changes["distribution-version"][suite]
2488
2489                         add_version = self.get_anyversion(sv_list, addsuite)
2490                         target_version = self.get_anyversion(sv_list, target_suite)
2491
2492                         if not add_version:
2493                             # not add_version can only happen if we map to a suite
2494                             # that doesn't enhance the suite we're propup'ing from.
2495                             # so "propup-ver x a b c; map a d" is a problem only if
2496                             # d doesn't enhance a.
2497                             #
2498                             # i think we could always propagate in this case, rather
2499                             # than complaining. either way, this isn't a REJECT issue
2500                             #
2501                             # And - we really should complain to the dorks who configured dak
2502                             self.warnings.append("%s is mapped to, but not enhanced by, %s - adding anyway" % (suite, addsuite))
2503                             self.pkg.changes.setdefault("propdistribution", {})
2504                             self.pkg.changes["propdistribution"][addsuite] = 1
2505                             cansave = 1
2506                         elif not target_version:
2507                             # not target_version is true when the package is NEW
2508                             # we could just stick with the "...old version..." REJECT
2509                             # for this, I think.
2510                             self.rejects.append("Won't propagate NEW packages.")
2511                         elif apt_pkg.VersionCompare(new_version, add_version) < 0:
2512                             # propagation would be redundant; no need to reject, though.
2513                             self.warnings.append("ignoring version conflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2514                             cansave = 1
2515                         elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2516                              apt_pkg.VersionCompare(add_version, target_version) >= 0:
2517                             # propagate!!
2518                             self.warnings.append("Propagating upload to %s" % (addsuite))
2519                             self.pkg.changes.setdefault("propdistribution", {})
2520                             self.pkg.changes["propdistribution"][addsuite] = 1
2521                             cansave = 1
2522
2523                     if not cansave:
2524                         self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2525
2526     ################################################################################
2527     def check_binary_against_db(self, filename, session):
2528         # Ensure version is sane
2529         self.cross_suite_version_check( \
2530             get_suite_version_by_package(self.pkg.files[filename]["package"], \
2531                 self.pkg.files[filename]["architecture"], session),
2532             filename, self.pkg.files[filename]["version"], sourceful=False)
2533
2534         # Check for any existing copies of the file
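         # (a hit on package + version + architecture means this exact binary
         # was already accepted once; reuploads must bump the version)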
2535         q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2536         q = q.filter_by(version=self.pkg.files[filename]["version"])
2537         q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2538
2539         if q.count() > 0:
2540             self.rejects.append("%s: cannot overwrite existing copy already in the archive." % filename)
2541
2542     ################################################################################
2543
2544     def check_source_against_db(self, filename, session):
2545         source = self.pkg.dsc.get("source")
2546         version = self.pkg.dsc.get("version")
2547
2548         # Ensure version is sane
2549         self.cross_suite_version_check( \
2550             get_suite_version_by_source(source, session), filename, version,
2551             sourceful=True)
2552
2553     ################################################################################
2554     def check_dsc_against_db(self, filename, session):
2555         """
2556
2557         @warning: this function can remove entries from the 'files' index [if
2558          the orig tarball is a duplicate of the one in the archive]; if
2559          you're iterating over 'files' and call this function as part of
2560          the loop, be sure to add a check to the top of the loop to
2561          ensure you haven't just tried to dereference the deleted entry.
2562
2563         """
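         # recheck() below shows the guard this warning asks for: it re-tests
         # membership of each key at the top of its loop over self.pkg.files.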
2564
2565         Cnf = Config()
2566         self.pkg.orig_files = {} # XXX: do we need to clear it?
2567         orig_files = self.pkg.orig_files
2568
2569         # Try and find all files mentioned in the .dsc.  This has
2570         # to work harder to cope with the multiple possible
2571         # locations of an .orig.tar.gz.
2572         # The ordering on the select is needed to pick the newest orig
2573         # when it exists in multiple places.
2574         for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2575             found = None
2576             if self.pkg.files.has_key(dsc_name):
2577                 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2578                 actual_size = int(self.pkg.files[dsc_name]["size"])
2579                 found = "%s in incoming" % (dsc_name)
2580
2581                 # Check the file does not already exist in the archive
2582                 ql = get_poolfile_like_name(dsc_name, session)
2583
2584                 # Keep only results whose filename actually ends with
2585                 # dsc_name.  Removing items from a list while iterating
2586                 # over it skips elements, so build a filtered copy instead.
2587                 ql = [ i for i in ql if i.filename.endswith(dsc_name) ]
2588
2589                 # "[dak] has not broken them.  [dak] has fixed a
2590                 # brokenness.  Your crappy hack exploited a bug in
2591                 # the old dinstall."
2592                 #
2593                 # "(Come on!  I thought it was always obvious that
2594                 # one just doesn't release different files with
2595                 # the same name and version.)"
2596                 #                        -- ajk@ on d-devel@l.d.o
2597
2598                 if len(ql) > 0:
2599                     # Ignore exact matches for .orig.tar.gz
2600                     match = 0
2601                     if re_is_orig_source.match(dsc_name):
2602                         for i in ql:
2603                             if self.pkg.files.has_key(dsc_name) and \
2604                                int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2605                                self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2606                                 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2607                                 # TODO: Don't delete the entry, just mark it as not needed
2608                                 # This would fix the stupidity of changing something we often iterate over
2609                                 # whilst we're doing it
2610                                 del self.pkg.files[dsc_name]
2611                                 dsc_entry["files id"] = i.file_id
2612                                 if not orig_files.has_key(dsc_name):
2613                                     orig_files[dsc_name] = {}
2614                                 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2615                                 match = 1
2616
2617                                 # Don't bitch that we couldn't find this file later
2618                                 try:
2619                                     self.later_check_files.remove(dsc_name)
2620                                 except ValueError:
2621                                     pass
2622
2623
2624                     if not match:
2625                         self.rejects.append("cannot overwrite existing copy of '%s' already in the archive." % (dsc_name))
2626
2627             elif re_is_orig_source.match(dsc_name):
2628                 # Check in the pool
2629                 ql = get_poolfile_like_name(dsc_name, session)
2630
2631                 # Keep only results whose filename actually ends with dsc_name.
2632                 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2633                 # Removing items from a list while iterating over it skips
2634                 # elements, so build a filtered copy instead.
2635                 ql = [ i for i in ql if i.filename.endswith(dsc_name) ]
2636
2637                 if len(ql) > 0:
2638                     # Unfortunately, we may get more than one match here if,
2639                     # for example, the package was in potato but had an -sa
2640                     # upload in woody.  So we need to choose the right one.
2641
2642                     # default to something sane in case we don't match any or have only one
2643                     x = ql[0]
2644
2645                     if len(ql) > 1:
2646                         for i in ql:
2647                             old_file = os.path.join(i.location.path, i.filename)
2648                             old_file_fh = utils.open_file(old_file)
2649                             actual_md5 = apt_pkg.md5sum(old_file_fh)
2650                             old_file_fh.close()
2651                             actual_size = os.stat(old_file)[stat.ST_SIZE]
2652                             if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2653                                 x = i
2654
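                         # Recompute checksums and size for the chosen match x;
                         # the loop variable above ends up as the last candidate
                         # tried, not necessarily the one that matched.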
2655                     old_file = os.path.join(x.location.path, x.filename)
2656                     old_file_fh = utils.open_file(old_file)
2657                     actual_md5 = apt_pkg.md5sum(old_file_fh)
2658                     old_file_fh.close()
2659                     actual_size = os.stat(old_file)[stat.ST_SIZE]
2660                     found = old_file
2661                     suite_type = x.location.archive_type
2662                     # need this for updating dsc_files in install()
2663                     dsc_entry["files id"] = x.file_id
2664                     # See install() in process-accepted...
2665                     if not orig_files.has_key(dsc_name):
2666                         orig_files[dsc_name] = {}
2667                     orig_files[dsc_name]["id"] = x.file_id
2668                     orig_files[dsc_name]["path"] = old_file
2669                     orig_files[dsc_name]["location"] = x.location.location_id
2670                 else:
2671                     # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2672                     # Not there? Check the queue directories...
2673                     for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2674                         if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2675                             continue
2676                         in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2677                         if os.path.exists(in_otherdir):
2678                             in_otherdir_fh = utils.open_file(in_otherdir)
2679                             actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2680                             in_otherdir_fh.close()
2681                             actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2682                             found = in_otherdir
2683                             if not orig_files.has_key(dsc_name):
2684                                 orig_files[dsc_name] = {}
2685                             orig_files[dsc_name]["path"] = in_otherdir
2686
2687                     if not found:
2688                         self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2689                         continue
2690             else:
2691                 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2692                 continue
2693             if actual_md5 != dsc_entry["md5sum"]:
2694                 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2695             if actual_size != int(dsc_entry["size"]):
2696                 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2697
2698     ################################################################################
2699     # This is used by process-new and process-holding to recheck a changes file
2700     # at the time we're running.  It mainly wraps various other internal functions
2701     # and is similar to accepted_checks - these should probably be tidied up
2702     # and combined
2703     def recheck(self, session):
2704         cnf = Config()
2705         for f in self.pkg.files.keys():
2706             # The .orig.tar.gz can disappear out from under us if it's a
2707             # duplicate of one in the archive.
2708             if not self.pkg.files.has_key(f):
2709                 continue
2710
2711             entry = self.pkg.files[f]
2712
2713             # Check that the source still exists
2714             if entry["type"] == "deb":
2715                 source_version = entry["source version"]
2716                 source_package = entry["source package"]
2717                 if not self.pkg.changes["architecture"].has_key("source") \
2718                    and not source_exists(source_package, source_version, \
2719                     suites = self.pkg.changes["distribution"].keys(), session = session):
2720                     source_epochless_version = re_no_epoch.sub('', source_version)
2721                     dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
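                         # e.g. source "foo" at version "1:1.2-3" yields
                         # "foo_1.2-3.dsc"; epochs never appear in filenames.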
2722                     found = False
2723                     for q in ["Embargoed", "Unembargoed", "Newstage"]:
2724                         if cnf.has_key("Dir::Queue::%s" % (q)):
2725                             if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
2726                                 found = True
2727                     if not found:
2728                         self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2729
2730             # Version and file overwrite checks
2731             if entry["type"] == "deb":
2732                 self.check_binary_against_db(f, session)
2733             elif entry["type"] == "dsc":
2734                 self.check_source_against_db(f, session)
2735                 self.check_dsc_against_db(f, session)
2736
2737     ################################################################################
2738     def accepted_checks(self, overwrite_checks, session):
2739         # Recheck anything that relies on the database; since that's not
2740         # frozen between accept and our run time when called from p-a.
2741
2742         # overwrite_checks is set to False when installing to stable/oldstable
2743
2744         propagate={}
2745         nopropagate={}
2746
2747         # Find the .dsc (again)
2748         dsc_filename = None
2749         for f in self.pkg.files.keys():
2750             if self.pkg.files[f]["type"] == "dsc":
2751                 dsc_filename = f
2752
2753         for checkfile in self.pkg.files.keys():
2754             # The .orig.tar.gz can disappear out from under us if it's a
2755             # duplicate of one in the archive.
2756             if not self.pkg.files.has_key(checkfile):
2757                 continue
2758
2759             entry = self.pkg.files[checkfile]
2760
2761             # Check that the source still exists
2762             if entry["type"] == "deb":
2763                 source_version = entry["source version"]
2764                 source_package = entry["source package"]
2765                 if not self.pkg.changes["architecture"].has_key("source") \
2766                    and not source_exists(source_package, source_version, \
2767                     suites = self.pkg.changes["distribution"].keys(), \
2768                     session = session):
2769                     self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2770
2771             # Version and file overwrite checks
2772             if overwrite_checks:
2773                 if entry["type"] == "deb":
2774                     self.check_binary_against_db(checkfile, session)
2775                 elif entry["type"] == "dsc":
2776                     self.check_source_against_db(checkfile, session)
2777                     self.check_dsc_against_db(dsc_filename, session)
2778
2779             # Propagate in case the package is in the override tables:
2780             for suite in self.pkg.changes.get("propdistribution", {}).keys():
2781                 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2782                     propagate[suite] = 1
2783                 else:
2784                     nopropagate[suite] = 1
2785
2786         for suite in propagate.keys():
2787             if suite in nopropagate:
2788                 continue
2789             self.pkg.changes["distribution"][suite] = 1
2790
2791         for checkfile in self.pkg.files.keys():
2792             # Check the package is still in the override tables
2793             for suite in self.pkg.changes["distribution"].keys():
2794                 if not self.in_override_p(self.pkg.files[checkfile]["package"], self.pkg.files[checkfile]["component"], suite, self.pkg.files[checkfile].get("dbtype",""), checkfile, session):
2795                     self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2796
2797     ################################################################################
2798     # If any file of an upload has a recent mtime then chances are good
2799     # the file is still being uploaded.
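     # For example (hypothetical value): with Dinstall::SkipTime set to 300,
     # anything modified within the last five minutes is treated as in flight.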
2800
2801     def upload_too_new(self):
2802         cnf = Config()
2803         too_new = False
2804         # Move back to the original directory to get accurate time stamps
2805         cwd = os.getcwd()
2806         os.chdir(self.pkg.directory)
2807         file_list = self.pkg.files.keys()
2808         file_list.extend(self.pkg.dsc_files.keys())
2809         file_list.append(self.pkg.changes_file)
2810         for f in file_list:
2811             try:
2812                 last_modified = time.time()-os.path.getmtime(f)
2813                 if last_modified < int(cnf["Dinstall::SkipTime"]):
2814                     too_new = True
2815                     break
2816             except OSError:
2817                 pass  # file may have vanished in the meantime
2818
2819         os.chdir(cwd)
2820         return too_new
2821
2822     def store_changelog(self):
2823
2824         # Skip binary-only upload if it is not a bin-NMU
2825         if not self.pkg.changes['architecture'].has_key('source'):
2826             from daklib.regexes import re_bin_only_nmu
2827             if not re_bin_only_nmu.search(self.pkg.changes['version']):
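                     # binNMU versions carry a "+bN" suffix (e.g. "1.2-3+b1"),
                     # which is presumably what re_bin_only_nmu looks for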
2828                 return
2829
2830         session = DBConn().session()
2831
2832         # Check if upload already has a changelog entry
2833         query = """SELECT changelog_id FROM changes WHERE source = :source
2834                    AND version = :version AND architecture = :architecture AND changelog_id != 0"""
2835         if session.execute(query, {'source': self.pkg.changes['source'], \
2836                                    'version': self.pkg.changes['version'], \
2837                                    'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
2838             session.commit()
2839             return
2840
2841         # Add current changelog text into changelogs_text table, return created ID
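         # (INSERT ... RETURNING is PostgreSQL syntax; dak's database is
         # PostgreSQL, so we can fetch the new id without a second query)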
2842         query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
2843         ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]
2844
2845         # Link ID to the upload available in changes table
2846         query = """UPDATE changes SET changelog_id = :id WHERE source = :source
2847                    AND version = :version AND architecture = :architecture"""
2848         session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
2849                                 'version': self.pkg.changes['version'], \
2850                                 'architecture': " ".join(self.pkg.changes['architecture'].keys())})
2851
2852         session.commit()