]> git.decadent.org.uk Git - dak.git/blob - daklib/queue.py
Ignore more python-apt warnings.
[dak.git] / daklib / queue.py
1 #!/usr/bin/env python
2 # vim:set et sw=4:
3
4 """
5 Queue utility functions for dak
6
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009, 2010  Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
11 """
12
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
17
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
21 # GNU General Public License for more details.
22
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
26
27 ###############################################################################
28
29 import errno
30 import os
31 import stat
32 import sys
33 import time
34 import apt_inst
35 import apt_pkg
36 import utils
37 import commands
38 import shutil
39 import textwrap
40 from types import *
41 from sqlalchemy.sql.expression import desc
42 from sqlalchemy.orm.exc import NoResultFound
43
44 import yaml
45
46 from dak_exceptions import *
47 from changes import *
48 from regexes import *
49 from config import Config
50 from holding import Holding
51 from urgencylog import UrgencyLog
52 from dbconn import *
53 from summarystats import SummaryStats
54 from utils import parse_changes, check_dsc_files, build_package_set
55 from textutils import fix_maintainer
56 from lintian import parse_lintian_output, generate_reject_messages
57 from contents import UnpackedSource
58
59 # suppress some deprecation warnings in squeeze related to apt_pkg
60 # module
61 import warnings
62 warnings.filterwarnings('ignore', \
63     "apt_pkg.ParseSection\(\) is deprecated. Please see apt_pkg\.TagSection\(\) for the replacement\.", \
64     DeprecationWarning)
65 warnings.filterwarnings('ignore', \
66     "Attribute '.*' of the 'apt_pkg\.TagSection' object is deprecated, use '.*' instead\.", \
67     DeprecationWarning)
68
69 ###############################################################################
70
71 def get_type(f, session):
72     """
73     Get the file type of C{f}
74
75     @type f: dict
76     @param f: file entry from Changes object
77
78     @type session: SQLA Session
79     @param session: SQL Alchemy session object
80
81     @rtype: string
82     @return: filetype
83
84     """
85     # Determine the type
86     if f.has_key("dbtype"):
87         file_type = f["dbtype"]
88     elif re_source_ext.match(f["type"]):
89         file_type = "dsc"
90     elif f['architecture'] == 'source' and f["type"] == 'unreadable':
91         utils.warn('unreadable source file (will continue and hope for the best)')
92         return f["type"]
93     else:
94         file_type = f["type"]
95         utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (file_type))
96
97     # Validate the override type
98     type_id = get_override_type(file_type, session)
99     if type_id is None:
100         utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))
101
102     return file_type
103
104 ################################################################################
105
106 # Determine what parts in a .changes are NEW
107
108 def determine_new(filename, changes, files, warn=1, session = None, dsc = None, new = None):
109     """
110     Determine what parts in a C{changes} file are NEW.
111
112     @type filename: str
113     @param filename: changes filename
114
115     @type changes: Upload.Pkg.changes dict
116     @param changes: Changes dictionary
117
118     @type files: Upload.Pkg.files dict
119     @param files: Files dictionary
120
121     @type warn: bool
122     @param warn: Warn if overrides are added for (old)stable
123
124     @type dsc: Upload.Pkg.dsc dict
125     @param dsc: (optional); Dsc dictionary
126
127     @type new: dict
128     @param new: new packages as returned by a previous call to this function, but override information may have changed
129
130     @rtype: dict
131     @return: dictionary of NEW components.
132
133     """
134     # TODO: This should all use the database instead of parsing the changes
135     # file again
136     byhand = {}
137     if new is None:
138         new = {}
139
140     dbchg = get_dbchange(filename, session)
141     if dbchg is None:
142         print "Warning: cannot find changes file in database; won't check byhand"
143
144     # Try to get the Package-Set field from an included .dsc file (if possible).
145     if dsc:
146         for package, entry in build_package_set(dsc, session).items():
147             if not new.has_key(package):
148                 new[package] = entry
149
150     # Build up a list of potentially new things
151     for name, f in files.items():
152         # Keep a record of byhand elements
153         if f["section"] == "byhand":
154             byhand[name] = 1
155             continue
156
157         pkg = f["package"]
158         priority = f["priority"]
159         section = f["section"]
160         file_type = get_type(f, session)
161         component = f["component"]
162
163         if file_type == "dsc":
164             priority = "source"
165
166         if not new.has_key(pkg):
167             new[pkg] = {}
168             new[pkg]["priority"] = priority
169             new[pkg]["section"] = section
170             new[pkg]["type"] = file_type
171             new[pkg]["component"] = component
172             new[pkg]["files"] = []
173         else:
174             old_type = new[pkg]["type"]
175             if old_type != file_type:
176                 # source gets trumped by deb or udeb
177                 if old_type == "dsc":
178                     new[pkg]["priority"] = priority
179                     new[pkg]["section"] = section
180                     new[pkg]["type"] = file_type
181                     new[pkg]["component"] = component
182
183         new[pkg]["files"].append(name)
184
185         if f.has_key("othercomponents"):
186             new[pkg]["othercomponents"] = f["othercomponents"]
187
188     # Fix up the list of target suites
189     cnf = Config()
190     for suite in changes["suite"].keys():
191         oldsuite = get_suite(suite, session)
192         if not oldsuite:
193             print "WARNING: Invalid suite %s found" % suite
194             continue
195
196         if oldsuite.overridesuite:
197             newsuite = get_suite(oldsuite.overridesuite, session)
198
199             if newsuite:
200                 print "INFORMATION: Using overrides from suite %s instead of suite %s" % (
201                     oldsuite.overridesuite, suite)
202                 del changes["suite"][suite]
203                 changes["suite"][oldsuite.overridesuite] = 1
204             else:
205                 print "WARNING: Told to use overridesuite %s for %s but it doesn't exist.  Bugger" % (
206                     oldsuite.overridesuite, suite)
207
208     # Check for unprocessed byhand files
209     if dbchg is not None:
210         for b in byhand.keys():
211             # Find the file entry in the database
212             found = False
213             for f in dbchg.files:
214                 if f.filename == b:
215                     found = True
216                     # If it's processed, we can ignore it
217                     if f.processed:
218                         del byhand[b]
219                     break
220
221             if not found:
222                 print "Warning: Couldn't find BYHAND item %s in the database; assuming unprocessed"
223
224     # Check for new stuff
225     for suite in changes["suite"].keys():
226         for pkg in new.keys():
227             ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
228             if len(ql) > 0:
229                 for file_entry in new[pkg]["files"]:
230                     if files[file_entry].has_key("new"):
231                         del files[file_entry]["new"]
232                 del new[pkg]
233
234     if warn:
235         for s in ['stable', 'oldstable']:
236             if changes["suite"].has_key(s):
237                 print "WARNING: overrides will be added for %s!" % s
238         for pkg in new.keys():
239             if new[pkg].has_key("othercomponents"):
240                 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
241
242     return new, byhand
243
244 ################################################################################
245
246 def check_valid(new, session = None):
247     """
248     Check if section and priority for NEW packages exist in database.
249     Additionally does sanity checks:
250       - debian-installer packages have to be udeb (or source)
251       - non debian-installer packages can not be udeb
252       - source priority can only be assigned to dsc file types
253
254     @type new: dict
255     @param new: Dict of new packages with their section, priority and type.
256
257     """
258     for pkg in new.keys():
259         section_name = new[pkg]["section"]
260         priority_name = new[pkg]["priority"]
261         file_type = new[pkg]["type"]
262
263         section = get_section(section_name, session)
264         if section is None:
265             new[pkg]["section id"] = -1
266         else:
267             new[pkg]["section id"] = section.section_id
268
269         priority = get_priority(priority_name, session)
270         if priority is None:
271             new[pkg]["priority id"] = -1
272         else:
273             new[pkg]["priority id"] = priority.priority_id
274
275         # Sanity checks
276         di = section_name.find("debian-installer") != -1
277
278         # If d-i, we must be udeb and vice-versa
279         if     (di and file_type not in ("udeb", "dsc")) or \
280            (not di and file_type == "udeb"):
281             new[pkg]["section id"] = -1
282
283         # If dsc we need to be source and vice-versa
284         if (priority == "source" and file_type != "dsc") or \
285            (priority != "source" and file_type == "dsc"):
286             new[pkg]["priority id"] = -1
287
288 ###############################################################################
289
290 # Used by Upload.check_timestamps
291 class TarTime(object):
292     def __init__(self, future_cutoff, past_cutoff):
293         self.reset()
294         self.future_cutoff = future_cutoff
295         self.past_cutoff = past_cutoff
296
297     def reset(self):
298         self.future_files = {}
299         self.ancient_files = {}
300
301     def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
302         if MTime > self.future_cutoff:
303             self.future_files[Name] = MTime
304         if MTime < self.past_cutoff:
305             self.ancient_files[Name] = MTime
306
307 ###############################################################################
308
309 def prod_maintainer(notes, upload):
310     cnf = Config()
311
312     # Here we prepare an editor and get them ready to prod...
313     (fd, temp_filename) = utils.temp_filename()
314     temp_file = os.fdopen(fd, 'w')
315     for note in notes:
316         temp_file.write(note.comment)
317     temp_file.close()
318     editor = os.environ.get("EDITOR","vi")
319     answer = 'E'
320     while answer == 'E':
321         os.system("%s %s" % (editor, temp_filename))
322         temp_fh = utils.open_file(temp_filename)
323         prod_message = "".join(temp_fh.readlines())
324         temp_fh.close()
325         print "Prod message:"
326         print utils.prefix_multi_line_string(prod_message,"  ",include_blank_lines=1)
327         prompt = "[P]rod, Edit, Abandon, Quit ?"
328         answer = "XXX"
329         while prompt.find(answer) == -1:
330             answer = utils.our_raw_input(prompt)
331             m = re_default_answer.search(prompt)
332             if answer == "":
333                 answer = m.group(1)
334             answer = answer[:1].upper()
335     os.unlink(temp_filename)
336     if answer == 'A':
337         return
338     elif answer == 'Q':
339         end()
340         sys.exit(0)
341     # Otherwise, do the proding...
342     user_email_address = utils.whoami() + " <%s>" % (
343         cnf["Dinstall::MyAdminAddress"])
344
345     Subst = upload.Subst
346
347     Subst["__FROM_ADDRESS__"] = user_email_address
348     Subst["__PROD_MESSAGE__"] = prod_message
349     Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
350
351     prod_mail_message = utils.TemplateSubst(
352         Subst,cnf["Dir::Templates"]+"/process-new.prod")
353
354     # Send the prod mail
355     utils.send_mail(prod_mail_message)
356
357     print "Sent prodding message"
358
359 ################################################################################
360
361 def edit_note(note, upload, session, trainee=False):
362     # Write the current data to a temporary file
363     (fd, temp_filename) = utils.temp_filename()
364     editor = os.environ.get("EDITOR","vi")
365     answer = 'E'
366     while answer == 'E':
367         os.system("%s %s" % (editor, temp_filename))
368         temp_file = utils.open_file(temp_filename)
369         newnote = temp_file.read().rstrip()
370         temp_file.close()
371         print "New Note:"
372         print utils.prefix_multi_line_string(newnote,"  ")
373         prompt = "[D]one, Edit, Abandon, Quit ?"
374         answer = "XXX"
375         while prompt.find(answer) == -1:
376             answer = utils.our_raw_input(prompt)
377             m = re_default_answer.search(prompt)
378             if answer == "":
379                 answer = m.group(1)
380             answer = answer[:1].upper()
381     os.unlink(temp_filename)
382     if answer == 'A':
383         return
384     elif answer == 'Q':
385         end()
386         sys.exit(0)
387
388     comment = NewComment()
389     comment.package = upload.pkg.changes["source"]
390     comment.version = upload.pkg.changes["version"]
391     comment.comment = newnote
392     comment.author  = utils.whoami()
393     comment.trainee = trainee
394     session.add(comment)
395     session.commit()
396
397 ###############################################################################
398
399 # suite names DMs can upload to
400 dm_suites = ['unstable', 'experimental']
401
402 def get_newest_source(source, session):
403     'returns the newest DBSource object in dm_suites'
404     ## the most recent version of the package uploaded to unstable or
405     ## experimental includes the field "DM-Upload-Allowed: yes" in the source
406     ## section of its control file
407     q = session.query(DBSource).filter_by(source = source). \
408         filter(DBSource.suites.any(Suite.suite_name.in_(dm_suites))). \
409         order_by(desc('source.version'))
410     return q.first()
411
412 def get_suite_version_by_source(source, session):
413     'returns a list of tuples (suite_name, version) for source package'
414     q = session.query(Suite.suite_name, DBSource.version). \
415         join(Suite.sources).filter_by(source = source)
416     return q.all()
417
418 def get_source_by_package_and_suite(package, suite_name, session):
419     '''
420     returns a DBSource query filtered by DBBinary.package and this package's
421     suite_name
422     '''
423     return session.query(DBSource). \
424         join(DBSource.binaries).filter_by(package = package). \
425         join(DBBinary.suites).filter_by(suite_name = suite_name)
426
427 def get_suite_version_by_package(package, arch_string, session):
428     '''
429     returns a list of tuples (suite_name, version) for binary package and
430     arch_string
431     '''
432     return session.query(Suite.suite_name, DBBinary.version). \
433         join(Suite.binaries).filter_by(package = package). \
434         join(DBBinary.architecture). \
435         filter(Architecture.arch_string.in_([arch_string, 'all'])).all()
436
437 class Upload(object):
438     """
439     Everything that has to do with an upload processed.
440
441     """
442     def __init__(self):
443         self.logger = None
444         self.pkg = Changes()
445         self.reset()
446
447     ###########################################################################
448
449     def reset (self):
450         """ Reset a number of internal variables."""
451
452         # Initialize the substitution template map
453         cnf = Config()
454         self.Subst = {}
455         self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
456         self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
457         self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
458         self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
459
460         self.rejects = []
461         self.warnings = []
462         self.notes = []
463
464         self.later_check_files = []
465
466         self.pkg.reset()
467
468     def package_info(self):
469         """
470         Format various messages from this Upload to send to the maintainer.
471         """
472
473         msgs = (
474             ('Reject Reasons', self.rejects),
475             ('Warnings', self.warnings),
476             ('Notes', self.notes),
477         )
478
479         msg = ''
480         for title, messages in msgs:
481             if messages:
482                 msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
483         msg += '\n\n'
484
485         return msg
486
487     ###########################################################################
488     def update_subst(self):
489         """ Set up the per-package template substitution mappings """
490
491         cnf = Config()
492
493         # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
494         if not self.pkg.changes.has_key("architecture") or not \
495            isinstance(self.pkg.changes["architecture"], dict):
496             self.pkg.changes["architecture"] = { "Unknown" : "" }
497
498         # and maintainer2047 may not exist.
499         if not self.pkg.changes.has_key("maintainer2047"):
500             self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]
501
502         self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
503         self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
504         self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")
505
506         # For source uploads the Changed-By field wins; otherwise Maintainer wins.
507         if self.pkg.changes["architecture"].has_key("source") and \
508            self.pkg.changes["changedby822"] != "" and \
509            (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
510
511             self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
512             self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
513             self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
514         else:
515             self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
516             self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
517             self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")
518
519         # Process policy doesn't set the fingerprint field and I don't want to make it
520         # do it for now as I don't want to have to deal with the case where we accepted
521         # the package into PU-NEW, but the fingerprint has gone away from the keyring in
522         # the meantime so the package will be remarked as rejectable.  Urgh.
523         # TODO: Fix this properly
524         if self.pkg.changes.has_key('fingerprint'):
525             session = DBConn().session()
526             fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
527             if fpr and self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
528                 if self.pkg.changes.has_key("sponsoremail"):
529                     self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
530             session.close()
531
532         if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
533             self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
534
535         # Apply any global override of the Maintainer field
536         if cnf.get("Dinstall::OverrideMaintainer"):
537             self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
538             self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]
539
540         self.Subst["__REJECT_MESSAGE__"] = self.package_info()
541         self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
542         self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
543         self.Subst["__SUITE__"] = ", ".join(self.pkg.changes["distribution"])
544
545     ###########################################################################
546     def load_changes(self, filename):
547         """
548         Load a changes file and setup a dictionary around it. Also checks for mandantory
549         fields  within.
550
551         @type filename: string
552         @param filename: Changes filename, full path.
553
554         @rtype: boolean
555         @return: whether the changes file was valid or not.  We may want to
556                  reject even if this is True (see what gets put in self.rejects).
557                  This is simply to prevent us even trying things later which will
558                  fail because we couldn't properly parse the file.
559         """
560         Cnf = Config()
561         self.pkg.changes_file = filename
562
563         # Parse the .changes field into a dictionary
564         try:
565             self.pkg.changes.update(parse_changes(filename))
566         except CantOpenError:
567             self.rejects.append("%s: can't read file." % (filename))
568             return False
569         except ParseChangesError, line:
570             self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
571             return False
572         except ChangesUnicodeError:
573             self.rejects.append("%s: changes file not proper utf-8" % (filename))
574             return False
575
576         # Parse the Files field from the .changes into another dictionary
577         try:
578             self.pkg.files.update(utils.build_file_list(self.pkg.changes))
579         except ParseChangesError, line:
580             self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
581             return False
582         except UnknownFormatError, format:
583             self.rejects.append("%s: unknown format '%s'." % (filename, format))
584             return False
585
586         # Check for mandatory fields
587         for i in ("distribution", "source", "binary", "architecture",
588                   "version", "maintainer", "files", "changes", "description"):
589             if not self.pkg.changes.has_key(i):
590                 # Avoid undefined errors later
591                 self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
592                 return False
593
594         # Strip a source version in brackets from the source field
595         if re_strip_srcver.search(self.pkg.changes["source"]):
596             self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])
597
598         # Ensure the source field is a valid package name.
599         if not re_valid_pkg_name.match(self.pkg.changes["source"]):
600             self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))
601
602         # Split multi-value fields into a lower-level dictionary
603         for i in ("architecture", "distribution", "binary", "closes"):
604             o = self.pkg.changes.get(i, "")
605             if o != "":
606                 del self.pkg.changes[i]
607
608             self.pkg.changes[i] = {}
609
610             for j in o.split():
611                 self.pkg.changes[i][j] = 1
612
613         # Fix the Maintainer: field to be RFC822/2047 compatible
614         try:
615             (self.pkg.changes["maintainer822"],
616              self.pkg.changes["maintainer2047"],
617              self.pkg.changes["maintainername"],
618              self.pkg.changes["maintaineremail"]) = \
619                    fix_maintainer (self.pkg.changes["maintainer"])
620         except ParseMaintError, msg:
621             self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
622                    % (filename, self.pkg.changes["maintainer"], msg))
623
624         # ...likewise for the Changed-By: field if it exists.
625         try:
626             (self.pkg.changes["changedby822"],
627              self.pkg.changes["changedby2047"],
628              self.pkg.changes["changedbyname"],
629              self.pkg.changes["changedbyemail"]) = \
630                    fix_maintainer (self.pkg.changes.get("changed-by", ""))
631         except ParseMaintError, msg:
632             self.pkg.changes["changedby822"] = ""
633             self.pkg.changes["changedby2047"] = ""
634             self.pkg.changes["changedbyname"] = ""
635             self.pkg.changes["changedbyemail"] = ""
636
637             self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
638                    % (filename, self.pkg.changes["changed-by"], msg))
639
640         # Ensure all the values in Closes: are numbers
641         if self.pkg.changes.has_key("closes"):
642             for i in self.pkg.changes["closes"].keys():
643                 if re_isanum.match (i) == None:
644                     self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))
645
646         # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
647         self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
648         self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])
649
650         # Check the .changes is non-empty
651         if not self.pkg.files:
652             self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
653             return False
654
655         # Changes was syntactically valid even if we'll reject
656         return True
657
658     ###########################################################################
659
660     def check_distributions(self):
661         "Check and map the Distribution field"
662
663         Cnf = Config()
664
665         # Handle suite mappings
666         for m in Cnf.ValueList("SuiteMappings"):
667             args = m.split()
668             mtype = args[0]
669             if mtype == "map" or mtype == "silent-map":
670                 (source, dest) = args[1:3]
671                 if self.pkg.changes["distribution"].has_key(source):
672                     del self.pkg.changes["distribution"][source]
673                     self.pkg.changes["distribution"][dest] = 1
674                     if mtype != "silent-map":
675                         self.notes.append("Mapping %s to %s." % (source, dest))
676                 if self.pkg.changes.has_key("distribution-version"):
677                     if self.pkg.changes["distribution-version"].has_key(source):
678                         self.pkg.changes["distribution-version"][source]=dest
679             elif mtype == "map-unreleased":
680                 (source, dest) = args[1:3]
681                 if self.pkg.changes["distribution"].has_key(source):
682                     for arch in self.pkg.changes["architecture"].keys():
683                         if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
684                             self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
685                             del self.pkg.changes["distribution"][source]
686                             self.pkg.changes["distribution"][dest] = 1
687                             break
688             elif mtype == "ignore":
689                 suite = args[1]
690                 if self.pkg.changes["distribution"].has_key(suite):
691                     del self.pkg.changes["distribution"][suite]
692                     self.warnings.append("Ignoring %s as a target suite." % (suite))
693             elif mtype == "reject":
694                 suite = args[1]
695                 if self.pkg.changes["distribution"].has_key(suite):
696                     self.rejects.append("Uploads to %s are not accepted." % (suite))
697             elif mtype == "propup-version":
698                 # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
699                 #
700                 # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
701                 if self.pkg.changes["distribution"].has_key(args[1]):
702                     self.pkg.changes.setdefault("distribution-version", {})
703                     for suite in args[2:]:
704                         self.pkg.changes["distribution-version"][suite] = suite
705
706         # Ensure there is (still) a target distribution
707         if len(self.pkg.changes["distribution"].keys()) < 1:
708             self.rejects.append("No valid distribution remaining.")
709
710         # Ensure target distributions exist
711         for suite in self.pkg.changes["distribution"].keys():
712             if not Cnf.has_key("Suite::%s" % (suite)):
713                 self.rejects.append("Unknown distribution `%s'." % (suite))
714
715     ###########################################################################
716
717     def binary_file_checks(self, f, session):
718         cnf = Config()
719         entry = self.pkg.files[f]
720
721         # Extract package control information
722         deb_file = utils.open_file(f)
723         try:
724             control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
725         except:
726             self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
727             deb_file.close()
728             # Can't continue, none of the checks on control would work.
729             return
730
731         # Check for mandantory "Description:"
732         deb_file.seek(0)
733         try:
734             apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
735         except:
736             self.rejects.append("%s: Missing Description in binary package" % (f))
737             return
738
739         deb_file.close()
740
741         # Check for mandatory fields
742         for field in [ "Package", "Architecture", "Version" ]:
743             if control.Find(field) == None:
744                 # Can't continue
745                 self.rejects.append("%s: No %s field in control." % (f, field))
746                 return
747
748         # Ensure the package name matches the one give in the .changes
749         if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
750             self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
751
752         # Validate the package field
753         package = control.Find("Package")
754         if not re_valid_pkg_name.match(package):
755             self.rejects.append("%s: invalid package name '%s'." % (f, package))
756
757         # Validate the version field
758         version = control.Find("Version")
759         if not re_valid_version.match(version):
760             self.rejects.append("%s: invalid version number '%s'." % (f, version))
761
762         # Ensure the architecture of the .deb is one we know about.
763         default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
764         architecture = control.Find("Architecture")
765         upload_suite = self.pkg.changes["distribution"].keys()[0]
766
767         if      architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session = session)] \
768             and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session = session)]:
769             self.rejects.append("Unknown architecture '%s'." % (architecture))
770
771         # Ensure the architecture of the .deb is one of the ones
772         # listed in the .changes.
773         if not self.pkg.changes["architecture"].has_key(architecture):
774             self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
775
776         # Sanity-check the Depends field
777         depends = control.Find("Depends")
778         if depends == '':
779             self.rejects.append("%s: Depends field is empty." % (f))
780
781         # Sanity-check the Provides field
782         provides = control.Find("Provides")
783         if provides:
784             provide = re_spacestrip.sub('', provides)
785             if provide == '':
786                 self.rejects.append("%s: Provides field is empty." % (f))
787             prov_list = provide.split(",")
788             for prov in prov_list:
789                 if not re_valid_pkg_name.match(prov):
790                     self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
791
792         # If there is a Built-Using field, we need to check we can find the
793         # exact source version
794         built_using = control.Find("Built-Using")
795         if built_using:
796             try:
797                 entry["built-using"] = []
798                 for dep in apt_pkg.parse_depends(built_using):
799                     bu_s, bu_v, bu_e = dep[0]
800                     # Check that it's an exact match dependency and we have
801                     # some form of version
802                     if bu_e != "=" or len(bu_v) < 1:
803                         self.rejects.append("%s: Built-Using contains non strict dependency (%s %s %s)" % (f, bu_s, bu_e, bu_v))
804                     else:
805                         # Find the source id for this version
806                         bu_so = get_sources_from_name(bu_s, version=bu_v, session = session)
807                         if len(bu_so) != 1:
808                             self.rejects.append("%s: Built-Using (%s = %s): Cannot find source package" % (f, bu_s, bu_v))
809                         else:
810                             entry["built-using"].append( (bu_so[0].source, bu_so[0].version, ) )
811
812             except ValueError, e:
813                 self.rejects.append("%s: Cannot parse Built-Using field: %s" % (f, str(e)))
814
815
816         # Check the section & priority match those given in the .changes (non-fatal)
817         if     control.Find("Section") and entry["section"] != "" \
818            and entry["section"] != control.Find("Section"):
819             self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
820                                 (f, control.Find("Section", ""), entry["section"]))
821         if control.Find("Priority") and entry["priority"] != "" \
822            and entry["priority"] != control.Find("Priority"):
823             self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
824                                 (f, control.Find("Priority", ""), entry["priority"]))
825
826         entry["package"] = package
827         entry["architecture"] = architecture
828         entry["version"] = version
829         entry["maintainer"] = control.Find("Maintainer", "")
830
831         if f.endswith(".udeb"):
832             self.pkg.files[f]["dbtype"] = "udeb"
833         elif f.endswith(".deb"):
834             self.pkg.files[f]["dbtype"] = "deb"
835         else:
836             self.rejects.append("%s is neither a .deb or a .udeb." % (f))
837
838         entry["source"] = control.Find("Source", entry["package"])
839
840         # Get the source version
841         source = entry["source"]
842         source_version = ""
843
844         if source.find("(") != -1:
845             m = re_extract_src_version.match(source)
846             source = m.group(1)
847             source_version = m.group(2)
848
849         if not source_version:
850             source_version = self.pkg.files[f]["version"]
851
852         entry["source package"] = source
853         entry["source version"] = source_version
854
855         # Ensure the filename matches the contents of the .deb
856         m = re_isadeb.match(f)
857
858         #  package name
859         file_package = m.group(1)
860         if entry["package"] != file_package:
861             self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
862                                 (f, file_package, entry["dbtype"], entry["package"]))
863         epochless_version = re_no_epoch.sub('', control.Find("Version"))
864
865         #  version
866         file_version = m.group(2)
867         if epochless_version != file_version:
868             self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
869                                 (f, file_version, entry["dbtype"], epochless_version))
870
871         #  architecture
872         file_architecture = m.group(3)
873         if entry["architecture"] != file_architecture:
874             self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
875                                 (f, file_architecture, entry["dbtype"], entry["architecture"]))
876
877         # Check for existent source
878         source_version = entry["source version"]
879         source_package = entry["source package"]
880         if self.pkg.changes["architecture"].has_key("source"):
881             if source_version != self.pkg.changes["version"]:
882                 self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
883                                     (source_version, f, self.pkg.changes["version"]))
884         else:
885             # Check in the SQL database
886             if not source_exists(source_package, source_version, suites = \
887                 self.pkg.changes["distribution"].keys(), session = session):
888                 # Check in one of the other directories
889                 source_epochless_version = re_no_epoch.sub('', source_version)
890                 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
891                 if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
892                     entry["byhand"] = 1
893                 elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
894                     entry["new"] = 1
895                 else:
896                     dsc_file_exists = False
897                     for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
898                         if cnf.has_key("Dir::Queue::%s" % (myq)):
899                             if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
900                                 dsc_file_exists = True
901                                 break
902
903                     if not dsc_file_exists:
904                         self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
905
906         # Check the version and for file overwrites
907         self.check_binary_against_db(f, session)
908
909     def source_file_checks(self, f, session):
910         entry = self.pkg.files[f]
911
912         m = re_issource.match(f)
913         if not m:
914             return
915
916         entry["package"] = m.group(1)
917         entry["version"] = m.group(2)
918         entry["type"] = m.group(3)
919
920         # Ensure the source package name matches the Source filed in the .changes
921         if self.pkg.changes["source"] != entry["package"]:
922             self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
923
924         # Ensure the source version matches the version in the .changes file
925         if re_is_orig_source.match(f):
926             changes_version = self.pkg.changes["chopversion2"]
927         else:
928             changes_version = self.pkg.changes["chopversion"]
929
930         if changes_version != entry["version"]:
931             self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))
932
933         # Ensure the .changes lists source in the Architecture field
934         if not self.pkg.changes["architecture"].has_key("source"):
935             self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))
936
937         # Check the signature of a .dsc file
938         if entry["type"] == "dsc":
939             # check_signature returns either:
940             #  (None, [list, of, rejects]) or (signature, [])
941             (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
942             for j in rejects:
943                 self.rejects.append(j)
944
945         entry["architecture"] = "source"
946
947     def per_suite_file_checks(self, f, suite, session):
948         cnf = Config()
949         entry = self.pkg.files[f]
950
951         # Skip byhand
952         if entry.has_key("byhand"):
953             return
954
955         # Check we have fields we need to do these checks
956         oktogo = True
957         for m in ['component', 'package', 'priority', 'size', 'md5sum']:
958             if not entry.has_key(m):
959                 self.rejects.append("file '%s' does not have field %s set" % (f, m))
960                 oktogo = False
961
962         if not oktogo:
963             return
964
965         # Handle component mappings
966         for m in cnf.ValueList("ComponentMappings"):
967             (source, dest) = m.split()
968             if entry["component"] == source:
969                 entry["original component"] = source
970                 entry["component"] = dest
971
972         # Ensure the component is valid for the target suite
973         if cnf.has_key("Suite:%s::Components" % (suite)) and \
974            entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
975             self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
976             return
977
978         # Validate the component
979         if not get_component(entry["component"], session):
980             self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
981             return
982
983         # See if the package is NEW
984         if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
985             entry["new"] = 1
986
987         # Validate the priority
988         if entry["priority"].find('/') != -1:
989             self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))
990
991         # Determine the location
992         location = cnf["Dir::Pool"]
993         l = get_location(location, entry["component"], session=session)
994         if l is None:
995             self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
996             entry["location id"] = -1
997         else:
998             entry["location id"] = l.location_id
999
1000         # Check the md5sum & size against existing files (if any)
1001         entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])
1002
1003         found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
1004                                          entry["size"], entry["md5sum"], entry["location id"])
1005
1006         if found is None:
1007             self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
1008         elif found is False and poolfile is not None:
1009             self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
1010         else:
1011             if poolfile is None:
1012                 entry["files id"] = None
1013             else:
1014                 entry["files id"] = poolfile.file_id
1015
1016         # Check for packages that have moved from one component to another
1017         entry['suite'] = suite
1018         arch_list = [entry["architecture"], 'all']
1019         component = get_component_by_package_suite(self.pkg.files[f]['package'], \
1020             [suite], arch_list = arch_list, session = session)
1021         if component is not None:
1022             entry["othercomponents"] = component
1023
1024     def check_files(self, action=True):
1025         file_keys = self.pkg.files.keys()
1026         holding = Holding()
1027         cnf = Config()
1028
1029         if action:
1030             cwd = os.getcwd()
1031             os.chdir(self.pkg.directory)
1032             for f in file_keys:
1033                 ret = holding.copy_to_holding(f)
1034                 if ret is not None:
1035                     self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)
1036
1037             os.chdir(cwd)
1038
1039         # check we already know the changes file
1040         # [NB: this check must be done post-suite mapping]
1041         base_filename = os.path.basename(self.pkg.changes_file)
1042
1043         session = DBConn().session()
1044
1045         try:
1046             dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
1047             # if in the pool or in a queue other than unchecked, reject
1048             if (dbc.in_queue is None) \
1049                    or (dbc.in_queue is not None
1050                        and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
1051                 self.rejects.append("%s file already known to dak" % base_filename)
1052         except NoResultFound, e:
1053             # not known, good
1054             pass
1055
1056         has_binaries = False
1057         has_source = False
1058
1059         for f, entry in self.pkg.files.items():
1060             # Ensure the file does not already exist in one of the accepted directories
1061             for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
1062                 if not cnf.has_key("Dir::Queue::%s" % (d)): continue
1063                 if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
1064                     self.rejects.append("%s file already exists in the %s directory." % (f, d))
1065
1066             if not re_taint_free.match(f):
1067                 self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))
1068
1069             # Check the file is readable
1070             if os.access(f, os.R_OK) == 0:
1071                 # When running in -n, copy_to_holding() won't have
1072                 # generated the reject_message, so we need to.
1073                 if action:
1074                     if os.path.exists(f):
1075                         self.rejects.append("Can't read `%s'. [permission denied]" % (f))
1076                     else:
1077                         # Don't directly reject, mark to check later to deal with orig's
1078                         # we can find in the pool
1079                         self.later_check_files.append(f)
1080                 entry["type"] = "unreadable"
1081                 continue
1082
1083             # If it's byhand skip remaining checks
1084             if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
1085                 entry["byhand"] = 1
1086                 entry["type"] = "byhand"
1087
1088             # Checks for a binary package...
1089             elif re_isadeb.match(f):
1090                 has_binaries = True
1091                 entry["type"] = "deb"
1092
1093                 # This routine appends to self.rejects/warnings as appropriate
1094                 self.binary_file_checks(f, session)
1095
1096             # Checks for a source package...
1097             elif re_issource.match(f):
1098                 has_source = True
1099
1100                 # This routine appends to self.rejects/warnings as appropriate
1101                 self.source_file_checks(f, session)
1102
1103             # Not a binary or source package?  Assume byhand...
1104             else:
1105                 entry["byhand"] = 1
1106                 entry["type"] = "byhand"
1107
1108             # Per-suite file checks
1109             entry["oldfiles"] = {}
1110             for suite in self.pkg.changes["distribution"].keys():
1111                 self.per_suite_file_checks(f, suite, session)
1112
1113         session.close()
1114
1115         # If the .changes file says it has source, it must have source.
1116         if self.pkg.changes["architecture"].has_key("source"):
1117             if not has_source:
1118                 self.rejects.append("no source found and Architecture line in changes mention source.")
1119
1120             if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
1121                 self.rejects.append("source only uploads are not supported.")
1122
1123     ###########################################################################
1124
1125     def __dsc_filename(self):
1126         """
1127         Returns: (Status, Dsc_Filename)
1128         where
1129           Status: Boolean; True when there was no error, False otherwise
1130           Dsc_Filename: String; name of the dsc file if Status is True, reason for the error otherwise
1131         """
1132         dsc_filename = None
1133
1134         # find the dsc
1135         for name, entry in self.pkg.files.items():
1136             if entry.has_key("type") and entry["type"] == "dsc":
1137                 if dsc_filename:
1138                     return False, "cannot process a .changes file with multiple .dsc's."
1139                 else:
1140                     dsc_filename = name
1141
1142         if not dsc_filename:
1143             return False, "source uploads must contain a dsc file"
1144
1145         return True, dsc_filename
1146
1147     def load_dsc(self, action=True, signing_rules=1):
1148         """
1149         Find and load the dsc from self.pkg.files into self.dsc
1150
1151         Returns: (Status, Reason)
1152         where
1153           Status: Boolean; True when there was no error, False otherwise
1154           Reason: String; When Status is False this describes the error
1155         """
1156
1157         # find the dsc
1158         (status, dsc_filename) = self.__dsc_filename()
1159         if not status:
1160             # If status is false, dsc_filename has the reason
1161             return False, dsc_filename
1162
1163         try:
1164             self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=signing_rules, dsc_file=1))
1165         except CantOpenError:
1166             if not action:
1167                 return False, "%s: can't read file." % (dsc_filename)
1168         except ParseChangesError, line:
1169             return False, "%s: parse error, can't grok: %s." % (dsc_filename, line)
1170         except InvalidDscError, line:
1171             return False, "%s: syntax error on line %s." % (dsc_filename, line)
1172         except ChangesUnicodeError:
1173             return False, "%s: dsc file not proper utf-8." % (dsc_filename)
1174
1175         return True, None
1176
1177     ###########################################################################
1178
1179     def check_dsc(self, action=True, session=None):
1180         """Returns bool indicating whether or not the source changes are valid"""
1181         # Ensure there is source to check
1182         if not self.pkg.changes["architecture"].has_key("source"):
1183             return True
1184
1185         (status, reason) = self.load_dsc(action=action)
1186         if not status:
1187             self.rejects.append(reason)
1188             return False
1189         (status, dsc_filename) = self.__dsc_filename()
1190         if not status:
1191             # If status is false, dsc_filename has the reason
1192             self.rejects.append(dsc_filename)
1193             return False
1194
1195         # Build up the file list of files mentioned by the .dsc
1196         try:
1197             self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
1198         except NoFilesFieldError:
1199             self.rejects.append("%s: no Files: field." % (dsc_filename))
1200             return False
1201         except UnknownFormatError, format:
1202             self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
1203             return False
1204         except ParseChangesError, line:
1205             self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
1206             return False
1207
1208         # Enforce mandatory fields
1209         for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
1210             if not self.pkg.dsc.has_key(i):
1211                 self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
1212                 return False
1213
1214         # Validate the source and version fields
1215         if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
1216             self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
1217         if not re_valid_version.match(self.pkg.dsc["version"]):
1218             self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))
1219
1220         # Only a limited list of source formats are allowed in each suite
1221         for dist in self.pkg.changes["distribution"].keys():
1222             allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
1223             if self.pkg.dsc["format"] not in allowed:
1224                 self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))
1225
1226         # Validate the Maintainer field
1227         try:
1228             # We ignore the return value
1229             fix_maintainer(self.pkg.dsc["maintainer"])
1230         except ParseMaintError, msg:
1231             self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
1232                                  % (dsc_filename, self.pkg.dsc["maintainer"], msg))
1233
1234         # Validate the build-depends field(s)
1235         for field_name in [ "build-depends", "build-depends-indep" ]:
1236             field = self.pkg.dsc.get(field_name)
1237             if field:
1238                 # Have apt try to parse them...
1239                 try:
1240                     apt_pkg.ParseSrcDepends(field)
1241                 except:
1242                     self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))
1243
1244         # Ensure the version number in the .dsc matches the version number in the .changes
1245         epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1246         changes_version = self.pkg.files[dsc_filename]["version"]
1247
1248         if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
1249             self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
1250
1251         # Ensure the Files field contain only what's expected
1252         self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))
1253
1254         # Ensure source is newer than existing source in target suites
1255         session = DBConn().session()
1256         self.check_source_against_db(dsc_filename, session)
1257         self.check_dsc_against_db(dsc_filename, session)
1258
1259         dbchg = get_dbchange(self.pkg.changes_file, session)
1260
1261         # Finally, check if we're missing any files
1262         for f in self.later_check_files:
1263             print 'XXX: %s' % f
1264             # Check if we've already processed this file if we have a dbchg object
1265             ok = False
1266             if dbchg:
1267                 for pf in dbchg.files:
1268                     if pf.filename == f and pf.processed:
1269                         self.notes.append('%s was already processed so we can go ahead' % f)
1270                         ok = True
1271                         del self.pkg.files[f]
1272             if not ok:
1273                 self.rejects.append("Could not find file %s references in changes" % f)
1274
1275         session.close()
1276
1277         return True
1278
1279     ###########################################################################
1280
1281     def get_changelog_versions(self, source_dir):
1282         """Extracts a the source package and (optionally) grabs the
1283         version history out of debian/changelog for the BTS."""
1284
1285         cnf = Config()
1286
1287         # Find the .dsc (again)
1288         dsc_filename = None
1289         for f in self.pkg.files.keys():
1290             if self.pkg.files[f]["type"] == "dsc":
1291                 dsc_filename = f
1292
1293         # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1294         if not dsc_filename:
1295             return
1296
1297         # Create a symlink mirror of the source files in our temporary directory
1298         for f in self.pkg.files.keys():
1299             m = re_issource.match(f)
1300             if m:
1301                 src = os.path.join(source_dir, f)
1302                 # If a file is missing for whatever reason, give up.
1303                 if not os.path.exists(src):
1304                     return
1305                 ftype = m.group(3)
1306                 if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
1307                    self.pkg.orig_files[f].has_key("path"):
1308                     continue
1309                 dest = os.path.join(os.getcwd(), f)
1310                 os.symlink(src, dest)
1311
1312         # If the orig files are not a part of the upload, create symlinks to the
1313         # existing copies.
1314         for orig_file in self.pkg.orig_files.keys():
1315             if not self.pkg.orig_files[orig_file].has_key("path"):
1316                 continue
1317             dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
1318             os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
1319
1320         # Extract the source
1321         try:
1322             unpacked = UnpackedSource(dsc_filename)
1323         except:
1324             self.rejects.append("'dpkg-source -x' failed for %s." % dsc_filename)
1325             return
1326
1327         if not cnf.Find("Dir::Queue::BTSVersionTrack"):
1328             return
1329
1330         # Get the upstream version
1331         upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1332         if re_strip_revision.search(upstr_version):
1333             upstr_version = re_strip_revision.sub('', upstr_version)
1334
1335         # Ensure the changelog file exists
1336         changelog_file = unpacked.get_changelog_file()
1337         if changelog_file is None:
1338             self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
1339             return
1340
1341         # Parse the changelog
1342         self.pkg.dsc["bts changelog"] = ""
1343         for line in changelog_file.readlines():
1344             m = re_changelog_versions.match(line)
1345             if m:
1346                 self.pkg.dsc["bts changelog"] += line
1347         changelog_file.close()
1348         unpacked.cleanup()
1349
1350         # Check we found at least one revision in the changelog
1351         if not self.pkg.dsc["bts changelog"]:
1352             self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1353
1354     def check_source(self):
1355         # Bail out if:
1356         #    a) there's no source
1357         if not self.pkg.changes["architecture"].has_key("source"):
1358             return
1359
1360         tmpdir = utils.temp_dirname()
1361
1362         # Move into the temporary directory
1363         cwd = os.getcwd()
1364         os.chdir(tmpdir)
1365
1366         # Get the changelog version history
1367         self.get_changelog_versions(cwd)
1368
1369         # Move back and cleanup the temporary tree
1370         os.chdir(cwd)
1371
1372         try:
1373             shutil.rmtree(tmpdir)
1374         except OSError, e:
1375             if e.errno != errno.EACCES:
1376                 print "foobar"
1377                 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1378
1379             self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1380             # We probably have u-r or u-w directories so chmod everything
1381             # and try again.
1382             cmd = "chmod -R u+rwx %s" % (tmpdir)
1383             result = os.system(cmd)
1384             if result != 0:
1385                 utils.fubar("'%s' failed with result %s." % (cmd, result))
1386             shutil.rmtree(tmpdir)
1387         except Exception, e:
1388             print "foobar2 (%s)" % e
1389             utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1390
1391     ###########################################################################
1392     def ensure_hashes(self):
1393         # Make sure we recognise the format of the Files: field in the .changes
1394         format = self.pkg.changes.get("format", "0.0").split(".", 1)
1395         if len(format) == 2:
1396             format = int(format[0]), int(format[1])
1397         else:
1398             format = int(float(format[0])), 0
1399
1400         # We need to deal with the original changes blob, as the fields we need
1401         # might not be in the changes dict serialised into the .dak anymore.
1402         orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1403
1404         # Copy the checksums over to the current changes dict.  This will keep
1405         # the existing modifications to it intact.
1406         for field in orig_changes:
1407             if field.startswith('checksums-'):
1408                 self.pkg.changes[field] = orig_changes[field]
1409
1410         # Check for unsupported hashes
1411         for j in utils.check_hash_fields(".changes", self.pkg.changes):
1412             self.rejects.append(j)
1413
1414         for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1415             self.rejects.append(j)
1416
1417         # We have to calculate the hash if we have an earlier changes version than
1418         # the hash appears in rather than require it exist in the changes file
1419         for hashname, hashfunc, version in utils.known_hashes:
1420             # TODO: Move _ensure_changes_hash into this class
1421             for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1422                 self.rejects.append(j)
1423             if "source" in self.pkg.changes["architecture"]:
1424                 # TODO: Move _ensure_dsc_hash into this class
1425                 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1426                     self.rejects.append(j)
1427
1428     def check_hashes(self):
1429         for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1430             self.rejects.append(m)
1431
1432         for m in utils.check_size(".changes", self.pkg.files):
1433             self.rejects.append(m)
1434
1435         for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1436             self.rejects.append(m)
1437
1438         for m in utils.check_size(".dsc", self.pkg.dsc_files):
1439             self.rejects.append(m)
1440
1441         self.ensure_hashes()
1442
1443     ###########################################################################
1444
1445     def ensure_orig(self, target_dir='.', session=None):
1446         """
1447         Ensures that all orig files mentioned in the changes file are present
1448         in target_dir. If they do not exist, they are symlinked into place.
1449
1450         An list containing the symlinks that were created are returned (so they
1451         can be removed).
1452         """
1453
1454         symlinked = []
1455         cnf = Config()
1456
1457         for filename, entry in self.pkg.dsc_files.iteritems():
1458             if not re_is_orig_source.match(filename):
1459                 # File is not an orig; ignore
1460                 continue
1461
1462             if os.path.exists(filename):
1463                 # File exists, no need to continue
1464                 continue
1465
1466             def symlink_if_valid(path):
1467                 f = utils.open_file(path)
1468                 md5sum = apt_pkg.md5sum(f)
1469                 f.close()
1470
1471                 fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
1472                 expected = (int(entry['size']), entry['md5sum'])
1473
1474                 if fingerprint != expected:
1475                     return False
1476
1477                 dest = os.path.join(target_dir, filename)
1478
1479                 os.symlink(path, dest)
1480                 symlinked.append(dest)
1481
1482                 return True
1483
1484             session_ = session
1485             if session is None:
1486                 session_ = DBConn().session()
1487
1488             found = False
1489
1490             # Look in the pool
1491             for poolfile in get_poolfile_like_name('%s' % filename, session_):
1492                 poolfile_path = os.path.join(
1493                     poolfile.location.path, poolfile.filename
1494                 )
1495
1496                 if symlink_if_valid(poolfile_path):
1497                     found = True
1498                     break
1499
1500             if session is None:
1501                 session_.close()
1502
1503             if found:
1504                 continue
1505
1506             # Look in some other queues for the file
1507             queues = ('New', 'Byhand', 'ProposedUpdates',
1508                 'OldProposedUpdates', 'Embargoed', 'Unembargoed')
1509
1510             for queue in queues:
1511                 if not cnf.get('Dir::Queue::%s' % queue):
1512                     continue
1513
1514                 queuefile_path = os.path.join(
1515                     cnf['Dir::Queue::%s' % queue], filename
1516                 )
1517
1518                 if not os.path.exists(queuefile_path):
1519                     # Does not exist in this queue
1520                     continue
1521
1522                 if symlink_if_valid(queuefile_path):
1523                     break
1524
1525         return symlinked
1526
1527     ###########################################################################
1528
1529     def check_lintian(self):
1530         """
1531         Extends self.rejects by checking the output of lintian against tags
1532         specified in Dinstall::LintianTags.
1533         """
1534
1535         cnf = Config()
1536
1537         # Don't reject binary uploads
1538         if not self.pkg.changes['architecture'].has_key('source'):
1539             return
1540
1541         # Only check some distributions
1542         for dist in ('unstable', 'experimental'):
1543             if dist in self.pkg.changes['distribution']:
1544                 break
1545         else:
1546             return
1547
1548         # If we do not have a tagfile, don't do anything
1549         tagfile = cnf.get("Dinstall::LintianTags")
1550         if not tagfile:
1551             return
1552
1553         # Parse the yaml file
1554         sourcefile = file(tagfile, 'r')
1555         sourcecontent = sourcefile.read()
1556         sourcefile.close()
1557
1558         try:
1559             lintiantags = yaml.load(sourcecontent)['lintian']
1560         except yaml.YAMLError, msg:
1561             utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
1562             return
1563
1564         # Try and find all orig mentioned in the .dsc
1565         symlinked = self.ensure_orig()
1566
1567         # Setup the input file for lintian
1568         fd, temp_filename = utils.temp_filename()
1569         temptagfile = os.fdopen(fd, 'w')
1570         for tags in lintiantags.values():
1571             temptagfile.writelines(['%s\n' % x for x in tags])
1572         temptagfile.close()
1573
1574         try:
1575             cmd = "lintian --show-overrides --tags-from-file %s %s" % \
1576                 (temp_filename, self.pkg.changes_file)
1577
1578             result, output = commands.getstatusoutput(cmd)
1579         finally:
1580             # Remove our tempfile and any symlinks we created
1581             os.unlink(temp_filename)
1582
1583             for symlink in symlinked:
1584                 os.unlink(symlink)
1585
1586         if result == 2:
1587             utils.warn("lintian failed for %s [return code: %s]." % \
1588                 (self.pkg.changes_file, result))
1589             utils.warn(utils.prefix_multi_line_string(output, \
1590                 " [possible output:] "))
1591
1592         def log(*txt):
1593             if self.logger:
1594                 self.logger.log(
1595                     [self.pkg.changes_file, "check_lintian"] + list(txt)
1596                 )
1597
1598         # Generate messages
1599         parsed_tags = parse_lintian_output(output)
1600         self.rejects.extend(
1601             generate_reject_messages(parsed_tags, lintiantags, log=log)
1602         )
1603
1604     ###########################################################################
1605     def check_urgency(self):
1606         cnf = Config()
1607         if self.pkg.changes["architecture"].has_key("source"):
1608             if not self.pkg.changes.has_key("urgency"):
1609                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1610             self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1611             if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1612                 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1613                                      (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1614                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1615
1616     ###########################################################################
1617
1618     # Sanity check the time stamps of files inside debs.
1619     # [Files in the near future cause ugly warnings and extreme time
1620     #  travel can cause errors on extraction]
1621
1622     def check_timestamps(self):
1623         Cnf = Config()
1624
1625         future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1626         past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1627         tar = TarTime(future_cutoff, past_cutoff)
1628
1629         for filename, entry in self.pkg.files.items():
1630             if entry["type"] == "deb":
1631                 tar.reset()
1632                 try:
1633                     deb_file = utils.open_file(filename)
1634                     apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
1635                     deb_file.seek(0)
1636                     try:
1637                         apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
1638                     except SystemError, e:
1639                         # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
1640                         if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
1641                             raise
1642                         deb_file.seek(0)
1643                         apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")
1644
1645                     deb_file.close()
1646
1647                     future_files = tar.future_files.keys()
1648                     if future_files:
1649                         num_future_files = len(future_files)
1650                         future_file = future_files[0]
1651                         future_date = tar.future_files[future_file]
1652                         self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1653                                % (filename, num_future_files, future_file, time.ctime(future_date)))
1654
1655                     ancient_files = tar.ancient_files.keys()
1656                     if ancient_files:
1657                         num_ancient_files = len(ancient_files)
1658                         ancient_file = ancient_files[0]
1659                         ancient_date = tar.ancient_files[ancient_file]
1660                         self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1661                                % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1662                 except:
1663                     self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1664
1665     def check_if_upload_is_sponsored(self, uid_email, uid_name):
1666         if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1667             sponsored = False
1668         elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1669             sponsored = False
1670             if uid_name == "":
1671                 sponsored = True
1672         else:
1673             sponsored = True
1674             if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1675                 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1676                 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1677                     self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1678                         self.pkg.changes["sponsoremail"] = uid_email
1679
1680         return sponsored
1681
1682
1683     ###########################################################################
1684     # check_signed_by_key checks
1685     ###########################################################################
1686
1687     def check_signed_by_key(self):
1688         """Ensure the .changes is signed by an authorized uploader."""
1689         session = DBConn().session()
1690
1691         # First of all we check that the person has proper upload permissions
1692         # and that this upload isn't blocked
1693         fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1694
1695         if fpr is None:
1696             self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1697             return
1698
1699         # TODO: Check that import-keyring adds UIDs properly
1700         if not fpr.uid:
1701             self.rejects.append("Cannot find uid for fingerprint %s.  Please contact ftpmaster@debian.org" % fpr.fingerprint)
1702             return
1703
1704         # Check that the fingerprint which uploaded has permission to do so
1705         self.check_upload_permissions(fpr, session)
1706
1707         # Check that this package is not in a transition
1708         self.check_transition(session)
1709
1710         session.close()
1711
1712
1713     def check_upload_permissions(self, fpr, session):
1714         # Check any one-off upload blocks
1715         self.check_upload_blocks(fpr, session)
1716
1717         # If the source_acl is None, source is never allowed
1718         if fpr.source_acl is None:
1719             if self.pkg.changes["architecture"].has_key("source"):
1720                 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1721                 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1722                 self.rejects.append(rej)
1723                 return
1724         # Do DM as a special case
1725         # DM is a special case unfortunately, so we check it first
1726         # (keys with no source access get more access than DMs in one
1727         #  way; DMs can only upload for their packages whether source
1728         #  or binary, whereas keys with no access might be able to
1729         #  upload some binaries)
1730         elif fpr.source_acl.access_level == 'dm':
1731             self.check_dm_upload(fpr, session)
1732         else:
1733             # If not a DM, we allow full upload rights
1734             uid_email = "%s@debian.org" % (fpr.uid.uid)
1735             self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1736
1737
1738         # Check binary upload permissions
1739         # By this point we know that DMs can't have got here unless they
1740         # are allowed to deal with the package concerned so just apply
1741         # normal checks
1742         if fpr.binary_acl.access_level == 'full':
1743             return
1744
1745         # Otherwise we're in the map case
1746         tmparches = self.pkg.changes["architecture"].copy()
1747         tmparches.pop('source', None)
1748
1749         for bam in fpr.binary_acl_map:
1750             tmparches.pop(bam.architecture.arch_string, None)
1751
1752         if len(tmparches.keys()) > 0:
1753             if fpr.binary_reject:
1754                 rej = "changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1755                 if len(tmparches.keys()) == 1:
1756                     rej += "\n\narchitecture involved is: %s" % ",".join(tmparches.keys())
1757                 else:
1758                     rej += "\n\narchitectures involved are: %s" % ",".join(tmparches.keys())
1759                 self.rejects.append(rej)
1760             else:
1761                 # TODO: This is where we'll implement reject vs throw away binaries later
1762                 rej = "Uhm.  I'm meant to throw away the binaries now but that's not implemented yet"
1763                 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1764                 rej += "\nFingerprint: %s", (fpr.fingerprint)
1765                 self.rejects.append(rej)
1766
1767
1768     def check_upload_blocks(self, fpr, session):
1769         """Check whether any upload blocks apply to this source, source
1770            version, uid / fpr combination"""
1771
1772         def block_rej_template(fb):
1773             rej = 'Manual upload block in place for package %s' % fb.source
1774             if fb.version is not None:
1775                 rej += ', version %s' % fb.version
1776             return rej
1777
1778         for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1779             # version is None if the block applies to all versions
1780             if fb.version is None or fb.version == self.pkg.changes['version']:
1781                 # Check both fpr and uid - either is enough to cause a reject
1782                 if fb.fpr is not None:
1783                     if fb.fpr.fingerprint == fpr.fingerprint:
1784                         self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1785                 if fb.uid is not None:
1786                     if fb.uid == fpr.uid:
1787                         self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1788
1789
1790     def check_dm_upload(self, fpr, session):
1791         # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
1792         ## none of the uploaded packages are NEW
1793         rej = False
1794         for f in self.pkg.files.keys():
1795             if self.pkg.files[f].has_key("byhand"):
1796                 self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
1797                 rej = True
1798             if self.pkg.files[f].has_key("new"):
1799                 self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
1800                 rej = True
1801
1802         if rej:
1803             return
1804
1805         r = get_newest_source(self.pkg.changes["source"], session)
1806
1807         if r is None:
1808             rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
1809             self.rejects.append(rej)
1810             return
1811
1812         if not r.dm_upload_allowed:
1813             rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
1814             self.rejects.append(rej)
1815             return
1816
1817         ## the Maintainer: field of the uploaded .changes file corresponds with
1818         ## the owner of the key used (ie, non-developer maintainers may not sponsor
1819         ## uploads)
1820         if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
1821             self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
1822
1823         ## the most recent version of the package uploaded to unstable or
1824         ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
1825         ## non-developer maintainers cannot NMU or hijack packages)
1826
1827         # uploader includes the maintainer
1828         accept = False
1829         for uploader in r.uploaders:
1830             (rfc822, rfc2047, name, email) = uploader.get_split_maintainer()
1831             # Eww - I hope we never have two people with the same name in Debian
1832             if email == fpr.uid.uid or name == fpr.uid.name:
1833                 accept = True
1834                 break
1835
1836         if not accept:
1837             self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
1838             return
1839
1840         ## none of the packages are being taken over from other source packages
1841         for b in self.pkg.changes["binary"].keys():
1842             for suite in self.pkg.changes["distribution"].keys():
1843                 for s in get_source_by_package_and_suite(b, suite, session):
1844                     if s.source != self.pkg.changes["source"]:
1845                         self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s, suite))
1846
1847
1848
1849     def check_transition(self, session):
1850         cnf = Config()
1851
1852         sourcepkg = self.pkg.changes["source"]
1853
1854         # No sourceful upload -> no need to do anything else, direct return
1855         # We also work with unstable uploads, not experimental or those going to some
1856         # proposed-updates queue
1857         if "source" not in self.pkg.changes["architecture"] or \
1858            "unstable" not in self.pkg.changes["distribution"]:
1859             return
1860
1861         # Also only check if there is a file defined (and existant) with
1862         # checks.
1863         transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1864         if transpath == "" or not os.path.exists(transpath):
1865             return
1866
1867         # Parse the yaml file
1868         sourcefile = file(transpath, 'r')
1869         sourcecontent = sourcefile.read()
1870         try:
1871             transitions = yaml.load(sourcecontent)
1872         except yaml.YAMLError, msg:
1873             # This shouldn't happen, there is a wrapper to edit the file which
1874             # checks it, but we prefer to be safe than ending up rejecting
1875             # everything.
1876             utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1877             return
1878
1879         # Now look through all defined transitions
1880         for trans in transitions:
1881             t = transitions[trans]
1882             source = t["source"]
1883             expected = t["new"]
1884
1885             # Will be None if nothing is in testing.
1886             current = get_source_in_suite(source, "testing", session)
1887             if current is not None:
1888                 compare = apt_pkg.VersionCompare(current.version, expected)
1889
1890             if current is None or compare < 0:
1891                 # This is still valid, the current version in testing is older than
1892                 # the new version we wait for, or there is none in testing yet
1893
1894                 # Check if the source we look at is affected by this.
1895                 if sourcepkg in t['packages']:
1896                     # The source is affected, lets reject it.
1897
1898                     rejectmsg = "%s: part of the %s transition.\n\n" % (
1899                         sourcepkg, trans)
1900
1901                     if current is not None:
1902                         currentlymsg = "at version %s" % (current.version)
1903                     else:
1904                         currentlymsg = "not present in testing"
1905
1906                     rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1907
1908                     rejectmsg += "\n".join(textwrap.wrap("""Your package
1909 is part of a testing transition designed to get %s migrated (it is
1910 currently %s, we need version %s).  This transition is managed by the
1911 Release Team, and %s is the Release-Team member responsible for it.
1912 Please mail debian-release@lists.debian.org or contact %s directly if you
1913 need further assistance.  You might want to upload to experimental until this
1914 transition is done."""
1915                             % (source, currentlymsg, expected,t["rm"], t["rm"])))
1916
1917                     self.rejects.append(rejectmsg)
1918                     return
1919
1920     ###########################################################################
1921     # End check_signed_by_key checks
1922     ###########################################################################
1923
1924     def build_summaries(self):
1925         """ Build a summary of changes the upload introduces. """
1926
1927         (byhand, new, summary, override_summary) = self.pkg.file_summary()
1928
1929         short_summary = summary
1930
1931         # This is for direport's benefit...
1932         f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1933
1934         if byhand or new:
1935             summary += "Changes: " + f
1936
1937         summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1938
1939         summary += self.announce(short_summary, 0)
1940
1941         return (summary, short_summary)
1942
1943     ###########################################################################
1944
1945     def close_bugs(self, summary, action):
1946         """
1947         Send mail to close bugs as instructed by the closes field in the changes file.
1948         Also add a line to summary if any work was done.
1949
1950         @type summary: string
1951         @param summary: summary text, as given by L{build_summaries}
1952
1953         @type action: bool
1954         @param action: Set to false no real action will be done.
1955
1956         @rtype: string
1957         @return: summary. If action was taken, extended by the list of closed bugs.
1958
1959         """
1960
1961         template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1962
1963         bugs = self.pkg.changes["closes"].keys()
1964
1965         if not bugs:
1966             return summary
1967
1968         bugs.sort()
1969         summary += "Closing bugs: "
1970         for bug in bugs:
1971             summary += "%s " % (bug)
1972             if action:
1973                 self.update_subst()
1974                 self.Subst["__BUG_NUMBER__"] = bug
1975                 if self.pkg.changes["distribution"].has_key("stable"):
1976                     self.Subst["__STABLE_WARNING__"] = """
1977 Note that this package is not part of the released stable Debian
1978 distribution.  It may have dependencies on other unreleased software,
1979 or other instabilities.  Please take care if you wish to install it.
1980 The update will eventually make its way into the next released Debian
1981 distribution."""
1982                 else:
1983                     self.Subst["__STABLE_WARNING__"] = ""
1984                 mail_message = utils.TemplateSubst(self.Subst, template)
1985                 utils.send_mail(mail_message)
1986
1987                 # Clear up after ourselves
1988                 del self.Subst["__BUG_NUMBER__"]
1989                 del self.Subst["__STABLE_WARNING__"]
1990
1991         if action and self.logger:
1992             self.logger.log(["closing bugs"] + bugs)
1993
1994         summary += "\n"
1995
1996         return summary
1997
1998     ###########################################################################
1999
2000     def announce(self, short_summary, action):
2001         """
2002         Send an announce mail about a new upload.
2003
2004         @type short_summary: string
2005         @param short_summary: Short summary text to include in the mail
2006
2007         @type action: bool
2008         @param action: Set to false no real action will be done.
2009
2010         @rtype: string
2011         @return: Textstring about action taken.
2012
2013         """
2014
2015         cnf = Config()
2016         announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
2017
2018         # Only do announcements for source uploads with a recent dpkg-dev installed
2019         if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
2020            self.pkg.changes["architecture"].has_key("source"):
2021             return ""
2022
2023         lists_done = {}
2024         summary = ""
2025
2026         self.Subst["__SHORT_SUMMARY__"] = short_summary
2027
2028         for dist in self.pkg.changes["distribution"].keys():
2029             suite = get_suite(dist)
2030             if suite is None: continue
2031             announce_list = suite.announce
2032             if announce_list == "" or lists_done.has_key(announce_list):
2033                 continue
2034
2035             lists_done[announce_list] = 1
2036             summary += "Announcing to %s\n" % (announce_list)
2037
2038             if action:
2039                 self.update_subst()
2040                 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
2041                 if cnf.get("Dinstall::TrackingServer") and \
2042                    self.pkg.changes["architecture"].has_key("source"):
2043                     trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
2044                     self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
2045
2046                 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
2047                 utils.send_mail(mail_message)
2048
2049                 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
2050
2051         if cnf.FindB("Dinstall::CloseBugs"):
2052             summary = self.close_bugs(summary, action)
2053
2054         del self.Subst["__SHORT_SUMMARY__"]
2055
2056         return summary
2057
2058     ###########################################################################
2059     @session_wrapper
2060     def accept (self, summary, short_summary, session=None):
2061         """
2062         Accept an upload.
2063
2064         This moves all files referenced from the .changes into the pool,
2065         sends the accepted mail, announces to lists, closes bugs and
2066         also checks for override disparities. If enabled it will write out
2067         the version history for the BTS Version Tracking and will finally call
2068         L{queue_build}.
2069
2070         @type summary: string
2071         @param summary: Summary text
2072
2073         @type short_summary: string
2074         @param short_summary: Short summary
2075         """
2076
2077         cnf = Config()
2078         stats = SummaryStats()
2079
2080         print "Installing."
2081         self.logger.log(["installing changes", self.pkg.changes_file])
2082
2083         binaries = []
2084         poolfiles = []
2085
2086         # Add the .dsc file to the DB first
2087         for newfile, entry in self.pkg.files.items():
2088             if entry["type"] == "dsc":
2089                 source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
2090                 for j in pfs:
2091                     poolfiles.append(j)
2092
2093         # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
2094         for newfile, entry in self.pkg.files.items():
2095             if entry["type"] == "deb":
2096                 b, pf = add_deb_to_db(self, newfile, session)
2097                 binaries.append(b)
2098                 poolfiles.append(pf)
2099
2100         # If this is a sourceful diff only upload that is moving
2101         # cross-component we need to copy the .orig files into the new
2102         # component too for the same reasons as above.
2103         # XXX: mhy: I think this should be in add_dsc_to_db
2104         if self.pkg.changes["architecture"].has_key("source"):
2105             for orig_file in self.pkg.orig_files.keys():
2106                 if not self.pkg.orig_files[orig_file].has_key("id"):
2107                     continue # Skip if it's not in the pool
2108                 orig_file_id = self.pkg.orig_files[orig_file]["id"]
2109                 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
2110                     continue # Skip if the location didn't change
2111
2112                 # Do the move
2113                 oldf = get_poolfile_by_id(orig_file_id, session)
2114                 old_filename = os.path.join(oldf.location.path, oldf.filename)
2115                 old_dat = {'size': oldf.filesize,   'md5sum': oldf.md5sum,
2116                            'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
2117
2118                 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
2119
2120                 # TODO: Care about size/md5sum collisions etc
2121                 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
2122
2123                 # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
2124                 if newf is None:
2125                     utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
2126                     newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
2127
2128                     session.flush()
2129
2130                     # Don't reference the old file from this changes
2131                     for p in poolfiles:
2132                         if p.file_id == oldf.file_id:
2133                             poolfiles.remove(p)
2134
2135                     poolfiles.append(newf)
2136
2137                     # Fix up the DSC references
2138                     toremove = []
2139
2140                     for df in source.srcfiles:
2141                         if df.poolfile.file_id == oldf.file_id:
2142                             # Add a new DSC entry and mark the old one for deletion
2143                             # Don't do it in the loop so we don't change the thing we're iterating over
2144                             newdscf = DSCFile()
2145                             newdscf.source_id = source.source_id
2146                             newdscf.poolfile_id = newf.file_id
2147                             session.add(newdscf)
2148
2149                             toremove.append(df)
2150
2151                     for df in toremove:
2152                         session.delete(df)
2153
2154                     # Flush our changes
2155                     session.flush()
2156
2157                     # Make sure that our source object is up-to-date
2158                     session.expire(source)
2159
2160         # Add changelog information to the database
2161         self.store_changelog()
2162
2163         # Install the files into the pool
2164         for newfile, entry in self.pkg.files.items():
2165             destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
2166             utils.move(newfile, destination)
2167             self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
2168             stats.accept_bytes += float(entry["size"])
2169
2170         # Copy the .changes file across for suite which need it.
2171         copy_changes = dict([(x.copychanges, '')
2172                              for x in session.query(Suite).filter(Suite.suite_name.in_(self.pkg.changes["distribution"].keys())).all()
2173                              if x.copychanges is not None])
2174
2175         for dest in copy_changes.keys():
2176             utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
2177
2178         # We're done - commit the database changes
2179         session.commit()
2180         # Our SQL session will automatically start a new transaction after
2181         # the last commit
2182
2183         # Now ensure that the metadata has been added
2184         # This has to be done after we copy the files into the pool
2185         # For source if we have it:
2186         if self.pkg.changes["architecture"].has_key("source"):
2187             import_metadata_into_db(source, session)
2188
2189         # Now for any of our binaries
2190         for b in binaries:
2191             import_metadata_into_db(b, session)
2192
2193         session.commit()
2194
2195         # Move the .changes into the 'done' directory
2196         ye, mo, da = time.gmtime()[0:3]
2197         donedir = os.path.join(cnf["Dir::Queue::Done"], str(ye), "%0.2d" % mo, "%0.2d" % da)
2198         if not os.path.isdir(donedir):
2199             os.makedirs(donedir)
2200
2201         utils.move(self.pkg.changes_file,
2202                    os.path.join(donedir, os.path.basename(self.pkg.changes_file)))
2203
2204         if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
2205             UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
2206
2207         self.update_subst()
2208         self.Subst["__SUMMARY__"] = summary
2209         mail_message = utils.TemplateSubst(self.Subst,
2210                                            os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
2211         utils.send_mail(mail_message)
2212         self.announce(short_summary, 1)
2213
2214         ## Helper stuff for DebBugs Version Tracking
2215         if cnf.Find("Dir::Queue::BTSVersionTrack"):
2216             if self.pkg.changes["architecture"].has_key("source"):
2217                 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2218                 version_history = os.fdopen(fd, 'w')
2219                 version_history.write(self.pkg.dsc["bts changelog"])
2220                 version_history.close()
2221                 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2222                                       self.pkg.changes_file[:-8]+".versions")
2223                 os.rename(temp_filename, filename)
2224                 os.chmod(filename, 0644)
2225
2226             # Write out the binary -> source mapping.
2227             (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2228             debinfo = os.fdopen(fd, 'w')
2229             for name, entry in sorted(self.pkg.files.items()):
2230                 if entry["type"] == "deb":
2231                     line = " ".join([entry["package"], entry["version"],
2232                                      entry["architecture"], entry["source package"],
2233                                      entry["source version"]])
2234                     debinfo.write(line+"\n")
2235             debinfo.close()
2236             filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2237                                   self.pkg.changes_file[:-8]+".debinfo")
2238             os.rename(temp_filename, filename)
2239             os.chmod(filename, 0644)
2240
2241         session.commit()
2242
2243         # Set up our copy queues (e.g. buildd queues)
2244         for suite_name in self.pkg.changes["distribution"].keys():
2245             suite = get_suite(suite_name, session)
2246             for q in suite.copy_queues:
2247                 for f in poolfiles:
2248                     q.add_file_from_pool(f)
2249
2250         session.commit()
2251
2252         # Finally...
2253         stats.accept_count += 1
2254
2255     def check_override(self):
2256         """
2257         Checks override entries for validity. Mails "Override disparity" warnings,
2258         if that feature is enabled.
2259
2260         Abandons the check if
2261           - override disparity checks are disabled
2262           - mail sending is disabled
2263         """
2264
2265         cnf = Config()
2266
2267         # Abandon the check if override disparity checks have been disabled
2268         if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
2269             return
2270
2271         summary = self.pkg.check_override()
2272
2273         if summary == "":
2274             return
2275
2276         overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2277
2278         self.update_subst()
2279         self.Subst["__SUMMARY__"] = summary
2280         mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2281         utils.send_mail(mail_message)
2282         del self.Subst["__SUMMARY__"]
2283
2284     ###########################################################################
2285
2286     def remove(self, from_dir=None):
2287         """
2288         Used (for instance) in p-u to remove the package from unchecked
2289
2290         Also removes the package from holding area.
2291         """
2292         if from_dir is None:
2293             from_dir = self.pkg.directory
2294         h = Holding()
2295
2296         for f in self.pkg.files.keys():
2297             os.unlink(os.path.join(from_dir, f))
2298             if os.path.exists(os.path.join(h.holding_dir, f)):
2299                 os.unlink(os.path.join(h.holding_dir, f))
2300
2301         os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2302         if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2303             os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2304
2305     ###########################################################################
2306
2307     def move_to_queue (self, queue):
2308         """
2309         Move files to a destination queue using the permissions in the table
2310         """
2311         h = Holding()
2312         utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2313                    queue.path, perms=int(queue.change_perms, 8))
2314         for f in self.pkg.files.keys():
2315             utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
2316
2317     ###########################################################################
2318
2319     def force_reject(self, reject_files):
2320         """
2321         Forcefully move files from the current directory to the
2322         reject directory.  If any file already exists in the reject
2323         directory it will be moved to the morgue to make way for
2324         the new file.
2325
2326         @type reject_files: dict
2327         @param reject_files: file dictionary
2328
2329         """
2330
2331         cnf = Config()
2332
2333         for file_entry in reject_files:
2334             # Skip any files which don't exist or which we don't have permission to copy.
2335             if os.access(file_entry, os.R_OK) == 0:
2336                 continue
2337
2338             dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
2339
2340             try:
2341                 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
2342             except OSError, e:
2343                 # File exists?  Let's find a new name by adding a number
2344                 if e.errno == errno.EEXIST:
2345                     try:
2346                         dest_file = utils.find_next_free(dest_file, 255)
2347                     except NoFreeFilenameError:
2348                         # Something's either gone badly Pete Tong, or
2349                         # someone is trying to exploit us.
2350                         utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))
2351                         return
2352
2353                     # Make sure we really got it
2354                     try:
2355                         dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2356                     except OSError, e:
2357                         # Likewise
2358                         utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
2359                         return
2360                 else:
2361                     raise
2362             # If we got here, we own the destination file, so we can
2363             # safely overwrite it.
2364             utils.move(file_entry, dest_file, 1, perms=0660)
2365             os.close(dest_fd)
2366
2367     ###########################################################################
2368     def do_reject (self, manual=0, reject_message="", notes=""):
2369         """
2370         Reject an upload. If called without a reject message or C{manual} is
2371         true, spawn an editor so the user can write one.
2372
2373         @type manual: bool
2374         @param manual: manual or automated rejection
2375
2376         @type reject_message: string
2377         @param reject_message: A reject message
2378
2379         @return: 0
2380
2381         """
2382         # If we weren't given a manual rejection message, spawn an
2383         # editor so the user can add one in...
2384         if manual and not reject_message:
2385             (fd, temp_filename) = utils.temp_filename()
2386             temp_file = os.fdopen(fd, 'w')
2387             if len(notes) > 0:
2388                 for note in notes:
2389                     temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
2390                                     % (note.author, note.version, note.notedate, note.comment))
2391             temp_file.close()
2392             editor = os.environ.get("EDITOR","vi")
2393             answer = 'E'
2394             while answer == 'E':
2395                 os.system("%s %s" % (editor, temp_filename))
2396                 temp_fh = utils.open_file(temp_filename)
2397                 reject_message = "".join(temp_fh.readlines())
2398                 temp_fh.close()
2399                 print "Reject message:"
2400                 print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
2401                 prompt = "[R]eject, Edit, Abandon, Quit ?"
2402                 answer = "XXX"
2403                 while prompt.find(answer) == -1:
2404                     answer = utils.our_raw_input(prompt)
2405                     m = re_default_answer.search(prompt)
2406                     if answer == "":
2407                         answer = m.group(1)
2408                     answer = answer[:1].upper()
2409             os.unlink(temp_filename)
2410             if answer == 'A':
2411                 return 1
2412             elif answer == 'Q':
2413                 sys.exit(0)
2414
2415         print "Rejecting.\n"
2416
2417         cnf = Config()
2418
2419         reason_filename = self.pkg.changes_file[:-8] + ".reason"
2420         reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2421
2422         # Move all the files into the reject directory
2423         reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2424         self.force_reject(reject_files)
2425
2426         # If we fail here someone is probably trying to exploit the race
2427         # so let's just raise an exception ...
2428         if os.path.exists(reason_filename):
2429             os.unlink(reason_filename)
2430         reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2431
2432         rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
2433
2434         self.update_subst()
2435         if not manual:
2436             self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2437             self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2438             self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2439             os.write(reason_fd, reject_message)
2440             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2441         else:
2442             # Build up the rejection email
2443             user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2444             self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2445             self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2446             self.Subst["__REJECT_MESSAGE__"] = ""
2447             self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2448             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2449             # Write the rejection email out as the <foo>.reason file
2450             os.write(reason_fd, reject_mail_message)
2451
2452         del self.Subst["__REJECTOR_ADDRESS__"]
2453         del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2454         del self.Subst["__CC__"]
2455
2456         os.close(reason_fd)
2457
2458         # Send the rejection mail
2459         utils.send_mail(reject_mail_message)
2460
2461         if self.logger:
2462             self.logger.log(["rejected", self.pkg.changes_file])
2463
2464         return 0
2465
2466     ################################################################################
2467     def in_override_p(self, package, component, suite, binary_type, filename, session):
2468         """
2469         Check if a package already has override entries in the DB
2470
2471         @type package: string
2472         @param package: package name
2473
2474         @type component: string
2475         @param component: database id of the component
2476
2477         @type suite: int
2478         @param suite: database id of the suite
2479
2480         @type binary_type: string
2481         @param binary_type: type of the package
2482
2483         @type filename: string
2484         @param filename: filename we check
2485
2486         @return: the database result. But noone cares anyway.
2487
2488         """
2489
2490         cnf = Config()
2491
2492         if binary_type == "": # must be source
2493             file_type = "dsc"
2494         else:
2495             file_type = binary_type
2496
2497         # Override suite name; used for example with proposed-updates
2498         oldsuite = get_suite(suite, session)
2499         if (not oldsuite is None) and oldsuite.overridesuite:
2500             suite = oldsuite.overridesuite
2501
2502         result = get_override(package, suite, component, file_type, session)
2503
2504         # If checking for a source package fall back on the binary override type
2505         if file_type == "dsc" and len(result) < 1:
2506             result = get_override(package, suite, component, ['deb', 'udeb'], session)
2507
2508         # Remember the section and priority so we can check them later if appropriate
2509         if len(result) > 0:
2510             result = result[0]
2511             self.pkg.files[filename]["override section"] = result.section.section
2512             self.pkg.files[filename]["override priority"] = result.priority.priority
2513             return result
2514
2515         return None
2516
2517     ################################################################################
2518     def get_anyversion(self, sv_list, suite):
2519         """
2520         @type sv_list: list
2521         @param sv_list: list of (suite, version) tuples to check
2522
2523         @type suite: string
2524         @param suite: suite name
2525
2526         Description: TODO
2527         """
2528         Cnf = Config()
2529         anyversion = None
2530         anysuite = [suite] + [ vc.reference.suite_name for vc in get_version_checks(suite, "Enhances") ]
2531         for (s, v) in sv_list:
2532             if s in [ x.lower() for x in anysuite ]:
2533                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2534                     anyversion = v
2535
2536         return anyversion
2537
2538     ################################################################################
2539
2540     def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2541         """
2542         @type sv_list: list
2543         @param sv_list: list of (suite, version) tuples to check
2544
2545         @type filename: string
2546         @param filename: XXX
2547
2548         @type new_version: string
2549         @param new_version: XXX
2550
2551         Ensure versions are newer than existing packages in target
2552         suites and that cross-suite version checking rules as
2553         set out in the conf file are satisfied.
2554         """
2555
2556         cnf = Config()
2557
2558         # Check versions for each target suite
2559         for target_suite in self.pkg.changes["distribution"].keys():
2560             must_be_newer_than = [ vc.reference.suite_name for vc in get_version_checks(target_suite, "MustBeNewerThan") ]
2561             must_be_older_than = [ vc.reference.suite_name for vc in get_version_checks(target_suite, "MustBeOlderThan") ]
2562
2563             # Enforce "must be newer than target suite" even if conffile omits it
2564             if target_suite not in must_be_newer_than:
2565                 must_be_newer_than.append(target_suite)
2566
2567             for (suite, existent_version) in sv_list:
2568                 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
2569
2570                 if suite in must_be_newer_than and sourceful and vercmp < 1:
2571                     self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2572
2573                 if suite in must_be_older_than and vercmp > -1:
2574                     cansave = 0
2575
2576                     if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2577                         # we really use the other suite, ignoring the conflicting one ...
2578                         addsuite = self.pkg.changes["distribution-version"][suite]
2579
2580                         add_version = self.get_anyversion(sv_list, addsuite)
2581                         target_version = self.get_anyversion(sv_list, target_suite)
2582
2583                         if not add_version:
2584                             # not add_version can only happen if we map to a suite
2585                             # that doesn't enhance the suite we're propup'ing from.
2586                             # so "propup-ver x a b c; map a d" is a problem only if
2587                             # d doesn't enhance a.
2588                             #
2589                             # i think we could always propagate in this case, rather
2590                             # than complaining. either way, this isn't a REJECT issue
2591                             #
2592                             # And - we really should complain to the dorks who configured dak
2593                             self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite))
2594                             self.pkg.changes.setdefault("propdistribution", {})
2595                             self.pkg.changes["propdistribution"][addsuite] = 1
2596                             cansave = 1
2597                         elif not target_version:
2598                             # not targets_version is true when the package is NEW
2599                             # we could just stick with the "...old version..." REJECT
2600                             # for this, I think.
2601                             self.rejects.append("Won't propogate NEW packages.")
2602                         elif apt_pkg.VersionCompare(new_version, add_version) < 0:
2603                             # propogation would be redundant. no need to reject though.
2604                             self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2605                             cansave = 1
2606                         elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2607                              apt_pkg.VersionCompare(add_version, target_version) >= 0:
2608                             # propogate!!
2609                             self.warnings.append("Propogating upload to %s" % (addsuite))
2610                             self.pkg.changes.setdefault("propdistribution", {})
2611                             self.pkg.changes["propdistribution"][addsuite] = 1
2612                             cansave = 1
2613
2614                     if not cansave:
2615                         self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2616
2617     ################################################################################
2618     def check_binary_against_db(self, filename, session):
2619         # Ensure version is sane
2620         self.cross_suite_version_check( \
2621             get_suite_version_by_package(self.pkg.files[filename]["package"], \
2622                 self.pkg.files[filename]["architecture"], session),
2623             filename, self.pkg.files[filename]["version"], sourceful=False)
2624
2625         # Check for any existing copies of the file
2626         q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2627         q = q.filter_by(version=self.pkg.files[filename]["version"])
2628         q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2629
2630         if q.count() > 0:
2631             self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
2632
2633     ################################################################################
2634
2635     def check_source_against_db(self, filename, session):
2636         source = self.pkg.dsc.get("source")
2637         version = self.pkg.dsc.get("version")
2638
2639         # Ensure version is sane
2640         self.cross_suite_version_check( \
2641             get_suite_version_by_source(source, session), filename, version,
2642             sourceful=True)
2643
2644     ################################################################################
2645     def check_dsc_against_db(self, filename, session):
2646         """
2647
2648         @warning: NB: this function can remove entries from the 'files' index [if
2649          the orig tarball is a duplicate of the one in the archive]; if
2650          you're iterating over 'files' and call this function as part of
2651          the loop, be sure to add a check to the top of the loop to
2652          ensure you haven't just tried to dereference the deleted entry.
2653
2654         """
2655
2656         Cnf = Config()
2657         self.pkg.orig_files = {} # XXX: do we need to clear it?
2658         orig_files = self.pkg.orig_files
2659
2660         # Try and find all files mentioned in the .dsc.  This has
2661         # to work harder to cope with the multiple possible
2662         # locations of an .orig.tar.gz.
2663         # The ordering on the select is needed to pick the newest orig
2664         # when it exists in multiple places.
2665         for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2666             found = None
2667             if self.pkg.files.has_key(dsc_name):
2668                 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2669                 actual_size = int(self.pkg.files[dsc_name]["size"])
2670                 found = "%s in incoming" % (dsc_name)
2671
2672                 # Check the file does not already exist in the archive
2673                 ql = get_poolfile_like_name(dsc_name, session)
2674
2675                 # Strip out anything that isn't '%s' or '/%s$'
2676                 for i in ql:
2677                     if not i.filename.endswith(dsc_name):
2678                         ql.remove(i)
2679
2680                 # "[dak] has not broken them.  [dak] has fixed a
2681                 # brokenness.  Your crappy hack exploited a bug in
2682                 # the old dinstall.
2683                 #
2684                 # "(Come on!  I thought it was always obvious that
2685                 # one just doesn't release different files with
2686                 # the same name and version.)"
2687                 #                        -- ajk@ on d-devel@l.d.o
2688
2689                 if len(ql) > 0:
2690                     # Ignore exact matches for .orig.tar.gz
2691                     match = 0
2692                     if re_is_orig_source.match(dsc_name):
2693                         for i in ql:
2694                             if self.pkg.files.has_key(dsc_name) and \
2695                                int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2696                                self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2697                                 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2698                                 # TODO: Don't delete the entry, just mark it as not needed
2699                                 # This would fix the stupidity of changing something we often iterate over
2700                                 # whilst we're doing it
2701                                 del self.pkg.files[dsc_name]
2702                                 dsc_entry["files id"] = i.file_id
2703                                 if not orig_files.has_key(dsc_name):
2704                                     orig_files[dsc_name] = {}
2705                                 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2706                                 match = 1
2707
2708                                 # Don't bitch that we couldn't find this file later
2709                                 try:
2710                                     self.later_check_files.remove(dsc_name)
2711                                 except ValueError:
2712                                     pass
2713
2714
2715                     if not match:
2716                         self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2717
2718             elif re_is_orig_source.match(dsc_name):
2719                 # Check in the pool
2720                 ql = get_poolfile_like_name(dsc_name, session)
2721
2722                 # Strip out anything that isn't '%s' or '/%s$'
2723                 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2724                 for i in ql:
2725                     if not i.filename.endswith(dsc_name):
2726                         ql.remove(i)
2727
2728                 if len(ql) > 0:
2729                     # Unfortunately, we may get more than one match here if,
2730                     # for example, the package was in potato but had an -sa
2731                     # upload in woody.  So we need to choose the right one.
2732
2733                     # default to something sane in case we don't match any or have only one
2734                     x = ql[0]
2735
2736                     if len(ql) > 1:
2737                         for i in ql:
2738                             old_file = os.path.join(i.location.path, i.filename)
2739                             old_file_fh = utils.open_file(old_file)
2740                             actual_md5 = apt_pkg.md5sum(old_file_fh)
2741                             old_file_fh.close()
2742                             actual_size = os.stat(old_file)[stat.ST_SIZE]
2743                             if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2744                                 x = i
2745
2746                     old_file = os.path.join(i.location.path, i.filename)
2747                     old_file_fh = utils.open_file(old_file)
2748                     actual_md5 = apt_pkg.md5sum(old_file_fh)
2749                     old_file_fh.close()
2750                     actual_size = os.stat(old_file)[stat.ST_SIZE]
2751                     found = old_file
2752                     suite_type = x.location.archive_type
2753                     # need this for updating dsc_files in install()
2754                     dsc_entry["files id"] = x.file_id
2755                     # See install() in process-accepted...
2756                     if not orig_files.has_key(dsc_name):
2757                         orig_files[dsc_name] = {}
2758                     orig_files[dsc_name]["id"] = x.file_id
2759                     orig_files[dsc_name]["path"] = old_file
2760                     orig_files[dsc_name]["location"] = x.location.location_id
2761                 else:
2762                     # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2763                     # Not there? Check the queue directories...
2764                     for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2765                         if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2766                             continue
2767                         in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2768                         if os.path.exists(in_otherdir):
2769                             in_otherdir_fh = utils.open_file(in_otherdir)
2770                             actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2771                             in_otherdir_fh.close()
2772                             actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2773                             found = in_otherdir
2774                             if not orig_files.has_key(dsc_name):
2775                                 orig_files[dsc_name] = {}
2776                             orig_files[dsc_name]["path"] = in_otherdir
2777
2778                     if not found:
2779                         self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2780                         continue
2781             else:
2782                 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2783                 continue
2784             if actual_md5 != dsc_entry["md5sum"]:
2785                 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2786             if actual_size != int(dsc_entry["size"]):
2787                 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2788
2789     ################################################################################
2790     # This is used by process-new and process-holding to recheck a changes file
2791     # at the time we're running.  It mainly wraps various other internal functions
2792     # and is similar to accepted_checks - these should probably be tidied up
2793     # and combined
2794     def recheck(self, session):
2795         cnf = Config()
2796         for f in self.pkg.files.keys():
2797             # The .orig.tar.gz can disappear out from under us is it's a
2798             # duplicate of one in the archive.
2799             if not self.pkg.files.has_key(f):
2800                 continue
2801
2802             entry = self.pkg.files[f]
2803
2804             # Check that the source still exists
2805             if entry["type"] == "deb":
2806                 source_version = entry["source version"]
2807                 source_package = entry["source package"]
2808                 if not self.pkg.changes["architecture"].has_key("source") \
2809                    and not source_exists(source_package, source_version, \
2810                     suites = self.pkg.changes["distribution"].keys(), session = session):
2811                     source_epochless_version = re_no_epoch.sub('', source_version)
2812                     dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2813                     found = False
2814                     for q in ["Embargoed", "Unembargoed", "Newstage"]:
2815                         if cnf.has_key("Dir::Queue::%s" % (q)):
2816                             if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
2817                                 found = True
2818                     if not found:
2819                         self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2820
2821             # Version and file overwrite checks
2822             if entry["type"] == "deb":
2823                 self.check_binary_against_db(f, session)
2824             elif entry["type"] == "dsc":
2825                 self.check_source_against_db(f, session)
2826                 self.check_dsc_against_db(f, session)
2827
2828     ################################################################################
2829     def accepted_checks(self, overwrite_checks, session):
2830         # Recheck anything that relies on the database; since that's not
2831         # frozen between accept and our run time when called from p-a.
2832
2833         # overwrite_checks is set to False when installing to stable/oldstable
2834
2835         propogate={}
2836         nopropogate={}
2837
2838         # Find the .dsc (again)
2839         dsc_filename = None
2840         for f in self.pkg.files.keys():
2841             if self.pkg.files[f]["type"] == "dsc":
2842                 dsc_filename = f
2843
2844         for checkfile in self.pkg.files.keys():
2845             # The .orig.tar.gz can disappear out from under us is it's a
2846             # duplicate of one in the archive.
2847             if not self.pkg.files.has_key(checkfile):
2848                 continue
2849
2850             entry = self.pkg.files[checkfile]
2851
2852             # Check that the source still exists
2853             if entry["type"] == "deb":
2854                 source_version = entry["source version"]
2855                 source_package = entry["source package"]
2856                 if not self.pkg.changes["architecture"].has_key("source") \
2857                    and not source_exists(source_package, source_version, \
2858                     suites = self.pkg.changes["distribution"].keys(), \
2859                     session = session):
2860                     self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2861
2862             # Version and file overwrite checks
2863             if overwrite_checks:
2864                 if entry["type"] == "deb":
2865                     self.check_binary_against_db(checkfile, session)
2866                 elif entry["type"] == "dsc":
2867                     self.check_source_against_db(checkfile, session)
2868                     self.check_dsc_against_db(dsc_filename, session)
2869
2870             # propogate in the case it is in the override tables:
2871             for suite in self.pkg.changes.get("propdistribution", {}).keys():
2872                 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2873                     propogate[suite] = 1
2874                 else:
2875                     nopropogate[suite] = 1
2876
2877         for suite in propogate.keys():
2878             if suite in nopropogate:
2879                 continue
2880             self.pkg.changes["distribution"][suite] = 1
2881
2882         for checkfile in self.pkg.files.keys():
2883             # Check the package is still in the override tables
2884             for suite in self.pkg.changes["distribution"].keys():
2885                 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2886                     self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2887
2888     ################################################################################
2889     # If any file of an upload has a recent mtime then chances are good
2890     # the file is still being uploaded.
2891
2892     def upload_too_new(self):
2893         cnf = Config()
2894         too_new = False
2895         # Move back to the original directory to get accurate time stamps
2896         cwd = os.getcwd()
2897         os.chdir(self.pkg.directory)
2898         file_list = self.pkg.files.keys()
2899         file_list.extend(self.pkg.dsc_files.keys())
2900         file_list.append(self.pkg.changes_file)
2901         for f in file_list:
2902             try:
2903                 last_modified = time.time()-os.path.getmtime(f)
2904                 if last_modified < int(cnf["Dinstall::SkipTime"]):
2905                     too_new = True
2906                     break
2907             except:
2908                 pass
2909
2910         os.chdir(cwd)
2911         return too_new
2912
2913     def store_changelog(self):
2914
2915         # Skip binary-only upload if it is not a bin-NMU
2916         if not self.pkg.changes['architecture'].has_key('source'):
2917             from daklib.regexes import re_bin_only_nmu
2918             if not re_bin_only_nmu.search(self.pkg.changes['version']):
2919                 return
2920
2921         session = DBConn().session()
2922
2923         # Check if upload already has a changelog entry
2924         query = """SELECT changelog_id FROM changes WHERE source = :source
2925                    AND version = :version AND architecture = :architecture AND changelog_id != 0"""
2926         if session.execute(query, {'source': self.pkg.changes['source'], \
2927                                    'version': self.pkg.changes['version'], \
2928                                    'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
2929             session.commit()
2930             return
2931
2932         # Add current changelog text into changelogs_text table, return created ID
2933         query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
2934         ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]
2935
2936         # Link ID to the upload available in changes table
2937         query = """UPDATE changes SET changelog_id = :id WHERE source = :source
2938                    AND version = :version AND architecture = :architecture"""
2939         session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
2940                                 'version': self.pkg.changes['version'], \
2941                                 'architecture': " ".join(self.pkg.changes['architecture'].keys())})
2942
2943         session.commit()