1 #!/usr/bin/env python
2 # vim:set et sw=4:
3
4 """
5 Queue utility functions for dak
6
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009, 2010  Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
11 """
12
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
17
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
21 # GNU General Public License for more details.
22
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
26
27 ###############################################################################
28
29 import errno
30 import os
31 import stat
32 import sys
33 import time
34 import apt_inst
35 import apt_pkg
36 import utils
37 import commands
38 import shutil
39 import textwrap
40 from types import *
41 from sqlalchemy.sql.expression import desc
42 from sqlalchemy.orm.exc import NoResultFound
43
44 import yaml
45
46 from dak_exceptions import *
47 from changes import *
48 from regexes import *
49 from config import Config
50 from holding import Holding
51 from urgencylog import UrgencyLog
52 from dbconn import *
53 from summarystats import SummaryStats
54 from utils import parse_changes, check_dsc_files, build_package_set
55 from textutils import fix_maintainer
56 from lintian import parse_lintian_output, generate_reject_messages
57 from contents import UnpackedSource
58
59 # suppress some deprecation warnings in squeeze related to apt_pkg
60 # module
61 import warnings
62 warnings.filterwarnings('ignore', \
63     "apt_pkg.ParseSection\(\) is deprecated. Please see apt_pkg\.TagSection\(\) for the replacement\.", \
64     DeprecationWarning)
65 warnings.filterwarnings('ignore', \
66     "Attribute 'Find' of the 'apt_pkg\.TagSection' object is deprecated, use 'find' instead\.", \
67     DeprecationWarning)
68
69 ###############################################################################
70
71 def get_type(f, session):
72     """
73     Get the file type of C{f}
74
75     @type f: dict
76     @param f: file entry from Changes object
77
78     @type session: SQLA Session
79     @param session: SQL Alchemy session object
80
81     @rtype: string
82     @return: filetype
83
84     """
85     # Determine the type
86     if f.has_key("dbtype"):
87         file_type = f["dbtype"]
88     elif re_source_ext.match(f["type"]):
89         file_type = "dsc"
90     elif f['architecture'] == 'source' and f["type"] == 'unreadable':
91         utils.warn('unreadable source file (will continue and hope for the best)')
92         return f["type"]
93     else:
94         file_type = f["type"]
95         utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (file_type))
96
97     # Validate the override type
98     type_id = get_override_type(file_type, session)
99     if type_id is None:
100         utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))
101
102     return file_type
103
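# Illustrative sketch (not part of the original module): typical use of
# get_type() from file-walking code; the upload object and session below
# are hypothetical.
#
#   session = DBConn().session()
#   for name, f in upload.pkg.files.items():
#       file_type = get_type(f, session)    # e.g. "deb", "udeb" or "dsc"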
104 ################################################################################
105
106 # Determine what parts in a .changes are NEW
107
108 def determine_new(filename, changes, files, warn=1, session = None, dsc = None, new = None):
109     """
110     Determine what parts in a C{changes} file are NEW.
111
112     @type filename: str
113     @param filename: changes filename
114
115     @type changes: Upload.Pkg.changes dict
116     @param changes: Changes dictionary
117
118     @type files: Upload.Pkg.files dict
119     @param files: Files dictionary
120
121     @type warn: bool
122     @param warn: Warn if overrides are added for (old)stable
123
124     @type dsc: Upload.Pkg.dsc dict
125     @param dsc: (optional) Dsc dictionary
126
127     @type new: dict
128     @param new: new packages as returned by a previous call to this function (override information may have changed); defaults to None, which starts a fresh dict
129
130     @rtype: tuple
131     @return: tuple of (dictionary of NEW components, dictionary of byhand files)
132
133     """
134     # TODO: This should all use the database instead of parsing the changes
135     # file again
136     byhand = {}
137     if new is None: new = {}
137
138     dbchg = get_dbchange(filename, session)
139     if dbchg is None:
140         print "Warning: cannot find changes file in database; won't check byhand"
141
142     # Try to get the Package-Set field from an included .dsc file (if possible).
143     if dsc:
144         for package, entry in build_package_set(dsc, session).items():
145             if not new.has_key(package):
146                 new[package] = entry
147
148     # Build up a list of potentially new things
149     for name, f in files.items():
150         # Keep a record of byhand elements
151         if f["section"] == "byhand":
152             byhand[name] = 1
153             continue
154
155         pkg = f["package"]
156         priority = f["priority"]
157         section = f["section"]
158         file_type = get_type(f, session)
159         component = f["component"]
160
161         if file_type == "dsc":
162             priority = "source"
163
164         if not new.has_key(pkg):
165             new[pkg] = {}
166             new[pkg]["priority"] = priority
167             new[pkg]["section"] = section
168             new[pkg]["type"] = file_type
169             new[pkg]["component"] = component
170             new[pkg]["files"] = []
171         else:
172             old_type = new[pkg]["type"]
173             if old_type != file_type:
174                 # source gets trumped by deb or udeb
175                 if old_type == "dsc":
176                     new[pkg]["priority"] = priority
177                     new[pkg]["section"] = section
178                     new[pkg]["type"] = file_type
179                     new[pkg]["component"] = component
180
181         new[pkg]["files"].append(name)
182
183         if f.has_key("othercomponents"):
184             new[pkg]["othercomponents"] = f["othercomponents"]
185
186     # Fix up the list of target suites
187     cnf = Config()
188     for suite in changes["suite"].keys():
189         oldsuite = get_suite(suite, session)
190         if not oldsuite:
191             print "WARNING: Invalid suite %s found" % suite
192             continue
193
194         if oldsuite.overridesuite:
195             newsuite = get_suite(oldsuite.overridesuite, session)
196
197             if newsuite:
198                 print "INFORMATION: Using overrides from suite %s instead of suite %s" % (
199                     oldsuite.overridesuite, suite)
200                 del changes["suite"][suite]
201                 changes["suite"][oldsuite.overridesuite] = 1
202             else:
203                 print "WARNING: Told to use overridesuite %s for %s but it doesn't exist.  Bugger" % (
204                     oldsuite.overridesuite, suite)
205
206     # Check for unprocessed byhand files
207     if dbchg is not None:
208         for b in byhand.keys():
209             # Find the file entry in the database
210             found = False
211             for f in dbchg.files:
212                 if f.filename == b:
213                     found = True
214                     # If it's processed, we can ignore it
215                     if f.processed:
216                         del byhand[b]
217                     break
218
219             if not found:
220                 print "Warning: Couldn't find BYHAND item %s in the database; assuming unprocessed" % (b)
221
222     # Check for new stuff
223     for suite in changes["suite"].keys():
224         for pkg in new.keys():
225             ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
226             if len(ql) > 0:
227                 for file_entry in new[pkg]["files"]:
228                     if files[file_entry].has_key("new"):
229                         del files[file_entry]["new"]
230                 del new[pkg]
231
232     if warn:
233         for s in ['stable', 'oldstable']:
234             if changes["suite"].has_key(s):
235                 print "WARNING: overrides will be added for %s!" % s
236         for pkg in new.keys():
237             if new[pkg].has_key("othercomponents"):
238                 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
239
240     return new, byhand
241
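# Illustrative sketch (not part of the original module): determine_new()
# returns a (new, byhand) tuple; a caller like 'dak process-new' might do
# something along these lines (the upload object 'u' is hypothetical):
#
#   session = DBConn().session()
#   new, byhand = determine_new(u.pkg.changes_file, u.pkg.changes,
#                               u.pkg.files, session=session, dsc=u.pkg.dsc)
#   for pkg in new.keys():
#       print "NEW: %s (%s/%s, %s)" % (pkg, new[pkg]["component"],
#                                      new[pkg]["section"], new[pkg]["type"])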
242 ################################################################################
243
244 def check_valid(new, session = None):
245     """
246     Check if section and priority for NEW packages exist in database.
247     Additionally does sanity checks:
248       - debian-installer packages have to be udeb (or source)
249       - non debian-installer packages can not be udeb
250       - source priority can only be assigned to dsc file types
251
252     @type new: dict
253     @param new: Dict of new packages with their section, priority and type.
254
255     """
256     for pkg in new.keys():
257         section_name = new[pkg]["section"]
258         priority_name = new[pkg]["priority"]
259         file_type = new[pkg]["type"]
260
261         section = get_section(section_name, session)
262         if section is None:
263             new[pkg]["section id"] = -1
264         else:
265             new[pkg]["section id"] = section.section_id
266
267         priority = get_priority(priority_name, session)
268         if priority is None:
269             new[pkg]["priority id"] = -1
270         else:
271             new[pkg]["priority id"] = priority.priority_id
272
273         # Sanity checks
274         di = section_name.find("debian-installer") != -1
275
276         # If d-i, we must be udeb and vice-versa
277         if     (di and file_type not in ("udeb", "dsc")) or \
278            (not di and file_type == "udeb"):
279             new[pkg]["section id"] = -1
280
281         # If dsc we need to be source and vice-versa
282         if (priority_name == "source" and file_type != "dsc") or \
283            (priority_name != "source" and file_type == "dsc"):
284             new[pkg]["priority id"] = -1
285
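# Illustrative sketch (not part of the original module): after check_valid()
# has run, entries failing the sanity checks carry an id of -1, which callers
# can test; 'new' below is the dict returned by determine_new():
#
#   check_valid(new, session=session)
#   broken = [p for p in new.keys()
#             if new[p]["section id"] == -1 or new[p]["priority id"] == -1]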
286 ###############################################################################
287
288 # Used by Upload.check_timestamps
289 class TarTime(object):
290     def __init__(self, future_cutoff, past_cutoff):
291         self.reset()
292         self.future_cutoff = future_cutoff
293         self.past_cutoff = past_cutoff
294
295     def reset(self):
296         self.future_files = {}
297         self.ancient_files = {}
298
299     def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
300         if MTime > self.future_cutoff:
301             self.future_files[Name] = MTime
302         if MTime < self.past_cutoff:
303             self.ancient_files[Name] = MTime
304
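# Illustrative sketch (not part of the original module): check_timestamps()
# wires TarTime.callback into the old apt_inst extraction API, roughly:
#
#   tar = TarTime(future_cutoff, past_cutoff)
#   apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
#   if tar.future_files or tar.ancient_files:
#       ...reject, listing the offending names and mtimes...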
305 ###############################################################################
306
307 def prod_maintainer(notes, upload):
308     cnf = Config()
309
310     # Here we prepare an editor and get them ready to prod...
311     (fd, temp_filename) = utils.temp_filename()
312     temp_file = os.fdopen(fd, 'w')
313     for note in notes:
314         temp_file.write(note.comment)
315     temp_file.close()
316     editor = os.environ.get("EDITOR","vi")
317     answer = 'E'
318     while answer == 'E':
319         os.system("%s %s" % (editor, temp_filename))
320         temp_fh = utils.open_file(temp_filename)
321         prod_message = "".join(temp_fh.readlines())
322         temp_fh.close()
323         print "Prod message:"
324         print utils.prefix_multi_line_string(prod_message,"  ",include_blank_lines=1)
325         prompt = "[P]rod, Edit, Abandon, Quit ?"
326         answer = "XXX"
327         while prompt.find(answer) == -1:
328             answer = utils.our_raw_input(prompt)
329             m = re_default_answer.search(prompt)
330             if answer == "":
331                 answer = m.group(1)
332             answer = answer[:1].upper()
333     os.unlink(temp_filename)
334     if answer == 'A':
335         return
336     elif answer == 'Q':
337         end()
338         sys.exit(0)
339     # Otherwise, do the prodding...
340     user_email_address = utils.whoami() + " <%s>" % (
341         cnf["Dinstall::MyAdminAddress"])
342
343     Subst = upload.Subst
344
345     Subst["__FROM_ADDRESS__"] = user_email_address
346     Subst["__PROD_MESSAGE__"] = prod_message
347     Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
348
349     prod_mail_message = utils.TemplateSubst(
350         Subst,cnf["Dir::Templates"]+"/process-new.prod")
351
352     # Send the prod mail
353     utils.send_mail(prod_mail_message)
354
355     print "Sent prodding message"
356
357 ################################################################################
358
359 def edit_note(note, upload, session, trainee=False):
360     # Write the current data to a temporary file
361     (fd, temp_filename) = utils.temp_filename()
362     editor = os.environ.get("EDITOR","vi")
363     answer = 'E'
364     while answer == 'E':
365         os.system("%s %s" % (editor, temp_filename))
366         temp_file = utils.open_file(temp_filename)
367         newnote = temp_file.read().rstrip()
368         temp_file.close()
369         print "New Note:"
370         print utils.prefix_multi_line_string(newnote,"  ")
371         prompt = "[D]one, Edit, Abandon, Quit ?"
372         answer = "XXX"
373         while prompt.find(answer) == -1:
374             answer = utils.our_raw_input(prompt)
375             m = re_default_answer.search(prompt)
376             if answer == "":
377                 answer = m.group(1)
378             answer = answer[:1].upper()
379     os.unlink(temp_filename)
380     if answer == 'A':
381         return
382     elif answer == 'Q':
383         end()
384         sys.exit(0)
385
386     comment = NewComment()
387     comment.package = upload.pkg.changes["source"]
388     comment.version = upload.pkg.changes["version"]
389     comment.comment = newnote
390     comment.author  = utils.whoami()
391     comment.trainee = trainee
392     session.add(comment)
393     session.commit()
394
395 ###############################################################################
396
397 # suite names DMs can upload to
398 dm_suites = ['unstable', 'experimental']
399
400 def get_newest_source(source, session):
401     'returns the newest DBSource object in dm_suites'
402     ## the most recent version of the package uploaded to unstable or
403     ## experimental includes the field "DM-Upload-Allowed: yes" in the source
404     ## section of its control file
405     q = session.query(DBSource).filter_by(source = source). \
406         filter(DBSource.suites.any(Suite.suite_name.in_(dm_suites))). \
407         order_by(desc('source.version'))
408     return q.first()
409
410 def get_suite_version_by_source(source, session):
411     'returns a list of tuples (suite_name, version) for source package'
412     q = session.query(Suite.suite_name, DBSource.version). \
413         join(Suite.sources).filter_by(source = source)
414     return q.all()
415
416 def get_source_by_package_and_suite(package, suite_name, session):
417     '''
418     returns a DBSource query filtered by DBBinary.package and this package's
419     suite_name
420     '''
421     return session.query(DBSource). \
422         join(DBSource.binaries).filter_by(package = package). \
423         join(DBBinary.suites).filter_by(suite_name = suite_name)
424
425 def get_suite_version_by_package(package, arch_string, session):
426     '''
427     returns a list of tuples (suite_name, version) for binary package and
428     arch_string
429     '''
430     return session.query(Suite.suite_name, DBBinary.version). \
431         join(Suite.binaries).filter_by(package = package). \
432         join(DBBinary.architecture). \
433         filter(Architecture.arch_string.in_([arch_string, 'all'])).all()
434
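# Illustrative sketch (not part of the original module): the query helpers
# above are used when checking DM upload permissions, along these lines
# (package names and output are made up):
#
#   src = get_newest_source("hello", session)       # newest in dm_suites
#   if src is None:
#       ...nothing in unstable/experimental, so no DM reference point...
#   get_suite_version_by_source("hello", session)
#   # -> e.g. [(u'unstable', u'2.8-1'), (u'testing', u'2.7-3')]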
435 class Upload(object):
436     """
437     Everything that has to do with processing an upload.
438
439     """
440     def __init__(self):
441         self.logger = None
442         self.pkg = Changes()
443         self.reset()
444
445     ###########################################################################
446
447     def reset (self):
448         """ Reset a number of internal variables."""
449
450         # Initialize the substitution template map
451         cnf = Config()
452         self.Subst = {}
453         self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
454         self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
455         self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
456         self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
457
458         self.rejects = []
459         self.warnings = []
460         self.notes = []
461
462         self.later_check_files = []
463
464         self.pkg.reset()
465
466     def package_info(self):
467         """
468         Format various messages from this Upload to send to the maintainer.
469         """
470
471         msgs = (
472             ('Reject Reasons', self.rejects),
473             ('Warnings', self.warnings),
474             ('Notes', self.notes),
475         )
476
477         msg = ''
478         for title, messages in msgs:
479             if messages:
480                 msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
481         msg += '\n\n'
482
483         return msg
484
485     ###########################################################################
486     def update_subst(self):
487         """ Set up the per-package template substitution mappings """
488
489         cnf = Config()
490
491         # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
492         if not self.pkg.changes.has_key("architecture") or not \
493            isinstance(self.pkg.changes["architecture"], dict):
494             self.pkg.changes["architecture"] = { "Unknown" : "" }
495
496         # and maintainer2047 may not exist.
497         if not self.pkg.changes.has_key("maintainer2047"):
498             self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]
499
500         self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
501         self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
502         self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")
503
504         # For source uploads the Changed-By field wins; otherwise Maintainer wins.
505         if self.pkg.changes["architecture"].has_key("source") and \
506            self.pkg.changes["changedby822"] != "" and \
507            (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
508
509             self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
510             self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
511             self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
512         else:
513             self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
514             self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
515             self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")
516
517         # Process policy doesn't set the fingerprint field and I don't want to make it
518         # do it for now as I don't want to have to deal with the case where we accepted
519         # the package into PU-NEW, but the fingerprint has gone away from the keyring in
520         # the meantime so the package will be remarked as rejectable.  Urgh.
521         # TODO: Fix this properly
522         if self.pkg.changes.has_key('fingerprint'):
523             session = DBConn().session()
524             fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
525             if fpr and self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
526                 if self.pkg.changes.has_key("sponsoremail"):
527                     self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
528             session.close()
529
530         if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
531             self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
532
533         # Apply any global override of the Maintainer field
534         if cnf.get("Dinstall::OverrideMaintainer"):
535             self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
536             self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]
537
538         self.Subst["__REJECT_MESSAGE__"] = self.package_info()
539         self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
540         self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
541         self.Subst["__SUITE__"] = ", ".join(self.pkg.changes["distribution"])
542
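    # Illustrative sketch (not part of the original module): the Subst map
    # built above feeds utils.TemplateSubst(); the template name below is
    # hypothetical:
    #
    #   u.update_subst()
    #   mail = utils.TemplateSubst(u.Subst,
    #       cnf["Dir::Templates"] + "/process-unchecked.accepted")
    #   utils.send_mail(mail)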
543     ###########################################################################
544     def load_changes(self, filename):
545         """
546         Load a changes file and set up a dictionary around it. Also checks for
547         mandatory fields within.
548
549         @type filename: string
550         @param filename: Changes filename, full path.
551
552         @rtype: boolean
553         @return: whether the changes file was valid or not.  We may want to
554                  reject even if this is True (see what gets put in self.rejects).
555                  This is simply to prevent us even trying things later which will
556                  fail because we couldn't properly parse the file.
557         """
558         Cnf = Config()
559         self.pkg.changes_file = filename
560
561         # Parse the .changes file into a dictionary
562         try:
563             self.pkg.changes.update(parse_changes(filename))
564         except CantOpenError:
565             self.rejects.append("%s: can't read file." % (filename))
566             return False
567         except ParseChangesError, line:
568             self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
569             return False
570         except ChangesUnicodeError:
571             self.rejects.append("%s: changes file not proper utf-8" % (filename))
572             return False
573
574         # Parse the Files field from the .changes into another dictionary
575         try:
576             self.pkg.files.update(utils.build_file_list(self.pkg.changes))
577         except ParseChangesError, line:
578             self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
579             return False
580         except UnknownFormatError, format:
581             self.rejects.append("%s: unknown format '%s'." % (filename, format))
582             return False
583
584         # Check for mandatory fields
585         for i in ("distribution", "source", "binary", "architecture",
586                   "version", "maintainer", "files", "changes", "description"):
587             if not self.pkg.changes.has_key(i):
588                 # Avoid undefined errors later
589                 self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
590                 return False
591
592         # Strip a source version in brackets from the source field
593         if re_strip_srcver.search(self.pkg.changes["source"]):
594             self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])
595
596         # Ensure the source field is a valid package name.
597         if not re_valid_pkg_name.match(self.pkg.changes["source"]):
598             self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))
599
600         # Split multi-value fields into a lower-level dictionary
601         for i in ("architecture", "distribution", "binary", "closes"):
602             o = self.pkg.changes.get(i, "")
603             if o != "":
604                 del self.pkg.changes[i]
605
606             self.pkg.changes[i] = {}
607
608             for j in o.split():
609                 self.pkg.changes[i][j] = 1
610
611         # Fix the Maintainer: field to be RFC822/2047 compatible
612         try:
613             (self.pkg.changes["maintainer822"],
614              self.pkg.changes["maintainer2047"],
615              self.pkg.changes["maintainername"],
616              self.pkg.changes["maintaineremail"]) = \
617                    fix_maintainer (self.pkg.changes["maintainer"])
618         except ParseMaintError, msg:
619             self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
620                    % (filename, self.pkg.changes["maintainer"], msg))
621
622         # ...likewise for the Changed-By: field if it exists.
623         try:
624             (self.pkg.changes["changedby822"],
625              self.pkg.changes["changedby2047"],
626              self.pkg.changes["changedbyname"],
627              self.pkg.changes["changedbyemail"]) = \
628                    fix_maintainer (self.pkg.changes.get("changed-by", ""))
629         except ParseMaintError, msg:
630             self.pkg.changes["changedby822"] = ""
631             self.pkg.changes["changedby2047"] = ""
632             self.pkg.changes["changedbyname"] = ""
633             self.pkg.changes["changedbyemail"] = ""
634
635             self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
636                    % (filename, self.pkg.changes["changed-by"], msg))
637
638         # Ensure all the values in Closes: are numbers
639         if self.pkg.changes.has_key("closes"):
640             for i in self.pkg.changes["closes"].keys():
641                 if re_isanum.match(i) is None:
642                     self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))
643
644         # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
645         self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
646         self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])
647
648         # Check the .changes is non-empty
649         if not self.pkg.files:
650             self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
651             return False
652
653         # Changes was syntactically valid even if we'll reject
654         return True
655
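    # Illustrative sketch (not part of the original module): typical driver
    # code, roughly as in 'dak process-upload':
    #
    #   u = Upload()
    #   if not u.load_changes("/srv/queue/unchecked/foo_1.0_amd64.changes"):
    #       ...unparseable; u.rejects explains why...
    #   elif u.rejects:
    #       ...parsed, but there are reject reasons to report...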
656     ###########################################################################
657
658     def check_distributions(self):
659         "Check and map the Distribution field"
660
661         Cnf = Config()
662
663         # Handle suite mappings
664         for m in Cnf.ValueList("SuiteMappings"):
665             args = m.split()
666             mtype = args[0]
667             if mtype == "map" or mtype == "silent-map":
668                 (source, dest) = args[1:3]
669                 if self.pkg.changes["distribution"].has_key(source):
670                     del self.pkg.changes["distribution"][source]
671                     self.pkg.changes["distribution"][dest] = 1
672                     if mtype != "silent-map":
673                         self.notes.append("Mapping %s to %s." % (source, dest))
674                 if self.pkg.changes.has_key("distribution-version"):
675                     if self.pkg.changes["distribution-version"].has_key(source):
676                         self.pkg.changes["distribution-version"][source]=dest
677             elif mtype == "map-unreleased":
678                 (source, dest) = args[1:3]
679                 if self.pkg.changes["distribution"].has_key(source):
680                     for arch in self.pkg.changes["architecture"].keys():
681                         if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
682                             self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
683                             del self.pkg.changes["distribution"][source]
684                             self.pkg.changes["distribution"][dest] = 1
685                             break
686             elif mtype == "ignore":
687                 suite = args[1]
688                 if self.pkg.changes["distribution"].has_key(suite):
689                     del self.pkg.changes["distribution"][suite]
690                     self.warnings.append("Ignoring %s as a target suite." % (suite))
691             elif mtype == "reject":
692                 suite = args[1]
693                 if self.pkg.changes["distribution"].has_key(suite):
694                     self.rejects.append("Uploads to %s are not accepted." % (suite))
695             elif mtype == "propup-version":
696                 # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
697                 #
698                 # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
699                 if self.pkg.changes["distribution"].has_key(args[1]):
700                     self.pkg.changes.setdefault("distribution-version", {})
701                     for suite in args[2:]:
702                         self.pkg.changes["distribution-version"][suite] = suite
703
704         # Ensure there is (still) a target distribution
705         if len(self.pkg.changes["distribution"].keys()) < 1:
706             self.rejects.append("No valid distribution remaining.")
707
708         # Ensure target distributions exist
709         for suite in self.pkg.changes["distribution"].keys():
710             if not Cnf.has_key("Suite::%s" % (suite)):
711                 self.rejects.append("Unknown distribution `%s'." % (suite))
712
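    # Illustrative sketch (not part of the original module): SuiteMappings
    # entries in the dak configuration drive check_distributions(); the
    # values below are hypothetical:
    #
    #   SuiteMappings
    #   {
    #     "map stable proposed-updates";
    #     "silent-map stable-security proposed-updates";
    #     "ignore testing";
    #     "reject unreleased";
    #     "propup-version stable-security testing";
    #   };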
713     ###########################################################################
714
715     def binary_file_checks(self, f, session):
716         cnf = Config()
717         entry = self.pkg.files[f]
718
719         # Extract package control information
720         deb_file = utils.open_file(f)
721         try:
722             control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
723         except:
724             self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
725             deb_file.close()
726             # Can't continue, none of the checks on control would work.
727             return
728
729         # Check for mandatory "Description:"
730         deb_file.seek(0)
731         try:
732             apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
733         except:
734             self.rejects.append("%s: Missing Description in binary package" % (f))
735             return
736
737         deb_file.close()
738
739         # Check for mandatory fields
740         for field in [ "Package", "Architecture", "Version" ]:
741             if control.Find(field) == None:
742                 # Can't continue
743                 self.rejects.append("%s: No %s field in control." % (f, field))
744                 return
745
746         # Ensure the package name matches the one given in the .changes
747         if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
748             self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
749
750         # Validate the package field
751         package = control.Find("Package")
752         if not re_valid_pkg_name.match(package):
753             self.rejects.append("%s: invalid package name '%s'." % (f, package))
754
755         # Validate the version field
756         version = control.Find("Version")
757         if not re_valid_version.match(version):
758             self.rejects.append("%s: invalid version number '%s'." % (f, version))
759
760         # Ensure the architecture of the .deb is one we know about.
761         default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
762         architecture = control.Find("Architecture")
763         upload_suite = self.pkg.changes["distribution"].keys()[0]
764
765         if      architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session = session)] \
766             and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session = session)]:
767             self.rejects.append("Unknown architecture '%s'." % (architecture))
768
769         # Ensure the architecture of the .deb is one of the ones
770         # listed in the .changes.
771         if not self.pkg.changes["architecture"].has_key(architecture):
772             self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
773
774         # Sanity-check the Depends field
775         depends = control.Find("Depends")
776         if depends == '':
777             self.rejects.append("%s: Depends field is empty." % (f))
778
779         # Sanity-check the Provides field
780         provides = control.Find("Provides")
781         if provides:
782             provide = re_spacestrip.sub('', provides)
783             if provide == '':
784                 self.rejects.append("%s: Provides field is empty." % (f))
785             prov_list = provide.split(",")
786             for prov in prov_list:
787                 if not re_valid_pkg_name.match(prov):
788                     self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
789
790         # If there is a Built-Using field, we need to check we can find the
791         # exact source version
792         built_using = control.Find("Built-Using")
793         if built_using:
794             try:
795                 entry["built-using"] = []
796                 for dep in apt_pkg.parse_depends(built_using):
797                     bu_s, bu_v, bu_e = dep[0]
798                     # Check that it's an exact match dependency and we have
799                     # some form of version
800                     if bu_e != "=" or len(bu_v) < 1:
801                         self.rejects.append("%s: Built-Using contains non strict dependency (%s %s %s)" % (f, bu_s, bu_e, bu_v))
802                     else:
803                         # Find the source id for this version
804                         bu_so = get_sources_from_name(bu_s, version=bu_v, session = session)
805                         if len(bu_so) != 1:
806                             self.rejects.append("%s: Built-Using (%s = %s): Cannot find source package" % (f, bu_s, bu_v))
807                         else:
808                             entry["built-using"].append( (bu_so[0].source, bu_so[0].version, ) )
809
810             except ValueError, e:
811                 self.rejects.append("%s: Cannot parse Built-Using field: %s" % (f, str(e)))
812
813
814         # Check the section & priority match those given in the .changes (non-fatal)
815         if     control.Find("Section") and entry["section"] != "" \
816            and entry["section"] != control.Find("Section"):
817             self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
818                                 (f, control.Find("Section", ""), entry["section"]))
819         if control.Find("Priority") and entry["priority"] != "" \
820            and entry["priority"] != control.Find("Priority"):
821             self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
822                                 (f, control.Find("Priority", ""), entry["priority"]))
823
824         entry["package"] = package
825         entry["architecture"] = architecture
826         entry["version"] = version
827         entry["maintainer"] = control.Find("Maintainer", "")
828
829         if f.endswith(".udeb"):
830             self.pkg.files[f]["dbtype"] = "udeb"
831         elif f.endswith(".deb"):
832             self.pkg.files[f]["dbtype"] = "deb"
833         else:
834             self.rejects.append("%s is neither a .deb nor a .udeb." % (f))
835
836         entry["source"] = control.Find("Source", entry["package"])
837
838         # Get the source version
839         source = entry["source"]
840         source_version = ""
841
842         if source.find("(") != -1:
843             m = re_extract_src_version.match(source)
844             source = m.group(1)
845             source_version = m.group(2)
846
847         if not source_version:
848             source_version = self.pkg.files[f]["version"]
849
850         entry["source package"] = source
851         entry["source version"] = source_version
852
853         # Ensure the filename matches the contents of the .deb
854         m = re_isadeb.match(f)
855
856         #  package name
857         file_package = m.group(1)
858         if entry["package"] != file_package:
859             self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
860                                 (f, file_package, entry["dbtype"], entry["package"]))
861         epochless_version = re_no_epoch.sub('', control.Find("Version"))
862
863         #  version
864         file_version = m.group(2)
865         if epochless_version != file_version:
866             self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
867                                 (f, file_version, entry["dbtype"], epochless_version))
868
869         #  architecture
870         file_architecture = m.group(3)
871         if entry["architecture"] != file_architecture:
872             self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
873                                 (f, file_architecture, entry["dbtype"], entry["architecture"]))
874
875         # Check for existent source
876         source_version = entry["source version"]
877         source_package = entry["source package"]
878         if self.pkg.changes["architecture"].has_key("source"):
879             if source_version != self.pkg.changes["version"]:
880                 self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
881                                     (source_version, f, self.pkg.changes["version"]))
882         else:
883             # Check in the SQL database
884             if not source_exists(source_package, source_version, suites = \
885                 self.pkg.changes["distribution"].keys(), session = session):
886                 # Check in one of the other directories
887                 source_epochless_version = re_no_epoch.sub('', source_version)
888                 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
889                 if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
890                     entry["byhand"] = 1
891                 elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
892                     entry["new"] = 1
893                 else:
894                     dsc_file_exists = False
895                     for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
896                         if cnf.has_key("Dir::Queue::%s" % (myq)):
897                             if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
898                                 dsc_file_exists = True
899                                 break
900
901                     if not dsc_file_exists:
902                         self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
903
904         # Check the version and for file overwrites
905         self.check_binary_against_db(f, session)
906
907     def source_file_checks(self, f, session):
908         entry = self.pkg.files[f]
909
910         m = re_issource.match(f)
911         if not m:
912             return
913
914         entry["package"] = m.group(1)
915         entry["version"] = m.group(2)
916         entry["type"] = m.group(3)
917
918         # Ensure the source package name matches the Source field in the .changes
919         if self.pkg.changes["source"] != entry["package"]:
920             self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
921
922         # Ensure the source version matches the version in the .changes file
923         if re_is_orig_source.match(f):
924             changes_version = self.pkg.changes["chopversion2"]
925         else:
926             changes_version = self.pkg.changes["chopversion"]
927
928         if changes_version != entry["version"]:
929             self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))
930
931         # Ensure the .changes lists source in the Architecture field
932         if not self.pkg.changes["architecture"].has_key("source"):
933             self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))
934
935         # Check the signature of a .dsc file
936         if entry["type"] == "dsc":
937             # check_signature returns either:
938             #  (None, [list, of, rejects]) or (signature, [])
939             (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
940             for j in rejects:
941                 self.rejects.append(j)
942
943         entry["architecture"] = "source"
944
945     def per_suite_file_checks(self, f, suite, session):
946         cnf = Config()
947         entry = self.pkg.files[f]
948
949         # Skip byhand
950         if entry.has_key("byhand"):
951             return
952
953         # Check we have fields we need to do these checks
954         oktogo = True
955         for m in ['component', 'package', 'priority', 'size', 'md5sum']:
956             if not entry.has_key(m):
957                 self.rejects.append("file '%s' does not have field %s set" % (f, m))
958                 oktogo = False
959
960         if not oktogo:
961             return
962
963         # Handle component mappings
964         for m in cnf.ValueList("ComponentMappings"):
965             (source, dest) = m.split()
966             if entry["component"] == source:
967                 entry["original component"] = source
968                 entry["component"] = dest
969
970         # Ensure the component is valid for the target suite
971         if cnf.has_key("Suite::%s::Components" % (suite)) and \
972            entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
973             self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
974             return
975
976         # Validate the component
977         if not get_component(entry["component"], session):
978             self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
979             return
980
981         # See if the package is NEW
982         if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
983             entry["new"] = 1
984
985         # Validate the priority
986         if entry["priority"].find('/') != -1:
987             self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))
988
989         # Determine the location
990         location = cnf["Dir::Pool"]
991         l = get_location(location, entry["component"], session=session)
992         if l is None:
993             self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
994             entry["location id"] = -1
995         else:
996             entry["location id"] = l.location_id
997
998         # Check the md5sum & size against existing files (if any)
999         entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])
1000
1001         found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
1002                                          entry["size"], entry["md5sum"], entry["location id"])
1003
1004         if found is None:
1005             self.rejects.append("INTERNAL ERROR, check_poolfile() returned multiple matches for %s." % (f))
1006         elif found is False and poolfile is not None:
1007             self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
1008         else:
1009             if poolfile is None:
1010                 entry["files id"] = None
1011             else:
1012                 entry["files id"] = poolfile.file_id
1013
1014         # Check for packages that have moved from one component to another
1015         entry['suite'] = suite
1016         arch_list = [entry["architecture"], 'all']
1017         component = get_component_by_package_suite(self.pkg.files[f]['package'], \
1018             [suite], arch_list = arch_list, session = session)
1019         if component is not None:
1020             entry["othercomponents"] = component
1021
1022     def check_files(self, action=True):
1023         file_keys = self.pkg.files.keys()
1024         holding = Holding()
1025         cnf = Config()
1026
1027         if action:
1028             cwd = os.getcwd()
1029             os.chdir(self.pkg.directory)
1030             for f in file_keys:
1031                 ret = holding.copy_to_holding(f)
1032                 if ret is not None:
1033                     self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)
1034
1035             os.chdir(cwd)
1036
1037         # check we already know the changes file
1038         # [NB: this check must be done post-suite mapping]
1039         base_filename = os.path.basename(self.pkg.changes_file)
1040
1041         session = DBConn().session()
1042
1043         try:
1044             dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
1045             # if in the pool or in a queue other than unchecked, reject
1046             if (dbc.in_queue is None) \
1047                    or (dbc.in_queue is not None
1048                        and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
1049                 self.rejects.append("%s file already known to dak" % base_filename)
1050         except NoResultFound, e:
1051             # not known, good
1052             pass
1053
1054         has_binaries = False
1055         has_source = False
1056
1057         for f, entry in self.pkg.files.items():
1058             # Ensure the file does not already exist in one of the accepted directories
1059             for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
1060                 if not cnf.has_key("Dir::Queue::%s" % (d)): continue
1061                 if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
1062                     self.rejects.append("%s file already exists in the %s directory." % (f, d))
1063
1064             if not re_taint_free.match(f):
1065                 self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))
1066
1067             # Check the file is readable
1068             if os.access(f, os.R_OK) == 0:
1069                 # When running in -n, copy_to_holding() won't have
1070                 # generated the reject_message, so we need to.
1071                 if action:
1072                     if os.path.exists(f):
1073                         self.rejects.append("Can't read `%s'. [permission denied]" % (f))
1074                     else:
1075                         # Don't directly reject, mark to check later to deal with orig's
1076                         # we can find in the pool
1077                         self.later_check_files.append(f)
1078                 entry["type"] = "unreadable"
1079                 continue
1080
1081             # If it's byhand skip remaining checks
1082             if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
1083                 entry["byhand"] = 1
1084                 entry["type"] = "byhand"
1085
1086             # Checks for a binary package...
1087             elif re_isadeb.match(f):
1088                 has_binaries = True
1089                 entry["type"] = "deb"
1090
1091                 # This routine appends to self.rejects/warnings as appropriate
1092                 self.binary_file_checks(f, session)
1093
1094             # Checks for a source package...
1095             elif re_issource.match(f):
1096                 has_source = True
1097
1098                 # This routine appends to self.rejects/warnings as appropriate
1099                 self.source_file_checks(f, session)
1100
1101             # Not a binary or source package?  Assume byhand...
1102             else:
1103                 entry["byhand"] = 1
1104                 entry["type"] = "byhand"
1105
1106             # Per-suite file checks
1107             entry["oldfiles"] = {}
1108             for suite in self.pkg.changes["distribution"].keys():
1109                 self.per_suite_file_checks(f, suite, session)
1110
1111         session.close()
1112
1113         # If the .changes file says it has source, it must have source.
1114         if self.pkg.changes["architecture"].has_key("source"):
1115             if not has_source:
1116                 self.rejects.append("no source found and the Architecture line in the changes file mentions source.")
1117
1118             if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
1119                 self.rejects.append("source only uploads are not supported.")
1120
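    # Illustrative sketch (not part of the original module): the checks in
    # this class are normally run in sequence, roughly:
    #
    #   u.load_changes(filename)
    #   u.check_distributions()
    #   u.check_files(action=True)
    #   u.check_dsc(action=True)
    #   if u.rejects:
    #       ...notify the maintainer using u.package_info()...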
1121     ###########################################################################
1122
1123     def __dsc_filename(self):
1124         """
1125         Returns: (Status, Dsc_Filename)
1126         where
1127           Status: Boolean; True when there was no error, False otherwise
1128           Dsc_Filename: String; name of the dsc file if Status is True, reason for the error otherwise
1129         """
1130         dsc_filename = None
1131
1132         # find the dsc
1133         for name, entry in self.pkg.files.items():
1134             if entry.has_key("type") and entry["type"] == "dsc":
1135                 if dsc_filename:
1136                     return False, "cannot process a .changes file with multiple .dsc's."
1137                 else:
1138                     dsc_filename = name
1139
1140         if not dsc_filename:
1141             return False, "source uploads must contain a dsc file"
1142
1143         return True, dsc_filename
1144
1145     def load_dsc(self, action=True, signing_rules=1):
1146         """
1147         Find and load the dsc from self.pkg.files into self.dsc
1148
1149         Returns: (Status, Reason)
1150         where
1151           Status: Boolean; True when there was no error, False otherwise
1152           Reason: String; When Status is False this describes the error
1153         """
1154
1155         # find the dsc
1156         (status, dsc_filename) = self.__dsc_filename()
1157         if not status:
1158             # If status is false, dsc_filename has the reason
1159             return False, dsc_filename
1160
1161         try:
1162             self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=signing_rules, dsc_file=1))
1163         except CantOpenError:
1164             if not action:
1165                 return False, "%s: can't read file." % (dsc_filename)
1166         except ParseChangesError, line:
1167             return False, "%s: parse error, can't grok: %s." % (dsc_filename, line)
1168         except InvalidDscError, line:
1169             return False, "%s: syntax error on line %s." % (dsc_filename, line)
1170         except ChangesUnicodeError:
1171             return False, "%s: dsc file not proper utf-8." % (dsc_filename)
1172
1173         return True, None
1174
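    # Illustrative sketch (not part of the original module): check_dsc()
    # below drives load_dsc(); standalone use looks roughly like:
    #
    #   (status, reason) = u.load_dsc()
    #   if not status:
    #       u.rejects.append(reason)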
1175     ###########################################################################
1176
1177     def check_dsc(self, action=True, session=None):
1178         """Returns bool indicating whether or not the source changes are valid"""
1179         # Ensure there is source to check
1180         if not self.pkg.changes["architecture"].has_key("source"):
1181             return True
1182
1183         (status, reason) = self.load_dsc(action=action)
1184         if not status:
1185             self.rejects.append(reason)
1186             return False
1187         (status, dsc_filename) = self.__dsc_filename()
1188         if not status:
1189             # If status is false, dsc_filename has the reason
1190             self.rejects.append(dsc_filename)
1191             return False
1192
1193         # Build up the file list of files mentioned by the .dsc
1194         try:
1195             self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
1196         except NoFilesFieldError:
1197             self.rejects.append("%s: no Files: field." % (dsc_filename))
1198             return False
1199         except UnknownFormatError, format:
1200             self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
1201             return False
1202         except ParseChangesError, line:
1203             self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
1204             return False
1205
1206         # Enforce mandatory fields
1207         for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
1208             if not self.pkg.dsc.has_key(i):
1209                 self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
1210                 return False
1211
1212         # Validate the source and version fields
1213         if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
1214             self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
1215         if not re_valid_version.match(self.pkg.dsc["version"]):
1216             self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))
1217
1218         # Only a limited list of source formats are allowed in each suite
1219         for dist in self.pkg.changes["distribution"].keys():
1220             allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
1221             if self.pkg.dsc["format"] not in allowed:
1222                 self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))
1223
1224         # Validate the Maintainer field
1225         try:
1226             # We ignore the return value
1227             fix_maintainer(self.pkg.dsc["maintainer"])
1228         except ParseMaintError, msg:
1229             self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
1230                                  % (dsc_filename, self.pkg.dsc["maintainer"], msg))
1231
1232         # Validate the build-depends field(s)
1233         for field_name in [ "build-depends", "build-depends-indep" ]:
1234             field = self.pkg.dsc.get(field_name)
1235             if field:
1236                 # Have apt try to parse them...
1237                 try:
1238                     apt_pkg.ParseSrcDepends(field)
1239                 except:
1240                     self.rejects.append("%s: invalid %s field (cannot be parsed by apt)." % (dsc_filename, field_name.title()))
1241
1242         # Ensure the version number in the .dsc matches the version number in the .changes
1243         epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1244         changes_version = self.pkg.files[dsc_filename]["version"]
1245
1246         if epochless_dsc_version != changes_version:
1247             self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
1248
1249         # Ensure the Files field contain only what's expected
1250         self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))
1251
1252         # Ensure source is newer than existing source in target suites
1253         session = DBConn().session()
1254         self.check_source_against_db(dsc_filename, session)
1255         self.check_dsc_against_db(dsc_filename, session)
1256
1257         dbchg = get_dbchange(self.pkg.changes_file, session)
1258
1259         # Finally, check if we're missing any files
1260         for f in self.later_check_files:
1261             print 'Checking deferred file %s' % f
1262             # Check if we've already processed this file if we have a dbchg object
1263             ok = False
1264             if dbchg:
1265                 for pf in dbchg.files:
1266                     if pf.filename == f and pf.processed:
1267                         self.notes.append('%s was already processed so we can go ahead' % f)
1268                         ok = True
1269                         del self.pkg.files[f]
1270             if not ok:
1271                 self.rejects.append("Could not find file %s referenced in changes" % f)
1272
1273         session.close()
1274
1275         return True
1276
1277     ###########################################################################
1278
1279     def get_changelog_versions(self, source_dir):
1280         """Extracts the source package and (optionally) grabs the
1281         version history out of debian/changelog for the BTS."""
1282
1283         cnf = Config()
1284
1285         # Find the .dsc (again)
1286         dsc_filename = None
1287         for f in self.pkg.files.keys():
1288             if self.pkg.files[f]["type"] == "dsc":
1289                 dsc_filename = f
1290
1291         # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1292         if not dsc_filename:
1293             return
1294
1295         # Create a symlink mirror of the source files in our temporary directory
1296         for f in self.pkg.files.keys():
1297             m = re_issource.match(f)
1298             if m:
1299                 src = os.path.join(source_dir, f)
1300                 # If a file is missing for whatever reason, give up.
1301                 if not os.path.exists(src):
1302                     return
1303                 ftype = m.group(3)
1304                 if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
1305                    self.pkg.orig_files[f].has_key("path"):
1306                     continue
1307                 dest = os.path.join(os.getcwd(), f)
1308                 os.symlink(src, dest)
1309
1310         # If the orig files are not a part of the upload, create symlinks to the
1311         # existing copies.
1312         for orig_file in self.pkg.orig_files.keys():
1313             if not self.pkg.orig_files[orig_file].has_key("path"):
1314                 continue
1315             dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
1316             os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
1317
1318         # Extract the source
1319         try:
1320             unpacked = UnpackedSource(dsc_filename)
1321         except:
1322             self.rejects.append("'dpkg-source -x' failed for %s." % dsc_filename)
1323             return
1324
1325         if not cnf.Find("Dir::Queue::BTSVersionTrack"):
1326             return
1327
1328         # Get the upstream version
1329         upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1330         if re_strip_revision.search(upstr_version):
1331             upstr_version = re_strip_revision.sub('', upstr_version)
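        # For illustration, assuming re_strip_revision matches the trailing
        # "-<revision>" part of a version (example values hypothetical):
        #
        #   >>> re_strip_revision.sub('', '2.30-7')
        #   '2.30'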
1332
1333         # Ensure the changelog file exists
1334         changelog_file = unpacked.get_changelog_file()
1335         if changelog_file is None:
1336             self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
1337             return
1338
1339         # Parse the changelog
1340         self.pkg.dsc["bts changelog"] = ""
1341         for line in changelog_file.readlines():
1342             m = re_changelog_versions.match(line)
1343             if m:
1344                 self.pkg.dsc["bts changelog"] += line
1345         changelog_file.close()
1346         unpacked.cleanup()
1347
1348         # Check we found at least one revision in the changelog
1349         if not self.pkg.dsc["bts changelog"]:
1350             self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1351
1352     def check_source(self):
1353         # Bail out if:
1354         #    a) there's no source
1355         if not self.pkg.changes["architecture"].has_key("source"):
1356             return
1357
1358         tmpdir = utils.temp_dirname()
1359
1360         # Move into the temporary directory
1361         cwd = os.getcwd()
1362         os.chdir(tmpdir)
1363
1364         # Get the changelog version history
1365         self.get_changelog_versions(cwd)
1366
1367         # Move back and cleanup the temporary tree
1368         os.chdir(cwd)
1369
1370         try:
1371             shutil.rmtree(tmpdir)
1372         except OSError, e:
1373             if e.errno != errno.EACCES:
1374                 print "removing %s failed: %s" % (tmpdir, e)
1375                 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1376
1377             self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1378             # We probably have u-r or u-w directories so chmod everything
1379             # and try again.
1380             cmd = "chmod -R u+rwx %s" % (tmpdir)
1381             result = os.system(cmd)
1382             if result != 0:
1383                 utils.fubar("'%s' failed with result %s." % (cmd, result))
1384             shutil.rmtree(tmpdir)
1385         except Exception, e:
1386             print "unexpected exception removing %s: %s" % (tmpdir, e)
1387             utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1388
1389     ###########################################################################
1390     def ensure_hashes(self):
1391         # Make sure we recognise the format of the Files: field in the .changes
1392         format = self.pkg.changes.get("format", "0.0").split(".", 1)
1393         if len(format) == 2:
1394             format = int(format[0]), int(format[1])
1395         else:
1396             format = int(float(format[0])), 0
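        # For example (values illustrative): a "Format: 1.8" field yields
        # format == (1, 8), while a bare "1" takes the float branch and
        # becomes (1, 0).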
1397
1398         # We need to deal with the original changes blob, as the fields we need
1399         # might not be in the changes dict serialised into the .dak anymore.
1400         orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1401
1402         # Copy the checksums over to the current changes dict.  This will keep
1403         # the existing modifications to it intact.
1404         for field in orig_changes:
1405             if field.startswith('checksums-'):
1406                 self.pkg.changes[field] = orig_changes[field]
1407
1408         # Check for unsupported hashes
1409         for j in utils.check_hash_fields(".changes", self.pkg.changes):
1410             self.rejects.append(j)
1411
1412         for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1413             self.rejects.append(j)
1414
1415         # We have to calculate a hash ourselves if the changes format predates
1416         # the one that hash first appeared in, rather than require it to be there
1417         for hashname, hashfunc, version in utils.known_hashes:
1418             # TODO: Move _ensure_changes_hash into this class
1419             for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1420                 self.rejects.append(j)
1421             if "source" in self.pkg.changes["architecture"]:
1422                 # TODO: Move _ensure_dsc_hash into this class
1423                 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1424                     self.rejects.append(j)
1425
1426     def check_hashes(self):
1427         for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1428             self.rejects.append(m)
1429
1430         for m in utils.check_size(".changes", self.pkg.files):
1431             self.rejects.append(m)
1432
1433         for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1434             self.rejects.append(m)
1435
1436         for m in utils.check_size(".dsc", self.pkg.dsc_files):
1437             self.rejects.append(m)
1438
1439         self.ensure_hashes()
1440
1441     ###########################################################################
1442
1443     def ensure_orig(self, target_dir='.', session=None):
1444         """
1445         Ensures that all orig files mentioned in the changes file are present
1446         in target_dir. If they do not exist, they are symlinked into place.
1447
1448         A list containing the symlinks that were created is returned (so they
1449         can be removed later).
1450         """
1451
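        # A minimal usage sketch (this mirrors how check_lintian below uses
        # the return value):
        #
        #   symlinked = self.ensure_orig()
        #   try:
        #       pass  # run a tool that needs the orig tarballs in place
        #   finally:
        #       for link in symlinked:
        #           os.unlink(link)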
1452         symlinked = []
1453         cnf = Config()
1454
1455         for filename, entry in self.pkg.dsc_files.iteritems():
1456             if not re_is_orig_source.match(filename):
1457                 # File is not an orig; ignore
1458                 continue
1459
1460             if os.path.exists(filename):
1461                 # File exists, no need to continue
1462                 continue
1463
1464             def symlink_if_valid(path):
1465                 f = utils.open_file(path)
1466                 md5sum = apt_pkg.md5sum(f)
1467                 f.close()
1468
1469                 fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
1470                 expected = (int(entry['size']), entry['md5sum'])
1471
1472                 if fingerprint != expected:
1473                     return False
1474
1475                 dest = os.path.join(target_dir, filename)
1476
1477                 os.symlink(path, dest)
1478                 symlinked.append(dest)
1479
1480                 return True
1481
1482             session_ = session
1483             if session is None:
1484                 session_ = DBConn().session()
1485
1486             found = False
1487
1488             # Look in the pool
1489             for poolfile in get_poolfile_like_name('%s' % filename, session_):
1490                 poolfile_path = os.path.join(
1491                     poolfile.location.path, poolfile.filename
1492                 )
1493
1494                 if symlink_if_valid(poolfile_path):
1495                     found = True
1496                     break
1497
1498             if session is None:
1499                 session_.close()
1500
1501             if found:
1502                 continue
1503
1504             # Look in some other queues for the file
1505             queues = ('New', 'Byhand', 'ProposedUpdates',
1506                 'OldProposedUpdates', 'Embargoed', 'Unembargoed')
1507
1508             for queue in queues:
1509                 if not cnf.get('Dir::Queue::%s' % queue):
1510                     continue
1511
1512                 queuefile_path = os.path.join(
1513                     cnf['Dir::Queue::%s' % queue], filename
1514                 )
1515
1516                 if not os.path.exists(queuefile_path):
1517                     # Does not exist in this queue
1518                     continue
1519
1520                 if symlink_if_valid(queuefile_path):
1521                     break
1522
1523         return symlinked
1524
1525     ###########################################################################
1526
1527     def check_lintian(self):
1528         """
1529         Extends self.rejects by checking the output of lintian against tags
1530         specified in Dinstall::LintianTags.
1531         """
1532
1533         cnf = Config()
1534
1535         # Binary-only uploads are not checked here
1536         if not self.pkg.changes['architecture'].has_key('source'):
1537             return
1538
1539         # Only check some distributions
1540         for dist in ('unstable', 'experimental'):
1541             if dist in self.pkg.changes['distribution']:
1542                 break
1543         else:
1544             return
1545
1546         # If we do not have a tagfile, don't do anything
1547         tagfile = cnf.get("Dinstall::LintianTags")
1548         if not tagfile:
1549             return
1550
1551         # Parse the yaml file
1552         sourcefile = file(tagfile, 'r')
1553         sourcecontent = sourcefile.read()
1554         sourcefile.close()
1555
1556         try:
1557             lintiantags = yaml.load(sourcecontent)['lintian']
1558         except yaml.YAMLError, msg:
1559             utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
1560             return
1561
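        # The tag file is expected to look roughly like this (category and
        # tag names illustrative; the code below only assumes a top-level
        # 'lintian' key mapping category names to lists of tags):
        #
        #   lintian:
        #     fatal:
        #       - binary-in-etc
        #     nonfatal:
        #       - ancient-standards-version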
1562         # Try and find all orig mentioned in the .dsc
1563         symlinked = self.ensure_orig()
1564
1565         # Setup the input file for lintian
1566         fd, temp_filename = utils.temp_filename()
1567         temptagfile = os.fdopen(fd, 'w')
1568         for tags in lintiantags.values():
1569             temptagfile.writelines(['%s\n' % x for x in tags])
1570         temptagfile.close()
1571
1572         try:
1573             cmd = "lintian --show-overrides --tags-from-file %s %s" % \
1574                 (temp_filename, self.pkg.changes_file)
1575
1576             result, output = commands.getstatusoutput(cmd)
1577         finally:
1578             # Remove our tempfile and any symlinks we created
1579             os.unlink(temp_filename)
1580
1581             for symlink in symlinked:
1582                 os.unlink(symlink)
1583
1584         if result == 2:
1585             utils.warn("lintian failed for %s [return code: %s]." % \
1586                 (self.pkg.changes_file, result))
1587             utils.warn(utils.prefix_multi_line_string(output, \
1588                 " [possible output:] "))
1589
1590         def log(*txt):
1591             if self.logger:
1592                 self.logger.log(
1593                     [self.pkg.changes_file, "check_lintian"] + list(txt)
1594                 )
1595
1596         # Generate messages
1597         parsed_tags = parse_lintian_output(output)
1598         self.rejects.extend(
1599             generate_reject_messages(parsed_tags, lintiantags, log=log)
1600         )
1601
1602     ###########################################################################
1603     def check_urgency(self):
1604         cnf = Config()
1605         if self.pkg.changes["architecture"].has_key("source"):
1606             if not self.pkg.changes.has_key("urgency"):
1607                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1608             self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1609             if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1610                 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1611                                      (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1612                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1613
1614     ###########################################################################
1615
1616     # Sanity check the time stamps of files inside debs.
1617     # [Files in the near future cause ugly warnings and extreme time
1618     #  travel can cause errors on extraction]
1619
1620     def check_timestamps(self):
1621         Cnf = Config()
1622
1623         future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1624         past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
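        # For example (values illustrative): with FutureTimeTravelGrace set to
        # 86400 and PastCutoffYear set to "1975", any member dated more than a
        # day into the future, or before 1975-01-01, gets flagged below.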
1625         tar = TarTime(future_cutoff, past_cutoff)
1626
1627         for filename, entry in self.pkg.files.items():
1628             if entry["type"] == "deb":
1629                 tar.reset()
1630                 try:
1631                     deb_file = utils.open_file(filename)
1632                     apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
1633                     deb_file.seek(0)
1634                     try:
1635                         apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
1636                     except SystemError, e:
1637                         # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
1638                         if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
1639                             raise
1640                         deb_file.seek(0)
1641                         apt_inst.debExtract(deb_file, tar.callback, "data.tar.bz2")
1642
1643                     deb_file.close()
1644
1645                     future_files = tar.future_files.keys()
1646                     if future_files:
1647                         num_future_files = len(future_files)
1648                         future_file = future_files[0]
1649                         future_date = tar.future_files[future_file]
1650                         self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1651                                % (filename, num_future_files, future_file, time.ctime(future_date)))
1652
1653                     ancient_files = tar.ancient_files.keys()
1654                     if ancient_files:
1655                         num_ancient_files = len(ancient_files)
1656                         ancient_file = ancient_files[0]
1657                         ancient_date = tar.ancient_files[ancient_file]
1658                         self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1659                                % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1660                 except:
1661                     self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1662
1663     def check_if_upload_is_sponsored(self, uid_email, uid_name):
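        # Illustrative outcomes (addresses hypothetical):
        #   uid_email matches Maintainer: or Changed-By: email -> not sponsored
        #   uid_name matches either name (and is non-empty)    -> not sponsored
        #   no match on email or name                          -> sponsored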
1664         if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1665             sponsored = False
1666         elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1667             sponsored = False
1668             if uid_name == "":
1669                 sponsored = True
1670         else:
1671             sponsored = True
1672             if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1673                 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1674                 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1675                     self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1676                         self.pkg.changes["sponsoremail"] = uid_email
1677
1678         return sponsored
1679
1680
1681     ###########################################################################
1682     # check_signed_by_key checks
1683     ###########################################################################
1684
1685     def check_signed_by_key(self):
1686         """Ensure the .changes is signed by an authorized uploader."""
1687         session = DBConn().session()
1688
1689         # First of all we check that the person has proper upload permissions
1690         # and that this upload isn't blocked
1691         fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1692
1693         if fpr is None:
1694             self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1695             return
1696
1697         # TODO: Check that import-keyring adds UIDs properly
1698         if not fpr.uid:
1699             self.rejects.append("Cannot find uid for fingerprint %s.  Please contact ftpmaster@debian.org" % fpr.fingerprint)
1700             return
1701
1702         # Check that the fingerprint which uploaded has permission to do so
1703         self.check_upload_permissions(fpr, session)
1704
1705         # Check that this package is not in a transition
1706         self.check_transition(session)
1707
1708         session.close()
1709
1710
1711     def check_upload_permissions(self, fpr, session):
1712         # Check any one-off upload blocks
1713         self.check_upload_blocks(fpr, session)
1714
1715         # If the source_acl is None, source is never allowed
1716         if fpr.source_acl is None:
1717             if self.pkg.changes["architecture"].has_key("source"):
1718                 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1719                 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1720                 self.rejects.append(rej)
1721                 return
1722         # Handle DM as a special case: it unfortunately needs checking
1723         # before the general ACL handling
1724         # (keys with no source access get more access than DMs in one
1725         #  way; DMs can only upload for their packages whether source
1726         #  or binary, whereas keys with no access might be able to
1727         #  upload some binaries)
1728         elif fpr.source_acl.access_level == 'dm':
1729             self.check_dm_upload(fpr, session)
1730         else:
1731             # If not a DM, we allow full upload rights
1732             uid_email = "%s@debian.org" % (fpr.uid.uid)
1733             self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1734
1735
1736         # Check binary upload permissions
1737         # By this point we know that DMs can't have got here unless they
1738         # are allowed to deal with the package concerned so just apply
1739         # normal checks
1740         if fpr.binary_acl.access_level == 'full':
1741             return
1742
1743         # Otherwise we're in the map case
1744         tmparches = self.pkg.changes["architecture"].copy()
1745         tmparches.pop('source', None)
1746
1747         for bam in fpr.binary_acl_map:
1748             tmparches.pop(bam.architecture.arch_string, None)
1749
1750         if len(tmparches.keys()) > 0:
1751             if fpr.binary_reject:
1752                 rej = "changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1753                 if len(tmparches.keys()) == 1:
1754                     rej += "\n\narchitecture involved is: %s" % ",".join(tmparches.keys())
1755                 else:
1756                     rej += "\n\narchitectures involved are: %s" % ",".join(tmparches.keys())
1757                 self.rejects.append(rej)
1758             else:
1759                 # TODO: This is where we'll implement reject vs throw away binaries later
1760                 rej = "Uhm.  I'm meant to throw away the binaries now but that's not implemented yet"
1761                 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1762                 rej += "\nFingerprint: %s" % (fpr.fingerprint)
1763                 self.rejects.append(rej)
1764
1765
1766     def check_upload_blocks(self, fpr, session):
1767         """Check whether any upload blocks apply to this source, source
1768            version, uid / fpr combination"""
1769
1770         def block_rej_template(fb):
1771             rej = 'Manual upload block in place for package %s' % fb.source
1772             if fb.version is not None:
1773                 rej += ', version %s' % fb.version
1774             return rej
1775
1776         for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1777             # version is None if the block applies to all versions
1778             if fb.version is None or fb.version == self.pkg.changes['version']:
1779                 # Check both fpr and uid - either is enough to cause a reject
1780                 if fb.fpr is not None:
1781                     if fb.fpr.fingerprint == fpr.fingerprint:
1782                         self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1783                 if fb.uid is not None:
1784                     if fb.uid == fpr.uid:
1785                         self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1786
1787
1788     def check_dm_upload(self, fpr, session):
1789         # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
1790         ## none of the uploaded packages are NEW
1791         rej = False
1792         for f in self.pkg.files.keys():
1793             if self.pkg.files[f].has_key("byhand"):
1794                 self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
1795                 rej = True
1796             if self.pkg.files[f].has_key("new"):
1797                 self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
1798                 rej = True
1799
1800         if rej:
1801             return
1802
1803         r = get_newest_source(self.pkg.changes["source"], session)
1804
1805         if r is None:
1806             rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
1807             self.rejects.append(rej)
1808             return
1809
1810         if not r.dm_upload_allowed:
1811             rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
1812             self.rejects.append(rej)
1813             return
1814
1815         ## the Maintainer: field of the uploaded .changes file corresponds with
1816         ## the owner of the key used (ie, non-developer maintainers may not sponsor
1817         ## uploads)
1818         if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
1819             self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
1820
1821         ## the most recent version of the package uploaded to unstable or
1822         ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
1823         ## non-developer maintainers cannot NMU or hijack packages)
1824
1825         # uploader includes the maintainer
1826         accept = False
1827         for uploader in r.uploaders:
1828             (rfc822, rfc2047, name, email) = uploader.get_split_maintainer()
1829             # Eww - I hope we never have two people with the same name in Debian
1830             if email == fpr.uid.uid or name == fpr.uid.name:
1831                 accept = True
1832                 break
1833
1834         if not accept:
1835             self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
1836             return
1837
1838         ## none of the packages are being taken over from other source packages
1839         for b in self.pkg.changes["binary"].keys():
1840             for suite in self.pkg.changes["distribution"].keys():
1841                 for s in get_source_by_package_and_suite(b, suite, session):
1842                     if s.source != self.pkg.changes["source"]:
1843                         self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s.source, suite))
1844
1845
1846
1847     def check_transition(self, session):
1848         cnf = Config()
1849
1850         sourcepkg = self.pkg.changes["source"]
1851
1852         # No sourceful upload -> no need to do anything else, direct return
1853         # We only deal with unstable uploads, not experimental or those going to
1854         # some proposed-updates queue
1855         if "source" not in self.pkg.changes["architecture"] or \
1856            "unstable" not in self.pkg.changes["distribution"]:
1857             return
1858
1859         # Also, only check if a transitions file is defined (and actually
1860         # exists).
1861         transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1862         if transpath == "" or not os.path.exists(transpath):
1863             return
1864
1865         # Parse the yaml file
1866         sourcefile = file(transpath, 'r')
1867         sourcecontent = sourcefile.read()
1868         try:
1869             transitions = yaml.load(sourcecontent)
1870         except yaml.YAMLError, msg:
1871             # This shouldn't happen, there is a wrapper to edit the file which
1872             # checks it, but we prefer being safe to ending up rejecting
1873             # everything.
1874             utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1875             return
1876
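        # A transitions file is expected to look roughly like this (values
        # illustrative; the keys are the ones read in the loop below):
        #
        #   apt_transition:
        #       reason: "apt needs to migrate first"
        #       source: apt
        #       new: 0.9.8
        #       rm: "Some Release Team Member"
        #       packages:
        #           - apt
        #           - python-apt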
1877         # Now look through all defined transitions
1878         for trans in transitions:
1879             t = transitions[trans]
1880             source = t["source"]
1881             expected = t["new"]
1882
1883             # Will be None if nothing is in testing.
1884             current = get_source_in_suite(source, "testing", session)
1885             if current is not None:
1886                 compare = apt_pkg.VersionCompare(current.version, expected)
1887
1888             if current is None or compare < 0:
1889                 # This is still valid, the current version in testing is older than
1890                 # the new version we wait for, or there is none in testing yet
1891
1892                 # Check if the source we look at is affected by this.
1893                 if sourcepkg in t['packages']:
1894                     # The source is affected, let's reject it.
1895
1896                     rejectmsg = "%s: part of the %s transition.\n\n" % (
1897                         sourcepkg, trans)
1898
1899                     if current is not None:
1900                         currentlymsg = "at version %s" % (current.version)
1901                     else:
1902                         currentlymsg = "not present in testing"
1903
1904                     rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1905
1906                     rejectmsg += "\n".join(textwrap.wrap("""Your package
1907 is part of a testing transition designed to get %s migrated (it is
1908 currently %s, we need version %s).  This transition is managed by the
1909 Release Team, and %s is the Release-Team member responsible for it.
1910 Please mail debian-release@lists.debian.org or contact %s directly if you
1911 need further assistance.  You might want to upload to experimental until this
1912 transition is done."""
1913                             % (source, currentlymsg, expected, t["rm"], t["rm"])))
1914
1915                     self.rejects.append(rejectmsg)
1916                     return
1917
1918     ###########################################################################
1919     # End check_signed_by_key checks
1920     ###########################################################################
1921
1922     def build_summaries(self):
1923         """ Build a summary of changes the upload introduces. """
1924
1925         (byhand, new, summary, override_summary) = self.pkg.file_summary()
1926
1927         short_summary = summary
1928
1929         # This is for direport's benefit...
1930         f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1931
1932         if byhand or new:
1933             summary += "Changes: " + f
1934
1935         summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1936
1937         summary += self.announce(short_summary, 0)
1938
1939         return (summary, short_summary)
1940
1941     ###########################################################################
1942
1943     def close_bugs(self, summary, action):
1944         """
1945         Send mail to close bugs as instructed by the closes field in the changes file.
1946         Also add a line to summary if any work was done.
1947
1948         @type summary: string
1949         @param summary: summary text, as given by L{build_summaries}
1950
1951         @type action: bool
1952         @param action: if set to false, no real action will be done.
1953
1954         @rtype: string
1955         @return: summary. If action was taken, extended by the list of closed bugs.
1956
1957         """
1958
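        # For illustration: an upload whose .changes contains
        # "Closes: 123456 654321" arrives here with those bug numbers as the
        # keys of self.pkg.changes["closes"]; only the keys are used below.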
1959         template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1960
1961         bugs = self.pkg.changes["closes"].keys()
1962
1963         if not bugs:
1964             return summary
1965
1966         bugs.sort()
1967         summary += "Closing bugs: "
1968         for bug in bugs:
1969             summary += "%s " % (bug)
1970             if action:
1971                 self.update_subst()
1972                 self.Subst["__BUG_NUMBER__"] = bug
1973                 if self.pkg.changes["distribution"].has_key("stable"):
1974                     self.Subst["__STABLE_WARNING__"] = """
1975 Note that this package is not part of the released stable Debian
1976 distribution.  It may have dependencies on other unreleased software,
1977 or other instabilities.  Please take care if you wish to install it.
1978 The update will eventually make its way into the next released Debian
1979 distribution."""
1980                 else:
1981                     self.Subst["__STABLE_WARNING__"] = ""
1982                 mail_message = utils.TemplateSubst(self.Subst, template)
1983                 utils.send_mail(mail_message)
1984
1985                 # Clear up after ourselves
1986                 del self.Subst["__BUG_NUMBER__"]
1987                 del self.Subst["__STABLE_WARNING__"]
1988
1989         if action and self.logger:
1990             self.logger.log(["closing bugs"] + bugs)
1991
1992         summary += "\n"
1993
1994         return summary
1995
1996     ###########################################################################
1997
1998     def announce(self, short_summary, action):
1999         """
2000         Send an announce mail about a new upload.
2001
2002         @type short_summary: string
2003         @param short_summary: Short summary text to include in the mail
2004
2005         @type action: bool
2006         @param action: if set to false, no real action will be done.
2007
2008         @rtype: string
2009         @return: text string describing the action taken.
2010
2011         """
2012
2013         cnf = Config()
2014         announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
2015
2016         # Only do announcements for source uploads with a recent dpkg-dev installed
2017         if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
2018            self.pkg.changes["architecture"].has_key("source"):
2019             return ""
2020
2021         lists_done = {}
2022         summary = ""
2023
2024         self.Subst["__SHORT_SUMMARY__"] = short_summary
2025
2026         for dist in self.pkg.changes["distribution"].keys():
2027             suite = get_suite(dist)
2028             if suite is None: continue
2029             announce_list = suite.announce
2030             if announce_list == "" or lists_done.has_key(announce_list):
2031                 continue
2032
2033             lists_done[announce_list] = 1
2034             summary += "Announcing to %s\n" % (announce_list)
2035
2036             if action:
2037                 self.update_subst()
2038                 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
2039                 if cnf.get("Dinstall::TrackingServer") and \
2040                    self.pkg.changes["architecture"].has_key("source"):
2041                     trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
2042                     self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
2043
2044                 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
2045                 utils.send_mail(mail_message)
2046
2047                 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
2048
2049         if cnf.FindB("Dinstall::CloseBugs"):
2050             summary = self.close_bugs(summary, action)
2051
2052         del self.Subst["__SHORT_SUMMARY__"]
2053
2054         return summary
2055
2056     ###########################################################################
2057     @session_wrapper
2058     def accept (self, summary, short_summary, session=None):
2059         """
2060         Accept an upload.
2061
2062         This moves all files referenced from the .changes into the pool,
2063         sends the accepted mail, announces to lists, closes bugs and
2064         also checks for override disparities. If enabled it will write out
2065         the version history for the BTS Version Tracking and will finally call
2066         L{queue_build}.
2067
2068         @type summary: string
2069         @param summary: Summary text
2070
2071         @type short_summary: string
2072         @param short_summary: Short summary
2073         """
2074
2075         cnf = Config()
2076         stats = SummaryStats()
2077
2078         print "Installing."
2079         self.logger.log(["installing changes", self.pkg.changes_file])
2080
2081         binaries = []
2082         poolfiles = []
2083
2084         # Add the .dsc file to the DB first
2085         for newfile, entry in self.pkg.files.items():
2086             if entry["type"] == "dsc":
2087                 source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
2088                 for j in pfs:
2089                     poolfiles.append(j)
2090
2091         # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
2092         for newfile, entry in self.pkg.files.items():
2093             if entry["type"] == "deb":
2094                 b, pf = add_deb_to_db(self, newfile, session)
2095                 binaries.append(b)
2096                 poolfiles.append(pf)
2097
2098         # If this is a sourceful diff only upload that is moving
2099         # cross-component we need to copy the .orig files into the new
2100         # component too for the same reasons as above.
2101         # XXX: mhy: I think this should be in add_dsc_to_db
2102         if self.pkg.changes["architecture"].has_key("source"):
2103             for orig_file in self.pkg.orig_files.keys():
2104                 if not self.pkg.orig_files[orig_file].has_key("id"):
2105                     continue # Skip if it's not in the pool
2106                 orig_file_id = self.pkg.orig_files[orig_file]["id"]
2107                 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
2108                     continue # Skip if the location didn't change
2109
2110                 # Do the move
2111                 oldf = get_poolfile_by_id(orig_file_id, session)
2112                 old_filename = os.path.join(oldf.location.path, oldf.filename)
2113                 old_dat = {'size': oldf.filesize,   'md5sum': oldf.md5sum,
2114                            'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
2115
2116                 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
2117
2118                 # TODO: Care about size/md5sum collisions etc
2119                 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
2120
2121                 # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
2122                 if newf is None:
2123                     utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
2124                     newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
2125
2126                     session.flush()
2127
2128                     # Don't reference the old file from this changes
2129                     for p in poolfiles:
2130                         if p.file_id == oldf.file_id:
2131                             poolfiles.remove(p)
2132
2133                     poolfiles.append(newf)
2134
2135                     # Fix up the DSC references
2136                     toremove = []
2137
2138                     for df in source.srcfiles:
2139                         if df.poolfile.file_id == oldf.file_id:
2140                             # Add a new DSC entry and mark the old one for deletion
2141                             # Don't do it in the loop so we don't change the thing we're iterating over
2142                             newdscf = DSCFile()
2143                             newdscf.source_id = source.source_id
2144                             newdscf.poolfile_id = newf.file_id
2145                             session.add(newdscf)
2146
2147                             toremove.append(df)
2148
2149                     for df in toremove:
2150                         session.delete(df)
2151
2152                     # Flush our changes
2153                     session.flush()
2154
2155                     # Make sure that our source object is up-to-date
2156                     session.expire(source)
2157
2158         # Add changelog information to the database
2159         self.store_changelog()
2160
2161         # Install the files into the pool
2162         for newfile, entry in self.pkg.files.items():
2163             destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
2164             utils.move(newfile, destination)
2165             self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
2166             stats.accept_bytes += float(entry["size"])
2167
2168         # Copy the .changes file across for suites which need it.
2169         copy_changes = dict([(x.copychanges, '')
2170                              for x in session.query(Suite).filter(Suite.suite_name.in_(self.pkg.changes["distribution"].keys())).all()
2171                              if x.copychanges is not None])
2172
2173         for dest in copy_changes.keys():
2174             utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
2175
2176         # We're done - commit the database changes
2177         session.commit()
2178         # Our SQL session will automatically start a new transaction after
2179         # the last commit
2180
2181         # Now ensure that the metadata has been added
2182         # This has to be done after we copy the files into the pool
2183         # For source if we have it:
2184         if self.pkg.changes["architecture"].has_key("source"):
2185             import_metadata_into_db(source, session)
2186
2187         # Now for any of our binaries
2188         for b in binaries:
2189             import_metadata_into_db(b, session)
2190
2191         session.commit()
2192
2193         # Move the .changes into the 'done' directory
2194         ye, mo, da = time.gmtime()[0:3]
2195         donedir = os.path.join(cnf["Dir::Queue::Done"], str(ye), "%0.2d" % mo, "%0.2d" % da)
2196         if not os.path.isdir(donedir):
2197             os.makedirs(donedir)
2198
2199         utils.move(self.pkg.changes_file,
2200                    os.path.join(donedir, os.path.basename(self.pkg.changes_file)))
2201
2202         if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
2203             UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
2204
2205         self.update_subst()
2206         self.Subst["__SUMMARY__"] = summary
2207         mail_message = utils.TemplateSubst(self.Subst,
2208                                            os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
2209         utils.send_mail(mail_message)
2210         self.announce(short_summary, 1)
2211
2212         ## Helper stuff for DebBugs Version Tracking
2213         if cnf.Find("Dir::Queue::BTSVersionTrack"):
2214             if self.pkg.changes["architecture"].has_key("source"):
2215                 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2216                 version_history = os.fdopen(fd, 'w')
2217                 version_history.write(self.pkg.dsc["bts changelog"])
2218                 version_history.close()
2219                 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2220                                       self.pkg.changes_file[:-8]+".versions")
2221                 os.rename(temp_filename, filename)
2222                 os.chmod(filename, 0644)
2223
2224             # Write out the binary -> source mapping.
2225             (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2226             debinfo = os.fdopen(fd, 'w')
2227             for name, entry in sorted(self.pkg.files.items()):
2228                 if entry["type"] == "deb":
2229                     line = " ".join([entry["package"], entry["version"],
2230                                      entry["architecture"], entry["source package"],
2231                                      entry["source version"]])
2232                     debinfo.write(line+"\n")
2233             debinfo.close()
2234             filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2235                                   self.pkg.changes_file[:-8]+".debinfo")
2236             os.rename(temp_filename, filename)
2237             os.chmod(filename, 0644)
2238
2239         session.commit()
2240
2241         # Set up our copy queues (e.g. buildd queues)
2242         for suite_name in self.pkg.changes["distribution"].keys():
2243             suite = get_suite(suite_name, session)
2244             for q in suite.copy_queues:
2245                 for f in poolfiles:
2246                     q.add_file_from_pool(f)
2247
2248         session.commit()
2249
2250         # Finally...
2251         stats.accept_count += 1
2252
2253     def check_override(self):
2254         """
2255         Checks override entries for validity. Mails "Override disparity" warnings,
2256         if that feature is enabled.
2257
2258         Abandons the check if
2259           - override disparity checks are disabled
2260           - mail sending is disabled
2261         """
2262
2263         cnf = Config()
2264
2265         # Abandon the check if override disparity checks have been disabled
2266         if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
2267             return
2268
2269         summary = self.pkg.check_override()
2270
2271         if summary == "":
2272             return
2273
2274         overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2275
2276         self.update_subst()
2277         self.Subst["__SUMMARY__"] = summary
2278         mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2279         utils.send_mail(mail_message)
2280         del self.Subst["__SUMMARY__"]
2281
2282     ###########################################################################
2283
2284     def remove(self, from_dir=None):
2285         """
2286         Used (for instance) in p-u to remove the package from unchecked
2287
2288         Also removes the package from holding area.
2289         """
2290         if from_dir is None:
2291             from_dir = self.pkg.directory
2292         h = Holding()
2293
2294         for f in self.pkg.files.keys():
2295             os.unlink(os.path.join(from_dir, f))
2296             if os.path.exists(os.path.join(h.holding_dir, f)):
2297                 os.unlink(os.path.join(h.holding_dir, f))
2298
2299         os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2300         if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2301             os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2302
2303     ###########################################################################
2304
2305     def move_to_queue (self, queue):
2306         """
2307         Move files to a destination queue using the permissions in the table
2308         """
2309         h = Holding()
2310         utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2311                    queue.path, perms=int(queue.change_perms, 8))
2312         for f in self.pkg.files.keys():
2313             utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
2314
2315     ###########################################################################
2316
2317     def force_reject(self, reject_files):
2318         """
2319         Forcefully move files from the current directory to the
2320         reject directory.  If any file already exists in the reject
2321         directory it will be moved to the morgue to make way for
2322         the new file.
2323
2324         @type reject_files: dict
2325         @param reject_files: file dictionary
2326
2327         """
2328
2329         cnf = Config()
2330
2331         for file_entry in reject_files:
2332             # Skip any files which don't exist or which we don't have permission to copy.
2333             if os.access(file_entry, os.R_OK) == 0:
2334                 continue
2335
2336             dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
2337
2338             try:
2339                 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
2340             except OSError, e:
2341                 # File exists?  Let's find a new name by adding a number
2342                 if e.errno == errno.EEXIST:
2343                     try:
2344                         dest_file = utils.find_next_free(dest_file, 255)
2345                     except NoFreeFilenameError:
2346                         # Something's either gone badly Pete Tong, or
2347                         # someone is trying to exploit us.
2348                         utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))
2349                         return
2350
2351                     # Make sure we really got it
2352                     try:
2353                         dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2354                     except OSError, e:
2355                         # Likewise
2356                         utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
2357                         return
2358                 else:
2359                     raise
2360             # If we got here, we own the destination file, so we can
2361             # safely overwrite it.
2362             utils.move(file_entry, dest_file, 1, perms=0660)
2363             os.close(dest_fd)
2364
2365     ###########################################################################
2366     def do_reject (self, manual=0, reject_message="", notes=""):
2367         """
2368         Reject an upload. If called without a reject message or C{manual} is
2369         true, spawn an editor so the user can write one.
2370
2371         @type manual: bool
2372         @param manual: manual or automated rejection
2373
2374         @type reject_message: string
2375         @param reject_message: A reject message
2376
2377         @return: 0 on rejection; 1 if the rejection was abandoned
2378
2379         """
2380         # If we weren't given a manual rejection message, spawn an
2381         # editor so the user can add one in...
2382         if manual and not reject_message:
2383             (fd, temp_filename) = utils.temp_filename()
2384             temp_file = os.fdopen(fd, 'w')
2385             if len(notes) > 0:
2386                 for note in notes:
2387                     temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
2388                                     % (note.author, note.version, note.notedate, note.comment))
2389             temp_file.close()
2390             editor = os.environ.get("EDITOR","vi")
2391             answer = 'E'
2392             while answer == 'E':
2393                 os.system("%s %s" % (editor, temp_filename))
2394                 temp_fh = utils.open_file(temp_filename)
2395                 reject_message = "".join(temp_fh.readlines())
2396                 temp_fh.close()
2397                 print "Reject message:"
2398                 print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
2399                 prompt = "[R]eject, Edit, Abandon, Quit ?"
2400                 answer = "XXX"
2401                 while prompt.find(answer) == -1:
2402                     answer = utils.our_raw_input(prompt)
2403                     m = re_default_answer.search(prompt)
2404                     if answer == "":
2405                         answer = m.group(1)
2406                     answer = answer[:1].upper()
2407             os.unlink(temp_filename)
2408             if answer == 'A':
2409                 return 1
2410             elif answer == 'Q':
2411                 sys.exit(0)
2412
2413         print "Rejecting.\n"
2414
2415         cnf = Config()
2416
2417         reason_filename = self.pkg.changes_file[:-8] + ".reason"
2418         reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2419
2420         # Move all the files into the reject directory
2421         reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2422         self.force_reject(reject_files)
2423
2424         # If we fail here someone is probably trying to exploit the race
2425         # so let's just raise an exception ...
2426         if os.path.exists(reason_filename):
2427             os.unlink(reason_filename)
2428         reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2429
2430         rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
2431
2432         self.update_subst()
2433         if not manual:
2434             self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2435             self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2436             self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2437             os.write(reason_fd, reject_message)
2438             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2439         else:
2440             # Build up the rejection email
2441             user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2442             self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2443             self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2444             self.Subst["__REJECT_MESSAGE__"] = ""
2445             self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2446             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2447             # Write the rejection email out as the <foo>.reason file
2448             os.write(reason_fd, reject_mail_message)
2449
2450         del self.Subst["__REJECTOR_ADDRESS__"]
2451         del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2452         del self.Subst["__CC__"]
2453
2454         os.close(reason_fd)
2455
2456         # Send the rejection mail
2457         utils.send_mail(reject_mail_message)
2458
2459         if self.logger:
2460             self.logger.log(["rejected", self.pkg.changes_file])
2461
2462         return 0
2463
2464     ################################################################################
2465     def in_override_p(self, package, component, suite, binary_type, filename, session):
2466         """
2467         Check if a package already has override entries in the DB
2468
2469         @type package: string
2470         @param package: package name
2471
2472         @type component: string
2473         @param component: database id of the component
2474
2475         @type suite: int
2476         @param suite: database id of the suite
2477
2478         @type binary_type: string
2479         @param binary_type: type of the package
2480
2481         @type filename: string
2482         @param filename: filename we check
2483
2484         @return: the database result. But no one cares anyway.
2485
2486         """
2487
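        # Illustrative call (argument values hypothetical):
        #
        #   result = self.in_override_p("dak", "main", suite_id, "deb",
        #                               "dak_1.0_all.deb", session)
        #   if result is None:
        #       pass  # no override entry found, so the package is NEW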
2488         cnf = Config()
2489
2490         if binary_type == "": # must be source
2491             file_type = "dsc"
2492         else:
2493             file_type = binary_type
2494
2495         # Override suite name; used for example with proposed-updates
2496         oldsuite = get_suite(suite, session)
2497         if oldsuite is not None and oldsuite.overridesuite:
2498             suite = oldsuite.overridesuite
2499
2500         result = get_override(package, suite, component, file_type, session)
2501
2502         # If checking for a source package fall back on the binary override type
2503         if file_type == "dsc" and len(result) < 1:
2504             result = get_override(package, suite, component, ['deb', 'udeb'], session)
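     # (presumably to cover sources whose overrides were only ever
     # recorded against their binaries -- an assumption, not something
     # this file spells out)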
2505
2506         # Remember the section and priority so we can check them later if appropriate
2507         if len(result) > 0:
2508             result = result[0]
2509             self.pkg.files[filename]["override section"] = result.section.section
2510             self.pkg.files[filename]["override priority"] = result.priority.priority
2511             return result
2512
2513         return None
2514
2515     ################################################################################
2516     def get_anyversion(self, sv_list, suite):
2517         """
2518         @type sv_list: list
2519         @param sv_list: list of (suite, version) tuples to check
2520
2521         @type suite: string
2522         @param suite: suite name
2523
2524         @return: the highest version found in C{suite} or any suite it enhances, or C{None}
2525         """
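     # A worked example with made-up data, assuming "unstable" has no
     # "Enhances" version checks configured:
     #
     #     self.get_anyversion([("unstable", "1.0-1"), ("unstable", "1.2-1"),
     #                          ("experimental", "2.0-1")], "unstable")
     #
     # ignores the experimental entry and returns "1.2-1", the highest
     # version seen in a matching suite.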
2526         Cnf = Config()
2527         anyversion = None
2528         anysuite = [suite] + [ vc.reference.suite_name for vc in get_version_checks(suite, "Enhances") ]
2529         for (s, v) in sv_list:
2530             if s in [ x.lower() for x in anysuite ]:
2531                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2532                     anyversion = v
2533
2534         return anyversion
2535
2536     ################################################################################
2537
2538     def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2539         """
2540         @type sv_list: list
2541         @param sv_list: list of (suite, version) tuples to check
2542
2543         @type filename: string
2544         @param filename: name of the file being checked, used in reject messages
2545
2546         @type new_version: string
2547         @param new_version: version of the upload being checked
2548
2549         Ensure versions are newer than existing packages in target
2550         suites and that the cross-suite version-checking rules set
2551         out in the configuration file are satisfied.
2552         """
2553
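     # In short: for each target suite, a version already present in a
     # "MustBeNewerThan" suite (or in the target itself) that is >= the
     # new version is a reject, though only for sourceful uploads; a
     # version <= the new one in a "MustBeOlderThan" suite is a reject
     # unless the distribution-version machinery below lets us propagate
     # the upload to the mapped suite instead.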
2554         cnf = Config()
2555
2556         # Check versions for each target suite
2557         for target_suite in self.pkg.changes["distribution"].keys():
2558             must_be_newer_than = [ vc.reference.suite_name for vc in get_version_checks(target_suite, "MustBeNewerThan") ]
2559             must_be_older_than = [ vc.reference.suite_name for vc in get_version_checks(target_suite, "MustBeOlderThan") ]
2560
2561             # Enforce "must be newer than target suite" even if conffile omits it
2562             if target_suite not in must_be_newer_than:
2563                 must_be_newer_than.append(target_suite)
2564
2565             for (suite, existent_version) in sv_list:
2566                 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
2567
2568                 if suite in must_be_newer_than and sourceful and vercmp < 1:
2569                     self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2570
2571                 if suite in must_be_older_than and vercmp > -1:
2572                     cansave = 0
2573
2574                     if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2575                         # we really use the other suite, ignoring the conflicting one ...
2576                         addsuite = self.pkg.changes["distribution-version"][suite]
2577
2578                         add_version = self.get_anyversion(sv_list, addsuite)
2579                         target_version = self.get_anyversion(sv_list, target_suite)
2580
2581                         if not add_version:
2582                             # not add_version can only happen if we map to a suite
2583                             # that doesn't enhance the suite we're propup'ing from.
2584                             # so "propup-ver x a b c; map a d" is a problem only if
2585                             # d doesn't enhance a.
2586                             #
2587                             # I think we could always propagate in this case, rather
2588                             # than complaining. either way, this isn't a REJECT issue
2589                             #
2590                             # And - we really should complain to the dorks who configured dak
2591                             self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyway" % (suite, addsuite))
2592                             self.pkg.changes.setdefault("propdistribution", {})
2593                             self.pkg.changes["propdistribution"][addsuite] = 1
2594                             cansave = 1
2595                         elif not target_version:
2596                             # not target_version is true when the package is NEW
2597                             # we could just stick with the "...old version..." REJECT
2598                             # for this, I think.
2599                             self.rejects.append("Won't propagate NEW packages.")
2600                         elif apt_pkg.VersionCompare(new_version, add_version) < 0:
2601                             # propagation would be redundant. no need to reject though.
2602                             self.warnings.append("ignoring version conflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2603                             cansave = 1
2604                         elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2605                              apt_pkg.VersionCompare(add_version, target_version) >= 0:
2606                             # propagate!!
2607                             self.warnings.append("Propagating upload to %s" % (addsuite))
2608                             self.pkg.changes.setdefault("propdistribution", {})
2609                             self.pkg.changes["propdistribution"][addsuite] = 1
2610                             cansave = 1
2611
2612                     if not cansave:
2613                         self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2614
2615     ################################################################################
2616     def check_binary_against_db(self, filename, session):
2617         # Ensure version is sane
2618         self.cross_suite_version_check( \
2619             get_suite_version_by_package(self.pkg.files[filename]["package"], \
2620                 self.pkg.files[filename]["architecture"], session),
2621             filename, self.pkg.files[filename]["version"], sourceful=False)
2622
2623         # Check for any existing copies of the file
2624         q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2625         q = q.filter_by(version=self.pkg.files[filename]["version"])
2626         q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
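     # (Roughly "SELECT ... FROM binaries JOIN architecture ... WHERE
     # package, version and arch_string all match" -- a sketch of the
     # ORM query above, not the exact SQL it emits.)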
2627
2628         if q.count() > 0:
2629             self.rejects.append("%s: cannot overwrite existing copy already in the archive." % filename)
2630
2631     ################################################################################
2632
2633     def check_source_against_db(self, filename, session):
2634         source = self.pkg.dsc.get("source")
2635         version = self.pkg.dsc.get("version")
2636
2637         # Ensure version is sane
2638         self.cross_suite_version_check( \
2639             get_suite_version_by_source(source, session), filename, version,
2640             sourceful=True)
2641
2642     ################################################################################
2643     def check_dsc_against_db(self, filename, session):
2644         """
2645
2646         @warning: NB: this function can remove entries from the 'files' index [if
2647          the orig tarball is a duplicate of the one in the archive]; if
2648          you're iterating over 'files' and call this function as part of
2649          the loop, be sure to add a check to the top of the loop to
2650          ensure you haven't just tried to dereference the deleted entry.
2651
2652         """
2653
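     # The guard the warning above asks for looks like this in the
     # callers further down (recheck() and accepted_checks()):
     #
     #     for f in self.pkg.files.keys():
     #         if not self.pkg.files.has_key(f):
     #             continue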
2654         Cnf = Config()
2655         self.pkg.orig_files = {} # XXX: do we need to clear it?
2656         orig_files = self.pkg.orig_files
2657
2658         # Try and find all files mentioned in the .dsc.  This has
2659         # to work harder to cope with the multiple possible
2660         # locations of an .orig.tar.gz.
2661         # The ordering on the select is needed to pick the newest orig
2662         # when it exists in multiple places.
2663         for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2664             found = None
2665             if self.pkg.files.has_key(dsc_name):
2666                 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2667                 actual_size = int(self.pkg.files[dsc_name]["size"])
2668                 found = "%s in incoming" % (dsc_name)
2669
2670                 # Check the file does not already exist in the archive
2671                 ql = get_poolfile_like_name(dsc_name, session)
2672
2673                 # Keep only entries whose filename ends with dsc_name;
2674                 # building a new list avoids the skipped-element bug of
2675                 # calling ql.remove() while iterating over ql.
2676                 ql = [i for i in ql if i.filename.endswith(dsc_name)]
2677
2678                 # "[dak] has not broken them.  [dak] has fixed a
2679                 # brokenness.  Your crappy hack exploited a bug in
2680                 # the old dinstall."
2681                 #
2682                 # "(Come on!  I thought it was always obvious that
2683                 # one just doesn't release different files with
2684                 # the same name and version.)"
2685                 #                        -- ajk@ on d-devel@l.d.o
2686
2687                 if len(ql) > 0:
2688                     # Ignore exact matches for .orig.tar.gz
2689                     match = 0
2690                     if re_is_orig_source.match(dsc_name):
2691                         for i in ql:
2692                             if self.pkg.files.has_key(dsc_name) and \
2693                                int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2694                                self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2695                                 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2696                                 # TODO: Don't delete the entry, just mark it as not needed
2697                                 # This would fix the stupidity of changing something we often iterate over
2698                                 # whilst we're doing it
2699                                 del self.pkg.files[dsc_name]
2700                                 dsc_entry["files id"] = i.file_id
2701                                 if not orig_files.has_key(dsc_name):
2702                                     orig_files[dsc_name] = {}
2703                                 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2704                                 match = 1
2705
2706                                 # Don't bitch that we couldn't find this file later
2707                                 try:
2708                                     self.later_check_files.remove(dsc_name)
2709                                 except ValueError:
2710                                     pass
2711
2712
2713                     if not match:
2714                         self.rejects.append("cannot overwrite existing copy of '%s' already in the archive." % (dsc_name))
2715
2716             elif re_is_orig_source.match(dsc_name):
2717                 # Check in the pool
2718                 ql = get_poolfile_like_name(dsc_name, session)
2719
2720                 # Keep only entries whose filename ends with dsc_name,
2721                 # rebuilding the list rather than mutating it mid-iteration.
2722                 # TODO: Shouldn't we just search for things which end with
2723                 # our string explicitly in the SQL?
2724                 ql = [i for i in ql if i.filename.endswith(dsc_name)]
2725
2726                 if len(ql) > 0:
2727                     # Unfortunately, we may get more than one match here if,
2728                     # for example, the package was in potato but had an -sa
2729                     # upload in woody.  So we need to choose the right one.
2730
2731                     # default to something sane in case we don't match any or have only one
2732                     x = ql[0]
2733
2734                     if len(ql) > 1:
2735                         for i in ql:
2736                             old_file = os.path.join(i.location.path, i.filename)
2737                             old_file_fh = utils.open_file(old_file)
2738                             actual_md5 = apt_pkg.md5sum(old_file_fh)
2739                             old_file_fh.close()
2740                             actual_size = os.stat(old_file)[stat.ST_SIZE]
2741                             if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2742                                 x = i
2743
2744                     old_file = os.path.join(x.location.path, x.filename)   # use the match chosen above, not the loop variable
2745                     old_file_fh = utils.open_file(old_file)
2746                     actual_md5 = apt_pkg.md5sum(old_file_fh)
2747                     old_file_fh.close()
2748                     actual_size = os.stat(old_file)[stat.ST_SIZE]
2749                     found = old_file
2750                     suite_type = x.location.archive_type
2751                     # need this for updating dsc_files in install()
2752                     dsc_entry["files id"] = x.file_id
2753                     # See install() in process-accepted...
2754                     if not orig_files.has_key(dsc_name):
2755                         orig_files[dsc_name] = {}
2756                     orig_files[dsc_name]["id"] = x.file_id
2757                     orig_files[dsc_name]["path"] = old_file
2758                     orig_files[dsc_name]["location"] = x.location.location_id
2759                 else:
2760                     # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2761                     # Not there? Check the queue directories...
2762                     for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2763                         if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2764                             continue
2765                         in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2766                         if os.path.exists(in_otherdir):
2767                             in_otherdir_fh = utils.open_file(in_otherdir)
2768                             actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2769                             in_otherdir_fh.close()
2770                             actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2771                             found = in_otherdir
2772                             if not orig_files.has_key(dsc_name):
2773                                 orig_files[dsc_name] = {}
2774                             orig_files[dsc_name]["path"] = in_otherdir
2775
2776                     if not found:
2777                         self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2778                         continue
2779             else:
2780                 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2781                 continue
2782             if actual_md5 != dsc_entry["md5sum"]:
2783                 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2784             if actual_size != int(dsc_entry["size"]):
2785                 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2786
2787     ################################################################################
2788     # This is used by process-new and process-holding to recheck a changes file
2789     # at the time we're running.  It mainly wraps various other internal functions
2790     # and is similar to accepted_checks - these should probably be tidied up
2791     # and combined
2792     def recheck(self, session):
2793         cnf = Config()
2794         for f in self.pkg.files.keys():
2795             # The .orig.tar.gz can disappear out from under us if it's a
2796             # duplicate of one in the archive.
2797             if not self.pkg.files.has_key(f):
2798                 continue
2799
2800             entry = self.pkg.files[f]
2801
2802             # Check that the source still exists
2803             if entry["type"] == "deb":
2804                 source_version = entry["source version"]
2805                 source_package = entry["source package"]
2806                 if not self.pkg.changes["architecture"].has_key("source") \
2807                    and not source_exists(source_package, source_version, \
2808                     suites = self.pkg.changes["distribution"].keys(), session = session):
2809                     source_epochless_version = re_no_epoch.sub('', source_version)
2810                     dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2811                     found = False
2812                     for q in ["Embargoed", "Unembargoed", "Newstage"]:
2813                         if cnf.has_key("Dir::Queue::%s" % (q)):
2814                             if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
2815                                 found = True
2816                     if not found:
2817                         self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2818
2819             # Version and file overwrite checks
2820             if entry["type"] == "deb":
2821                 self.check_binary_against_db(f, session)
2822             elif entry["type"] == "dsc":
2823                 self.check_source_against_db(f, session)
2824                 self.check_dsc_against_db(f, session)
2825
2826     ################################################################################
2827     def accepted_checks(self, overwrite_checks, session):
2828         # Recheck anything that relies on the database, since that's not
2829         # frozen between accept and our run time when called from p-a.
2830
2831         # overwrite_checks is set to False when installing to stable/oldstable
2832
2833         propagate={}
2834         nopropagate={}
2835
2836         # Find the .dsc (again)
2837         dsc_filename = None
2838         for f in self.pkg.files.keys():
2839             if self.pkg.files[f]["type"] == "dsc":
2840                 dsc_filename = f
2841
2842         for checkfile in self.pkg.files.keys():
2843             # The .orig.tar.gz can disappear out from under us if it's a
2844             # duplicate of one in the archive.
2845             if not self.pkg.files.has_key(checkfile):
2846                 continue
2847
2848             entry = self.pkg.files[checkfile]
2849
2850             # Check that the source still exists
2851             if entry["type"] == "deb":
2852                 source_version = entry["source version"]
2853                 source_package = entry["source package"]
2854                 if not self.pkg.changes["architecture"].has_key("source") \
2855                    and not source_exists(source_package, source_version, \
2856                     suites = self.pkg.changes["distribution"].keys(), \
2857                     session = session):
2858                     self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2859
2860             # Version and file overwrite checks
2861             if overwrite_checks:
2862                 if entry["type"] == "deb":
2863                     self.check_binary_against_db(checkfile, session)
2864                 elif entry["type"] == "dsc":
2865                     self.check_source_against_db(checkfile, session)
2866                     self.check_dsc_against_db(dsc_filename, session)
2867
2868             # propagate if the package is in the override tables:
2869             for suite in self.pkg.changes.get("propdistribution", {}).keys():
2870                 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2871                     propagate[suite] = 1
2872                 else:
2873                     nopropagate[suite] = 1
2874
2875         for suite in propagate.keys():
2876             if suite in nopropagate:
2877                 continue
2878             self.pkg.changes["distribution"][suite] = 1
2879
2880         for checkfile in self.pkg.files.keys():
2881             # Check the package is still in the override tables
     entry = self.pkg.files[checkfile]   # rebind: "entry" would otherwise still point at the last file of the loop above
2882             for suite in self.pkg.changes["distribution"].keys():
2883                 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2884                     self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2885
2886     ################################################################################
2887     # If any file of an upload has a recent mtime then chances are good
2888     # the file is still being uploaded.
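     # For example, with "Dinstall::SkipTime 300;" configured (a value
     # picked here purely for illustration), anything modified within the
     # last five minutes makes upload_too_new() return True.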
2889
2890     def upload_too_new(self):
2891         cnf = Config()
2892         too_new = False
2893         # Move back to the original directory to get accurate time stamps
2894         cwd = os.getcwd()
2895         os.chdir(self.pkg.directory)
2896         file_list = self.pkg.files.keys()
2897         file_list.extend(self.pkg.dsc_files.keys())
2898         file_list.append(self.pkg.changes_file)
2899         for f in file_list:
2900             try:
2901                 last_modified = time.time()-os.path.getmtime(f)
2902                 if last_modified < int(cnf["Dinstall::SkipTime"]):
2903                     too_new = True
2904                     break
2905             except OSError:
2906                 pass    # file vanished or is unreadable; don't treat as too new
2907
2908         os.chdir(cwd)
2909         return too_new
2910
2911     def store_changelog(self):
2912
2913         # Skip binary-only upload if it is not a bin-NMU
2914         if not self.pkg.changes['architecture'].has_key('source'):
2915             from daklib.regexes import re_bin_only_nmu
2916             if not re_bin_only_nmu.search(self.pkg.changes['version']):
2917                 return
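     # (bin-NMU versions carry a "+bN" suffix -- e.g. "1.2-3+b1" -- which
     # is what re_bin_only_nmu is assumed to match here)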
2918
2919         session = DBConn().session()
2920
2921         # Check if upload already has a changelog entry
2922         query = """SELECT changelog_id FROM changes WHERE source = :source
2923                    AND version = :version AND architecture = :architecture AND changelog_id != 0"""
2924         if session.execute(query, {'source': self.pkg.changes['source'], \
2925                                    'version': self.pkg.changes['version'], \
2926                                    'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
2927             session.commit()
2928             return
2929
2930         # Add current changelog text into changelogs_text table, return created ID
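     # ("INSERT ... RETURNING" is PostgreSQL-specific, which is fine for
     # dak's database.)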
2931         query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
2932         ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]
2933
2934         # Link ID to the upload available in changes table
2935         query = """UPDATE changes SET changelog_id = :id WHERE source = :source
2936                    AND version = :version AND architecture = :architecture"""
2937         session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
2938                                 'version': self.pkg.changes['version'], \
2939                                 'architecture': " ".join(self.pkg.changes['architecture'].keys())})
2940
2941         session.commit()