]> git.decadent.org.uk Git - dak.git/blob - daklib/queue.py
Display changes in ACCEPTED mails (Closes: #261096)
[dak.git] / daklib / queue.py
1 #!/usr/bin/env python
2 # vim:set et sw=4:
3
4 """
5 Queue utility functions for dak
6
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009, 2010  Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
11 """
12
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
17
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
21 # GNU General Public License for more details.
22
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
26
27 ###############################################################################
28
29 import errno
30 import os
31 import stat
32 import sys
33 import time
34 import apt_inst
35 import apt_pkg
36 import utils
37 import commands
38 import shutil
39 import textwrap
40 from types import *
41 from sqlalchemy.sql.expression import desc
42 from sqlalchemy.orm.exc import NoResultFound
43
44 import yaml
45
46 from dak_exceptions import *
47 from changes import *
48 from regexes import *
49 from config import Config
50 from holding import Holding
51 from urgencylog import UrgencyLog
52 from dbconn import *
53 from summarystats import SummaryStats
54 from utils import parse_changes, check_dsc_files, build_package_list
55 from textutils import fix_maintainer
56 from lintian import parse_lintian_output, generate_reject_messages
57 from contents import UnpackedSource
58
59 ###############################################################################
60
def get_type(f, session):
    """
    Get the file type of C{f}

    @type f: dict
    @param f: file entry from Changes object

    @type session: SQLA Session
    @param session: SQL Alchemy session object

    @rtype: string
    @return: filetype
    """
    # Work out the type from the file entry itself.
    if "dbtype" in f:
        file_type = f["dbtype"]
    elif re_source_ext.match(f["type"]):
        file_type = "dsc"
    elif f['architecture'] == 'source' and f["type"] == 'unreadable':
        utils.warn('unreadable source file (will continue and hope for the best)')
        return f["type"]
    else:
        file_type = f["type"]
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (file_type))

    # The type must have a corresponding override type in the database.
    if get_override_type(file_type, session) is None:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))

    return file_type
94 ################################################################################
95
96 # Determine what parts in a .changes are NEW
97
98 def determine_new(filename, changes, files, warn=1, session = None, dsc = None, new = None):
99     """
100     Determine what parts in a C{changes} file are NEW.
101
102     @type filename: str
103     @param filename: changes filename
104
105     @type changes: Upload.Pkg.changes dict
106     @param changes: Changes dictionary
107
108     @type files: Upload.Pkg.files dict
109     @param files: Files dictionary
110
111     @type warn: bool
112     @param warn: Warn if overrides are added for (old)stable
113
114     @type dsc: Upload.Pkg.dsc dict
115     @param dsc: (optional); Dsc dictionary
116
117     @type new: dict
118     @param new: new packages as returned by a previous call to this function, but override information may have changed
119
120     @rtype: dict
121     @return: dictionary of NEW components.
122
123     """
124     # TODO: This should all use the database instead of parsing the changes
125     # file again
126     byhand = {}
127     if new is None:
128         new = {}
129
130     dbchg = get_dbchange(filename, session)
131     if dbchg is None:
132         print "Warning: cannot find changes file in database; won't check byhand"
133
134     # Try to get the Package-Set field from an included .dsc file (if possible).
135     if dsc:
136         for package, entry in build_package_list(dsc, session).items():
137             if package not in new:
138                 new[package] = entry
139
140     # Build up a list of potentially new things
141     for name, f in files.items():
142         # Keep a record of byhand elements
143         if f["section"] == "byhand":
144             byhand[name] = 1
145             continue
146
147         pkg = f["package"]
148         priority = f["priority"]
149         section = f["section"]
150         file_type = get_type(f, session)
151         component = f["component"]
152
153         if file_type == "dsc":
154             priority = "source"
155
156         if not new.has_key(pkg):
157             new[pkg] = {}
158             new[pkg]["priority"] = priority
159             new[pkg]["section"] = section
160             new[pkg]["type"] = file_type
161             new[pkg]["component"] = component
162             new[pkg]["files"] = []
163         else:
164             old_type = new[pkg]["type"]
165             if old_type != file_type:
166                 # source gets trumped by deb or udeb
167                 if old_type == "dsc":
168                     new[pkg]["priority"] = priority
169                     new[pkg]["section"] = section
170                     new[pkg]["type"] = file_type
171                     new[pkg]["component"] = component
172
173         new[pkg]["files"].append(name)
174
175         if f.has_key("othercomponents"):
176             new[pkg]["othercomponents"] = f["othercomponents"]
177
178     # Fix up the list of target suites
179     cnf = Config()
180     for suite in changes["suite"].keys():
181         oldsuite = get_suite(suite, session)
182         if not oldsuite:
183             print "WARNING: Invalid suite %s found" % suite
184             continue
185
186         if oldsuite.overridesuite:
187             newsuite = get_suite(oldsuite.overridesuite, session)
188
189             if newsuite:
190                 print "INFORMATION: Using overrides from suite %s instead of suite %s" % (
191                     oldsuite.overridesuite, suite)
192                 del changes["suite"][suite]
193                 changes["suite"][oldsuite.overridesuite] = 1
194             else:
195                 print "WARNING: Told to use overridesuite %s for %s but it doesn't exist.  Bugger" % (
196                     oldsuite.overridesuite, suite)
197
198     # Check for unprocessed byhand files
199     if dbchg is not None:
200         for b in byhand.keys():
201             # Find the file entry in the database
202             found = False
203             for f in dbchg.files:
204                 if f.filename == b:
205                     found = True
206                     # If it's processed, we can ignore it
207                     if f.processed:
208                         del byhand[b]
209                     break
210
211             if not found:
212                 print "Warning: Couldn't find BYHAND item %s in the database; assuming unprocessed"
213
214     # Check for new stuff
215     for suite in changes["suite"].keys():
216         for pkg in new.keys():
217             ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
218             if len(ql) > 0:
219                 for file_entry in new[pkg]["files"]:
220                     if files[file_entry].has_key("new"):
221                         del files[file_entry]["new"]
222                 del new[pkg]
223
224     if warn:
225         for s in ['stable', 'oldstable']:
226             if changes["suite"].has_key(s):
227                 print "WARNING: overrides will be added for %s!" % s
228         for pkg in new.keys():
229             if new[pkg].has_key("othercomponents"):
230                 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
231
232     return new, byhand
233
234 ################################################################################
235
def check_valid(new, session = None):
    """
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages can not be udeb
      - source priority can only be assigned to dsc file types

    A failed check is recorded by setting "section id" / "priority id"
    to -1 in the package's entry; the dict is modified in place.

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.

    """
    for pkg in new.keys():
        section_name = new[pkg]["section"]
        priority_name = new[pkg]["priority"]
        file_type = new[pkg]["type"]

        # Resolve the section name to its database id (-1 marks invalid).
        section = get_section(section_name, session)
        if section is None:
            new[pkg]["section id"] = -1
        else:
            new[pkg]["section id"] = section.section_id

        # Resolve the priority name to its database id (-1 marks invalid).
        priority = get_priority(priority_name, session)
        if priority is None:
            new[pkg]["priority id"] = -1
        else:
            new[pkg]["priority id"] = priority.priority_id

        # Sanity checks
        di = section_name.find("debian-installer") != -1

        # If d-i, we must be udeb and vice-versa
        if     (di and file_type not in ("udeb", "dsc")) or \
           (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1

        # If dsc we need to be source and vice-versa.
        # Bug fix: compare the priority *name*.  'priority' is a database
        # object (or None) and never compares equal to the string "source",
        # which made this check flag every dsc as having a bad priority.
        if (priority_name == "source" and file_type != "dsc") or \
           (priority_name != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1
277
278 ###############################################################################
279
# Used by Upload.check_timestamps
class TarTime(object):
    """
    Tar-walk callback recording members with suspicious timestamps.

    Members with an mtime after C{future_cutoff} are collected in
    C{future_files}; members with an mtime before C{past_cutoff} in
    C{ancient_files}.  Both map the member name to its mtime.
    """
    def __init__(self, future_cutoff, past_cutoff):
        self.reset()
        self.future_cutoff = future_cutoff
        self.past_cutoff = past_cutoff

    def reset(self):
        # name -> mtime maps, refilled for each archive scanned
        self.future_files = {}
        self.ancient_files = {}

    def callback(self, member, data):
        # Bug fix: this previously indexed with the undefined global 'Name'
        # (a NameError whenever a bad timestamp was found); record the
        # offending member under its own name instead.
        if member.mtime > self.future_cutoff:
            self.future_files[member.name] = member.mtime
        if member.mtime < self.past_cutoff:
            self.ancient_files[member.name] = member.mtime
296
297 ###############################################################################
298
def prod_maintainer(notes, upload):
    """
    Interactively compose and send a "prod" mail about an upload.

    @type notes: list
    @param notes: note objects whose C{comment} text seeds the editor buffer

    @type upload: Upload
    @param upload: upload being prodded; its Subst map drives the mail template
    """
    cnf = Config()

    # Here we prepare an editor and get them ready to prod...
    (fd, temp_filename) = utils.temp_filename()
    temp_file = os.fdopen(fd, 'w')
    for note in notes:
        temp_file.write(note.comment)
    temp_file.close()
    editor = os.environ.get("EDITOR","vi")
    answer = 'E'
    # Edit/review loop: re-open the editor until the operator stops choosing 'E'.
    while answer == 'E':
        os.system("%s %s" % (editor, temp_filename))
        temp_fh = utils.open_file(temp_filename)
        prod_message = "".join(temp_fh.readlines())
        temp_fh.close()
        print "Prod message:"
        print utils.prefix_multi_line_string(prod_message,"  ",include_blank_lines=1)
        prompt = "[P]rod, Edit, Abandon, Quit ?"
        answer = "XXX"
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            m = re_default_answer.search(prompt)
            if answer == "":
                # Empty input selects the default answer from the prompt ([P]).
                answer = m.group(1)
            answer = answer[:1].upper()
    os.unlink(temp_filename)
    if answer == 'A':
        return
    elif answer == 'Q':
        end()
        sys.exit(0)
    # Otherwise, do the proding...
    user_email_address = utils.whoami() + " <%s>" % (
        cnf["Dinstall::MyAdminAddress"])

    Subst = upload.Subst

    Subst["__FROM_ADDRESS__"] = user_email_address
    Subst["__PROD_MESSAGE__"] = prod_message
    Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]

    prod_mail_message = utils.TemplateSubst(
        Subst,cnf["Dir::Templates"]+"/process-new.prod")

    # Send the prod mail
    utils.send_mail(prod_mail_message)

    print "Sent prodding message"
348
349 ################################################################################
350
def edit_note(note, upload, session, trainee=False):
    """
    Interactively edit a NEW note in $EDITOR and store it in the database.

    @type note: object
    @param note: not referenced by this body; the note text is always taken
                 from the editor session

    @type upload: Upload
    @param upload: upload the note is attached to (source/version are recorded)

    @type session: SQLA Session
    @param session: session used to persist the new NewComment row

    @type trainee: bool
    @param trainee: whether the comment is recorded as a trainee's
    """
    # Write the current data to a temporary file
    (fd, temp_filename) = utils.temp_filename()
    editor = os.environ.get("EDITOR","vi")
    answer = 'E'
    # Edit/review loop: re-open the editor until the operator stops choosing 'E'.
    while answer == 'E':
        os.system("%s %s" % (editor, temp_filename))
        temp_file = utils.open_file(temp_filename)
        newnote = temp_file.read().rstrip()
        temp_file.close()
        print "New Note:"
        print utils.prefix_multi_line_string(newnote,"  ")
        prompt = "[D]one, Edit, Abandon, Quit ?"
        answer = "XXX"
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            m = re_default_answer.search(prompt)
            if answer == "":
                # Empty input selects the default answer from the prompt ([D]).
                answer = m.group(1)
            answer = answer[:1].upper()
    os.unlink(temp_filename)
    if answer == 'A':
        return
    elif answer == 'Q':
        end()
        sys.exit(0)

    # Persist the note against this upload's source package and version.
    comment = NewComment()
    comment.package = upload.pkg.changes["source"]
    comment.version = upload.pkg.changes["version"]
    comment.comment = newnote
    comment.author  = utils.whoami()
    comment.trainee = trainee
    session.add(comment)
    session.commit()
386
387 ###############################################################################
388
# Suite names Debian Maintainers (DMs) can upload to.
dm_suites = ['unstable', 'experimental']
391
def get_newest_source(source, session):
    """Return the newest DBSource object in dm_suites (or None)."""
    # The most recent version of the package uploaded to unstable or
    # experimental includes the field "DM-Upload-Allowed: yes" in the
    # source section of its control file.
    query = session.query(DBSource) \
        .filter_by(source = source) \
        .filter(DBSource.suites.any(Suite.suite_name.in_(dm_suites))) \
        .order_by(desc('source.version'))
    return query.first()
401
def get_suite_version_by_source(source, session):
    """Return a list of (suite_name, version) tuples for a source package."""
    query = session.query(Suite.suite_name, DBSource.version)
    query = query.join(Suite.sources).filter_by(source = source)
    return query.all()
407
def get_source_by_package_and_suite(package, suite_name, session):
    '''
    returns a DBSource query filtered by DBBinary.package and this package's
    suite_name
    '''
    query = session.query(DBSource)
    query = query.join(DBSource.binaries).filter_by(package = package)
    query = query.join(DBBinary.suites).filter_by(suite_name = suite_name)
    return query
416
def get_suite_version_by_package(package, arch_string, session):
    '''
    returns a list of tuples (suite_name, version) for binary package and
    arch_string
    '''
    # Arch "all" binaries are valid for every architecture, so match both.
    query = session.query(Suite.suite_name, DBBinary.version) \
        .join(Suite.binaries).filter_by(package = package) \
        .join(DBBinary.architecture) \
        .filter(Architecture.arch_string.in_([arch_string, 'all']))
    return query.all()
426
class Upload(object):
    """
    Everything that has to do with an upload processed.

    """
    def __init__(self):
        # Optional logger; left unset (None) until assigned by the caller.
        self.logger = None
        # Parsed .changes state for this upload.
        self.pkg = Changes()
        self.reset()
436
437     ###########################################################################
438
439     def reset (self):
440         """ Reset a number of internal variables."""
441
442         # Initialize the substitution template map
443         cnf = Config()
444         self.Subst = {}
445         self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
446         if cnf.has_key("Dinstall::BugServer"):
447             self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
448         self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
449         self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
450
451         self.rejects = []
452         self.warnings = []
453         self.notes = []
454
455         self.later_check_files = []
456
457         self.pkg.reset()
458
459     def package_info(self):
460         """
461         Format various messages from this Upload to send to the maintainer.
462         """
463
464         msgs = (
465             ('Reject Reasons', self.rejects),
466             ('Warnings', self.warnings),
467             ('Notes', self.notes),
468         )
469
470         msg = ''
471         for title, messages in msgs:
472             if messages:
473                 msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
474         msg += '\n\n'
475
476         return msg
477
478     ###########################################################################
    def update_subst(self):
        """ Set up the per-package template substitution mappings """

        cnf = Config()

        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not self.pkg.changes.has_key("architecture") or not \
           isinstance(self.pkg.changes["architecture"], dict):
            self.pkg.changes["architecture"] = { "Unknown" : "" }

        # and maintainer2047 may not exist.
        if not self.pkg.changes.has_key("maintainer2047"):
            self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]

        self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
        self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if self.pkg.changes["architecture"].has_key("source") and \
           self.pkg.changes["changedby822"] != "" and \
           (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):

            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
        else:
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")

        # Process policy doesn't set the fingerprint field and I don't want to make it
        # do it for now as I don't want to have to deal with the case where we accepted
        # the package into PU-NEW, but the fingerprint has gone away from the keyring in
        # the meantime so the package will be remarked as rejectable.  Urgh.
        # TODO: Fix this properly
        if self.pkg.changes.has_key('fingerprint'):
            session = DBConn().session()
            fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
            # Sponsored uploads additionally get a copy sent to the sponsor address.
            if fpr and self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
                if self.pkg.changes.has_key("sponsoremail"):
                    self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
            session.close()

        # Bcc the package tracking system for source uploads, if configured.
        if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
            self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if cnf.get("Dinstall::OverrideMaintainer"):
            self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
            self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]

        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
        self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
        self.Subst["__SUITE__"] = ", ".join(self.pkg.changes["distribution"])
535
536     ###########################################################################
    def load_changes(self, filename):
        """
        Load a changes file and setup a dictionary around it. Also checks for mandantory
        fields  within.

        @type filename: string
        @param filename: Changes filename, full path.

        @rtype: boolean
        @return: whether the changes file was valid or not.  We may want to
                 reject even if this is True (see what gets put in self.rejects).
                 This is simply to prevent us even trying things later which will
                 fail because we couldn't properly parse the file.
        """
        # NOTE(review): Cnf appears unused in this method — confirm before removing.
        Cnf = Config()
        self.pkg.changes_file = filename

        # Parse the .changes field into a dictionary
        try:
            self.pkg.changes.update(parse_changes(filename))
        except CantOpenError:
            self.rejects.append("%s: can't read file." % (filename))
            return False
        except ParseChangesError as line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except ChangesUnicodeError:
            self.rejects.append("%s: changes file not proper utf-8" % (filename))
            return False

        # Parse the Files field from the .changes into another dictionary
        try:
            self.pkg.files.update(utils.build_file_list(self.pkg.changes))
        except ParseChangesError as line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except UnknownFormatError as format:
            self.rejects.append("%s: unknown format '%s'." % (filename, format))
            return False

        # Check for mandatory fields
        for i in ("distribution", "source", "binary", "architecture",
                  "version", "maintainer", "files", "changes", "description"):
            if not self.pkg.changes.has_key(i):
                # Avoid undefined errors later
                self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
                return False

        # Strip a source version in brackets from the source field
        if re_strip_srcver.search(self.pkg.changes["source"]):
            self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])

        # Ensure the source field is a valid package name.
        if not re_valid_pkg_name.match(self.pkg.changes["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))

        # Split multi-value fields into a lower-level dictionary
        # (each space-separated value becomes a key mapping to 1).
        for i in ("architecture", "distribution", "binary", "closes"):
            o = self.pkg.changes.get(i, "")
            if o != "":
                del self.pkg.changes[i]

            self.pkg.changes[i] = {}

            for j in o.split():
                self.pkg.changes[i][j] = 1

        # Fix the Maintainer: field to be RFC822/2047 compatible
        try:
            (self.pkg.changes["maintainer822"],
             self.pkg.changes["maintainer2047"],
             self.pkg.changes["maintainername"],
             self.pkg.changes["maintaineremail"]) = \
                   fix_maintainer (self.pkg.changes["maintainer"])
        except ParseMaintError as msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["maintainer"], msg))

        # ...likewise for the Changed-By: field if it exists.
        try:
            (self.pkg.changes["changedby822"],
             self.pkg.changes["changedby2047"],
             self.pkg.changes["changedbyname"],
             self.pkg.changes["changedbyemail"]) = \
                   fix_maintainer (self.pkg.changes.get("changed-by", ""))
        except ParseMaintError as msg:
            self.pkg.changes["changedby822"] = ""
            self.pkg.changes["changedby2047"] = ""
            self.pkg.changes["changedbyname"] = ""
            self.pkg.changes["changedbyemail"] = ""

            # NOTE(review): indexes "changed-by" directly although the parse
            # above used .get() with a default — could KeyError if the field
            # is absent; confirm fix_maintainer("") cannot raise.
            self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["changed-by"], msg))

        # Ensure all the values in Closes: are numbers
        if self.pkg.changes.has_key("closes"):
            for i in self.pkg.changes["closes"].keys():
                if re_isanum.match (i) == None:
                    self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))

        # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
        self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
        self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])

        # Check the .changes is non-empty
        if not self.pkg.files:
            self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
            return False

        # Changes was syntactically valid even if we'll reject
        return True
648
649     ###########################################################################
650
    def check_distributions(self):
        """
        Check and map the Distribution field.

        Applies the configured SuiteMappings (map, silent-map, map-unreleased,
        ignore, reject, propup-version) to self.pkg.changes["distribution"],
        then rejects if no valid target suite remains or a target suite is
        unknown.  Findings are appended to self.notes / self.warnings /
        self.rejects.
        """

        Cnf = Config()

        # Handle suite mappings
        for m in Cnf.ValueList("SuiteMappings"):
            args = m.split()
            mtype = args[0]
            if mtype == "map" or mtype == "silent-map":
                # Replace 'source' suite with 'dest'; silent-map skips the note.
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    del self.pkg.changes["distribution"][source]
                    self.pkg.changes["distribution"][dest] = 1
                    if mtype != "silent-map":
                        self.notes.append("Mapping %s to %s." % (source, dest))
                if self.pkg.changes.has_key("distribution-version"):
                    if self.pkg.changes["distribution-version"].has_key(source):
                        self.pkg.changes["distribution-version"][source]=dest
            elif mtype == "map-unreleased":
                # Map only when the upload contains an architecture the
                # source suite does not build for.
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    for arch in self.pkg.changes["architecture"].keys():
                        if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
                            self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
                            del self.pkg.changes["distribution"][source]
                            self.pkg.changes["distribution"][dest] = 1
                            break
            elif mtype == "ignore":
                # Silently drop the suite from the target list (with a warning).
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    del self.pkg.changes["distribution"][suite]
                    self.warnings.append("Ignoring %s as a target suite." % (suite))
            elif mtype == "reject":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    self.rejects.append("Uploads to %s are not accepted." % (suite))
            elif mtype == "propup-version":
                # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
                #
                # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
                if self.pkg.changes["distribution"].has_key(args[1]):
                    self.pkg.changes.setdefault("distribution-version", {})
                    for suite in args[2:]:
                        self.pkg.changes["distribution-version"][suite] = suite

        # Ensure there is (still) a target distribution
        if len(self.pkg.changes["distribution"].keys()) < 1:
            self.rejects.append("No valid distribution remaining.")

        # Ensure target distributions exist
        for suite in self.pkg.changes["distribution"].keys():
            if not get_suite(suite.lower()):
                self.rejects.append("Unknown distribution `%s'." % (suite))
705
706     ###########################################################################
707
    def binary_file_checks(self, f, session):
        """
        Run all checks on a single binary (.deb / .udeb) upload file.

        Extracts the control file from the .deb, validates its fields
        against the .changes metadata and the database, and fills in
        self.pkg.files[f] (package, version, architecture, source,
        dbtype, ...).  Problems are appended to self.rejects (fatal) or
        self.warnings (non-fatal).

        @type f: string
        @param f: filename of the binary package being checked

        @param session: SQLAlchemy session used for suite/source lookups
        """
        cnf = Config()
        entry = self.pkg.files[f]

        # Extract package control information from the .deb itself
        deb_file = utils.open_file(f)
        try:
            control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
        except:
            self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_info()[0]))
            deb_file.close()
            # Can't continue, none of the checks on control would work.
            return

        # Check for mandatory "Description:".  The control file is extracted
        # a second time; indexing a missing field raises, which we turn into
        # a reject.
        deb_file.seek(0)
        try:
            apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
        except:
            self.rejects.append("%s: Missing Description in binary package" % (f))
            return

        deb_file.close()

        # Check for mandatory fields; without these none of the later
        # comparisons make sense.
        for field in [ "Package", "Architecture", "Version" ]:
            if control.Find(field) == None:
                # Can't continue
                self.rejects.append("%s: No %s field in control." % (f, field))
                return

        # Ensure the package name matches the one given in the .changes
        if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
            self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))

        # Validate the package field
        package = control.Find("Package")
        if not re_valid_pkg_name.match(package):
            self.rejects.append("%s: invalid package name '%s'." % (f, package))

        # Validate the version field
        version = control.Find("Version")
        if not re_valid_version.match(version):
            self.rejects.append("%s: invalid version number '%s'." % (f, version))

        # Ensure the architecture of the .deb is one we know about: it must
        # be valid in either the default suite or the suite being uploaded to.
        default_suite = cnf.get("Dinstall::DefaultSuite", "unstable")
        architecture = control.Find("Architecture")
        upload_suite = self.pkg.changes["distribution"].keys()[0]

        if      architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session = session)] \
            and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session = session)]:
            self.rejects.append("Unknown architecture '%s'." % (architecture))

        # Ensure the architecture of the .deb is one of the ones
        # listed in the .changes.
        if not self.pkg.changes["architecture"].has_key(architecture):
            self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))

        # Sanity-check the Depends field (empty is distinct from absent:
        # Find() returns None when the field is missing, '' when it's empty)
        depends = control.Find("Depends")
        if depends == '':
            self.rejects.append("%s: Depends field is empty." % (f))

        # Sanity-check the Provides field: strip whitespace, then validate
        # each comma-separated entry as a package name.
        provides = control.Find("Provides")
        if provides:
            provide = re_spacestrip.sub('', provides)
            if provide == '':
                self.rejects.append("%s: Provides field is empty." % (f))
            prov_list = provide.split(",")
            for prov in prov_list:
                if not re_valid_pkg_name.match(prov):
                    self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))

        # If there is a Built-Using field, we need to check we can find the
        # exact source version
        built_using = control.Find("Built-Using")
        if built_using:
            try:
                entry["built-using"] = []
                for dep in apt_pkg.parse_depends(built_using):
                    bu_s, bu_v, bu_e = dep[0]
                    # Check that it's an exact match dependency and we have
                    # some form of version
                    if bu_e != "=" or len(bu_v) < 1:
                        self.rejects.append("%s: Built-Using contains non strict dependency (%s %s %s)" % (f, bu_s, bu_e, bu_v))
                    else:
                        # Find the source id for this version
                        bu_so = get_sources_from_name(bu_s, version=bu_v, session = session)
                        if len(bu_so) != 1:
                            self.rejects.append("%s: Built-Using (%s = %s): Cannot find source package" % (f, bu_s, bu_v))
                        else:
                            entry["built-using"].append( (bu_so[0].source, bu_so[0].version, ) )

            except ValueError as e:
                self.rejects.append("%s: Cannot parse Built-Using field: %s" % (f, str(e)))


        # Check the section & priority match those given in the .changes (non-fatal)
        if     control.Find("Section") and entry["section"] != "" \
           and entry["section"] != control.Find("Section"):
            self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
                                (f, control.Find("Section", ""), entry["section"]))
        if control.Find("Priority") and entry["priority"] != "" \
           and entry["priority"] != control.Find("Priority"):
            self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
                                (f, control.Find("Priority", ""), entry["priority"]))

        # Record the validated control fields on the file entry.
        entry["package"] = package
        entry["architecture"] = architecture
        entry["version"] = version
        entry["maintainer"] = control.Find("Maintainer", "")

        if f.endswith(".udeb"):
            self.pkg.files[f]["dbtype"] = "udeb"
        elif f.endswith(".deb"):
            self.pkg.files[f]["dbtype"] = "deb"
        else:
            self.rejects.append("%s is neither a .deb or a .udeb." % (f))

        # Source defaults to the binary package name when not given explicitly.
        entry["source"] = control.Find("Source", entry["package"])

        # Get the source version: a Source field of the form
        # "name (version)" carries its own version, otherwise use the
        # binary's version.
        source = entry["source"]
        source_version = ""

        if source.find("(") != -1:
            m = re_extract_src_version.match(source)
            source = m.group(1)
            source_version = m.group(2)

        if not source_version:
            source_version = self.pkg.files[f]["version"]

        entry["source package"] = source
        entry["source version"] = source_version

        # Ensure the filename matches the contents of the .deb
        m = re_isadeb.match(f)

        #  package name
        file_package = m.group(1)
        if entry["package"] != file_package:
            self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
                                (f, file_package, entry["dbtype"], entry["package"]))
        epochless_version = re_no_epoch.sub('', control.Find("Version"))

        #  version (the filename never carries the epoch)
        file_version = m.group(2)
        if epochless_version != file_version:
            self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
                                (f, file_version, entry["dbtype"], epochless_version))

        #  architecture
        file_architecture = m.group(3)
        if entry["architecture"] != file_architecture:
            self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
                                (f, file_architecture, entry["dbtype"], entry["architecture"]))

        # Check for existent source: either in this very upload, in the
        # database, or waiting in one of the policy queues.
        source_version = entry["source version"]
        source_package = entry["source package"]
        if self.pkg.changes["architecture"].has_key("source"):
            if source_version != self.pkg.changes["version"]:
                self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
                                    (source_version, f, self.pkg.changes["version"]))
        else:
            # Check in the SQL database
            if not source_exists(source_package, source_version, suites = \
                self.pkg.changes["distribution"].keys(), session = session):
                # Check in one of the other directories
                source_epochless_version = re_no_epoch.sub('', source_version)
                dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)

                byhand_dir = get_policy_queue('byhand', session).path
                new_dir = get_policy_queue('new', session).path

                if os.path.exists(os.path.join(byhand_dir, dsc_filename)):
                    entry["byhand"] = 1
                elif os.path.exists(os.path.join(new_dir, dsc_filename)):
                    entry["new"] = 1
                else:
                    dsc_file_exists = False
                    # TODO: Don't hardcode this list: use all relevant queues
                    #       The question is how to determine what is relevant
                    for queue_name in ["embargoed", "unembargoed", "proposedupdates", "oldproposedupdates"]:
                        queue = get_policy_queue(queue_name, session)
                        if queue:
                            if os.path.exists(os.path.join(queue.path, dsc_filename)):
                                dsc_file_exists = True
                                break

                    if not dsc_file_exists:
                        self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))

        # Check the version and for file overwrites
        self.check_binary_against_db(f, session)
906
907     def source_file_checks(self, f, session):
908         entry = self.pkg.files[f]
909
910         m = re_issource.match(f)
911         if not m:
912             return
913
914         entry["package"] = m.group(1)
915         entry["version"] = m.group(2)
916         entry["type"] = m.group(3)
917
918         # Ensure the source package name matches the Source filed in the .changes
919         if self.pkg.changes["source"] != entry["package"]:
920             self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
921
922         # Ensure the source version matches the version in the .changes file
923         if re_is_orig_source.match(f):
924             changes_version = self.pkg.changes["chopversion2"]
925         else:
926             changes_version = self.pkg.changes["chopversion"]
927
928         if changes_version != entry["version"]:
929             self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))
930
931         # Ensure the .changes lists source in the Architecture field
932         if not self.pkg.changes["architecture"].has_key("source"):
933             self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))
934
935         # Check the signature of a .dsc file
936         if entry["type"] == "dsc":
937             # check_signature returns either:
938             #  (None, [list, of, rejects]) or (signature, [])
939             (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
940             for j in rejects:
941                 self.rejects.append(j)
942
943         entry["architecture"] = "source"
944
    def per_suite_file_checks(self, f, suite, session):
        """
        Run the per-target-suite checks for a single upload file.

        Applies component mappings, validates the component against the
        target suite, detects NEW packages (no override entry), determines
        the pool location, and compares the file against any existing pool
        copy.  Results are recorded in self.pkg.files[f]; problems are
        appended to self.rejects.

        @type f: string
        @param f: filename being checked

        @type suite: string
        @param suite: target suite name

        @param session: SQLAlchemy session for the database lookups
        """
        cnf = Config()
        entry = self.pkg.files[f]

        # Skip byhand files; they bypass these checks entirely.
        if entry.has_key("byhand"):
            return

        # Check we have fields we need to do these checks
        oktogo = True
        for m in ['component', 'package', 'priority', 'size', 'md5sum']:
            if not entry.has_key(m):
                self.rejects.append("file '%s' does not have field %s set" % (f, m))
                oktogo = False

        if not oktogo:
            return

        # Handle component mappings (e.g. remap a source component to a
        # destination component per configuration).
        for m in cnf.ValueList("ComponentMappings"):
            (source, dest) = m.split()
            if entry["component"] == source:
                entry["original component"] = source
                entry["component"] = dest

        # Ensure the component is valid for the target suite
        if entry["component"] not in get_component_names(session):
            self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
            return

        # Validate the component
        if not get_component(entry["component"], session):
            self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
            return

        # See if the package is NEW (no override entry yet).
        if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
            entry["new"] = 1

        # Validate the priority: a '/' would indicate a section/priority mixup.
        if entry["priority"].find('/') != -1:
            self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))

        # Determine the location; -1 marks "location unknown" alongside the
        # reject so later code has a defined value.
        location = cnf["Dir::Pool"]
        l = get_location(location, entry["component"], session=session)
        if l is None:
            self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
            entry["location id"] = -1
        else:
            entry["location id"] = l.location_id

        # Check the md5sum & size against existing files (if any)
        entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])

        found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
                                         entry["size"], entry["md5sum"], entry["location id"])

        # found is None (multiple matches), False (mismatch) or True;
        # poolfile is the existing pool entry, if any.
        if found is None:
            self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
        elif found is False and poolfile is not None:
            self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
        else:
            if poolfile is None:
                entry["files id"] = None
            else:
                entry["files id"] = poolfile.file_id

        # Check for packages that have moved from one component to another
        entry['suite'] = suite
        arch_list = [entry["architecture"], 'all']
        component = get_component_by_package_suite(self.pkg.files[f]['package'], \
            [suite], arch_list = arch_list, session = session)
        if component is not None:
            entry["othercomponents"] = component
1020
    def check_files(self, action=True):
        """
        Run all per-file checks for the upload.

        Copies the files into the holding area (when action is True),
        rejects duplicate .changes files already known to dak, classifies
        each file as binary/source/byhand, and dispatches to
        binary_file_checks / source_file_checks / per_suite_file_checks.
        Problems are appended to self.rejects / self.warnings.

        @type action: boolean
        @param action: when False (dry run, -n), skip copying to holding
        """
        file_keys = self.pkg.files.keys()
        holding = Holding()
        cnf = Config()

        if action:
            # Copy the upload into the holding area; failures are only
            # warnings at this point since the file may exist in the DB.
            cwd = os.getcwd()
            os.chdir(self.pkg.directory)
            for f in file_keys:
                ret = holding.copy_to_holding(f)
                if ret is not None:
                    self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)

            os.chdir(cwd)

        # check we already know the changes file
        # [NB: this check must be done post-suite mapping]
        base_filename = os.path.basename(self.pkg.changes_file)

        session = DBConn().session()

        try:
            dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
            # if in the pool or in a queue other than unchecked, reject
            if (dbc.in_queue is None) \
                   or (dbc.in_queue is not None
                       and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
                self.rejects.append("%s file already known to dak" % base_filename)
        except NoResultFound as e:
            # not known, good
            pass

        has_binaries = False
        has_source = False

        for f, entry in self.pkg.files.items():
            # Ensure the file does not already exist in one of the accepted directories
            # TODO: Dynamically generate this list
            for queue_name in [ "byhand", "new", "proposedupdates", "oldproposedupdates", "embargoed", "unembargoed" ]:
                queue = get_policy_queue(queue_name, session)
                if queue and os.path.exists(os.path.join(queue.path, f)):
                    self.rejects.append("%s file already exists in the %s queue." % (f, queue_name))

            # Reject filenames containing characters unsafe for shell use.
            if not re_taint_free.match(f):
                self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))

            # Check the file is readable
            if os.access(f, os.R_OK) == 0:
                # When running in -n, copy_to_holding() won't have
                # generated the reject_message, so we need to.
                if action:
                    if os.path.exists(f):
                        self.rejects.append("Can't read `%s'. [permission denied]" % (f))
                    else:
                        # Don't directly reject, mark to check later to deal with orig's
                        # we can find in the pool
                        self.later_check_files.append(f)
                entry["type"] = "unreadable"
                continue

            # If it's byhand skip remaining checks
            if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Checks for a binary package...
            elif re_isadeb.match(f):
                has_binaries = True
                entry["type"] = "deb"

                # This routine appends to self.rejects/warnings as appropriate
                self.binary_file_checks(f, session)

            # Checks for a source package...
            elif re_issource.match(f):
                has_source = True

                # This routine appends to self.rejects/warnings as appropriate
                self.source_file_checks(f, session)

            # Not a binary or source package?  Assume byhand...
            else:
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Per-suite file checks
            entry["oldfiles"] = {}
            for suite in self.pkg.changes["distribution"].keys():
                self.per_suite_file_checks(f, suite, session)

        session.close()

        # If the .changes file says it has source, it must have source.
        if self.pkg.changes["architecture"].has_key("source"):
            if not has_source:
                self.rejects.append("no source found and Architecture line in changes mention source.")

            if (not has_binaries) and (not cnf.FindB("Dinstall::AllowSourceOnlyUploads")):
                self.rejects.append("source only uploads are not supported.")
1120
1121     ###########################################################################
1122
1123     def __dsc_filename(self):
1124         """
1125         Returns: (Status, Dsc_Filename)
1126         where
1127           Status: Boolean; True when there was no error, False otherwise
1128           Dsc_Filename: String; name of the dsc file if Status is True, reason for the error otherwise
1129         """
1130         dsc_filename = None
1131
1132         # find the dsc
1133         for name, entry in self.pkg.files.items():
1134             if entry.has_key("type") and entry["type"] == "dsc":
1135                 if dsc_filename:
1136                     return False, "cannot process a .changes file with multiple .dsc's."
1137                 else:
1138                     dsc_filename = name
1139
1140         if not dsc_filename:
1141             return False, "source uploads must contain a dsc file"
1142
1143         return True, dsc_filename
1144
1145     def load_dsc(self, action=True, signing_rules=1):
1146         """
1147         Find and load the dsc from self.pkg.files into self.dsc
1148
1149         Returns: (Status, Reason)
1150         where
1151           Status: Boolean; True when there was no error, False otherwise
1152           Reason: String; When Status is False this describes the error
1153         """
1154
1155         # find the dsc
1156         (status, dsc_filename) = self.__dsc_filename()
1157         if not status:
1158             # If status is false, dsc_filename has the reason
1159             return False, dsc_filename
1160
1161         try:
1162             self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=signing_rules, dsc_file=1))
1163         except CantOpenError:
1164             if not action:
1165                 return False, "%s: can't read file." % (dsc_filename)
1166         except ParseChangesError as line:
1167             return False, "%s: parse error, can't grok: %s." % (dsc_filename, line)
1168         except InvalidDscError as line:
1169             return False, "%s: syntax error on line %s." % (dsc_filename, line)
1170         except ChangesUnicodeError:
1171             return False, "%s: dsc file not proper utf-8." % (dsc_filename)
1172
1173         return True, None
1174
1175     ###########################################################################
1176
    def check_dsc(self, action=True, session=None):
        """Returns bool indicating whether or not the source changes are valid"""
        # Ensure there is source to check
        if not self.pkg.changes["architecture"].has_key("source"):
            return True

        if session is None:
            session = DBConn().session()

        # Load and parse the .dsc; load_dsc returns (status, reason).
        (status, reason) = self.load_dsc(action=action)
        if not status:
            self.rejects.append(reason)
            return False
        (status, dsc_filename) = self.__dsc_filename()
        if not status:
            # If status is false, dsc_filename has the reason
            self.rejects.append(dsc_filename)
            return False

        # Build up the file list of files mentioned by the .dsc
        try:
            self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
        except NoFilesFieldError:
            self.rejects.append("%s: no Files: field." % (dsc_filename))
            return False
        except UnknownFormatError as format:
            self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
            return False
        except ParseChangesError as line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
            return False

        # Enforce mandatory fields
        for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
            if not self.pkg.dsc.has_key(i):
                self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
                return False

        # Validate the source and version fields
        if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
        if not re_valid_version.match(self.pkg.dsc["version"]):
            self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))

        # Only a limited list of source formats are allowed in each suite
        for dist in self.pkg.changes["distribution"].keys():
            suite = get_suite(dist, session=session)
            if not suite:
                self.rejects.append("%s: cannot find suite %s when checking source formats" % (dsc_filename, dist))
                continue
            allowed = [ x.format_name for x in suite.srcformats ]
            if self.pkg.dsc["format"] not in allowed:
                self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))

        # Validate the Maintainer field
        try:
            # We ignore the return value
            fix_maintainer(self.pkg.dsc["maintainer"])
        except ParseMaintError as msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                                 % (dsc_filename, self.pkg.dsc["maintainer"], msg))

        # Validate the build-depends field(s)
        for field_name in [ "build-depends", "build-depends-indep" ]:
            field = self.pkg.dsc.get(field_name)
            if field:
                # Have apt try to parse them...
                try:
                    apt_pkg.ParseSrcDepends(field)
                except:
                    self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))

        # Ensure the version number in the .dsc matches the version number in the .changes
        epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        changes_version = self.pkg.files[dsc_filename]["version"]

        if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
            self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))

        # Ensure the Files field contain only what's expected
        self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))

        # Ensure source is newer than existing source in target suites
        # NOTE(review): this opens a fresh session even when one was passed
        # in (or created) above, shadowing it -- confirm intentional.
        session = DBConn().session()
        self.check_source_against_db(dsc_filename, session)
        self.check_dsc_against_db(dsc_filename, session)

        dbchg = get_dbchange(self.pkg.changes_file, session)

        # Finally, check if we're missing any files
        for f in self.later_check_files:
            print 'XXX: %s' % f
            # Check if we've already processed this file if we have a dbchg object
            ok = False
            if dbchg:
                for pf in dbchg.files:
                    if pf.filename == f and pf.processed:
                        self.notes.append('%s was already processed so we can go ahead' % f)
                        ok = True
                        del self.pkg.files[f]
            if not ok:
                self.rejects.append("Could not find file %s references in changes" % f)

        session.close()

        return (len(self.rejects) == 0)
1283
1284     ###########################################################################
1285
    def get_changelog_versions(self, source_dir):
        """Extracts a the source package and (optionally) grabs the
        version history out of debian/changelog for the BTS.

        Expects to be run with the current working directory set to a
        scratch directory (see check_source); symlinks the source files
        from source_dir into it before unpacking.  The version history is
        stored in self.pkg.dsc["bts changelog"]; problems are appended to
        self.rejects.

        @type source_dir: string
        @param source_dir: directory containing the uploaded source files
        """

        cnf = Config()

        # Find the .dsc (again)
        dsc_filename = None
        for f in self.pkg.files.keys():
            if self.pkg.files[f]["type"] == "dsc":
                dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            return

        # Create a symlink mirror of the source files in our temporary directory
        for f in self.pkg.files.keys():
            m = re_issource.match(f)
            if m:
                src = os.path.join(source_dir, f)
                # If a file is missing for whatever reason, give up.
                if not os.path.exists(src):
                    return
                ftype = m.group(3)
                # Orig files already present in the pool are handled below.
                if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
                   self.pkg.orig_files[f].has_key("path"):
                    continue
                dest = os.path.join(os.getcwd(), f)
                os.symlink(src, dest)

        # If the orig files are not a part of the upload, create symlinks to the
        # existing copies.
        for orig_file in self.pkg.orig_files.keys():
            if not self.pkg.orig_files[orig_file].has_key("path"):
                continue
            dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
            os.symlink(self.pkg.orig_files[orig_file]["path"], dest)

        # Extract the source
        try:
            unpacked = UnpackedSource(dsc_filename)
        except Exception as e:
            self.rejects.append("'dpkg-source -x' failed for %s. (%s)" % (dsc_filename, str(e)))
            return

        # Version tracking is optional; skip when not configured.
        if not cnf.Find("Dir::BTSVersionTrack"):
            return

        # Get the upstream version (strip epoch and Debian revision).
        upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        if re_strip_revision.search(upstr_version):
            upstr_version = re_strip_revision.sub('', upstr_version)

        # Ensure the changelog file exists
        changelog_file = unpacked.get_changelog_file()
        if changelog_file is None:
            self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
            return

        # Parse the changelog, keeping only the version-header lines.
        self.pkg.dsc["bts changelog"] = ""
        for line in changelog_file.readlines():
            m = re_changelog_versions.match(line)
            if m:
                self.pkg.dsc["bts changelog"] += line
        changelog_file.close()
        unpacked.cleanup()

        # Check we found at least one revision in the changelog
        if not self.pkg.dsc["bts changelog"]:
            self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1358
1359     def check_source(self):
1360         # Bail out if:
1361         #    a) there's no source
1362         if not self.pkg.changes["architecture"].has_key("source"):
1363             return
1364
1365         tmpdir = utils.temp_dirname()
1366
1367         # Move into the temporary directory
1368         cwd = os.getcwd()
1369         os.chdir(tmpdir)
1370
1371         # Get the changelog version history
1372         self.get_changelog_versions(cwd)
1373
1374         # Move back and cleanup the temporary tree
1375         os.chdir(cwd)
1376
1377         try:
1378             shutil.rmtree(tmpdir)
1379         except OSError as e:
1380             if e.errno != errno.EACCES:
1381                 print "foobar"
1382                 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1383
1384             self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1385             # We probably have u-r or u-w directories so chmod everything
1386             # and try again.
1387             cmd = "chmod -R u+rwx %s" % (tmpdir)
1388             result = os.system(cmd)
1389             if result != 0:
1390                 utils.fubar("'%s' failed with result %s." % (cmd, result))
1391             shutil.rmtree(tmpdir)
1392         except Exception as e:
1393             print "foobar2 (%s)" % e
1394             utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1395
1396     ###########################################################################
1397     def ensure_hashes(self):
1398         # Make sure we recognise the format of the Files: field in the .changes
1399         format = self.pkg.changes.get("format", "0.0").split(".", 1)
1400         if len(format) == 2:
1401             format = int(format[0]), int(format[1])
1402         else:
1403             format = int(float(format[0])), 0
1404
1405         # We need to deal with the original changes blob, as the fields we need
1406         # might not be in the changes dict serialised into the .dak anymore.
1407         orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1408
1409         # Copy the checksums over to the current changes dict.  This will keep
1410         # the existing modifications to it intact.
1411         for field in orig_changes:
1412             if field.startswith('checksums-'):
1413                 self.pkg.changes[field] = orig_changes[field]
1414
1415         # Check for unsupported hashes
1416         for j in utils.check_hash_fields(".changes", self.pkg.changes):
1417             self.rejects.append(j)
1418
1419         for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1420             self.rejects.append(j)
1421
1422         # We have to calculate the hash if we have an earlier changes version than
1423         # the hash appears in rather than require it exist in the changes file
1424         for hashname, hashfunc, version in utils.known_hashes:
1425             # TODO: Move _ensure_changes_hash into this class
1426             for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1427                 self.rejects.append(j)
1428             if "source" in self.pkg.changes["architecture"]:
1429                 # TODO: Move _ensure_dsc_hash into this class
1430                 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1431                     self.rejects.append(j)
1432
1433     def check_hashes(self):
1434         for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1435             self.rejects.append(m)
1436
1437         for m in utils.check_size(".changes", self.pkg.files):
1438             self.rejects.append(m)
1439
1440         for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1441             self.rejects.append(m)
1442
1443         for m in utils.check_size(".dsc", self.pkg.dsc_files):
1444             self.rejects.append(m)
1445
1446         self.ensure_hashes()
1447
1448     ###########################################################################
1449
1450     def ensure_orig(self, target_dir='.', session=None):
1451         """
1452         Ensures that all orig files mentioned in the changes file are present
1453         in target_dir. If they do not exist, they are symlinked into place.
1454
1455         An list containing the symlinks that were created are returned (so they
1456         can be removed).
1457         """
1458
1459         symlinked = []
1460         cnf = Config()
1461
1462         for filename, entry in self.pkg.dsc_files.iteritems():
1463             if not re_is_orig_source.match(filename):
1464                 # File is not an orig; ignore
1465                 continue
1466
1467             if os.path.exists(filename):
1468                 # File exists, no need to continue
1469                 continue
1470
1471             def symlink_if_valid(path):
1472                 f = utils.open_file(path)
1473                 md5sum = apt_pkg.md5sum(f)
1474                 f.close()
1475
1476                 fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
1477                 expected = (int(entry['size']), entry['md5sum'])
1478
1479                 if fingerprint != expected:
1480                     return False
1481
1482                 dest = os.path.join(target_dir, filename)
1483
1484                 os.symlink(path, dest)
1485                 symlinked.append(dest)
1486
1487                 return True
1488
1489             session_ = session
1490             if session is None:
1491                 session_ = DBConn().session()
1492
1493             found = False
1494
1495             # Look in the pool
1496             for poolfile in get_poolfile_like_name('%s' % filename, session_):
1497                 poolfile_path = os.path.join(
1498                     poolfile.location.path, poolfile.filename
1499                 )
1500
1501                 if symlink_if_valid(poolfile_path):
1502                     found = True
1503                     break
1504
1505             if session is None:
1506                 session_.close()
1507
1508             if found:
1509                 continue
1510
1511             # Look in some other queues for the file
1512             queue_names = ['new', 'byhand',
1513                            'proposedupdates', 'oldproposedupdates',
1514                            'embargoed', 'unembargoed']
1515
1516             for queue_name in queue_names:
1517                 queue = get_policy_queue(queue_name, session)
1518                 if not queue:
1519                     continue
1520
1521                 queuefile_path = os.path.join(queue.path, filename)
1522
1523                 if not os.path.exists(queuefile_path):
1524                     # Does not exist in this queue
1525                     continue
1526
1527                 if symlink_if_valid(queuefile_path):
1528                     break
1529
1530         return symlinked
1531
1532     ###########################################################################
1533
1534     def check_lintian(self):
1535         """
1536         Extends self.rejects by checking the output of lintian against tags
1537         specified in Dinstall::LintianTags.
1538         """
1539
1540         cnf = Config()
1541
1542         # Don't reject binary uploads
1543         if not self.pkg.changes['architecture'].has_key('source'):
1544             return
1545
1546         # Only check some distributions
1547         for dist in ('unstable', 'experimental'):
1548             if dist in self.pkg.changes['distribution']:
1549                 break
1550         else:
1551             return
1552
1553         # If we do not have a tagfile, don't do anything
1554         tagfile = cnf.get("Dinstall::LintianTags")
1555         if not tagfile:
1556             return
1557
1558         # Parse the yaml file
1559         sourcefile = file(tagfile, 'r')
1560         sourcecontent = sourcefile.read()
1561         sourcefile.close()
1562
1563         try:
1564             lintiantags = yaml.load(sourcecontent)['lintian']
1565         except yaml.YAMLError as msg:
1566             utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
1567             return
1568
1569         # Try and find all orig mentioned in the .dsc
1570         symlinked = self.ensure_orig()
1571
1572         # Setup the input file for lintian
1573         fd, temp_filename = utils.temp_filename()
1574         temptagfile = os.fdopen(fd, 'w')
1575         for tags in lintiantags.values():
1576             temptagfile.writelines(['%s\n' % x for x in tags])
1577         temptagfile.close()
1578
1579         try:
1580             cmd = "lintian --show-overrides --tags-from-file %s %s" % \
1581                 (temp_filename, self.pkg.changes_file)
1582
1583             result, output = commands.getstatusoutput(cmd)
1584         finally:
1585             # Remove our tempfile and any symlinks we created
1586             os.unlink(temp_filename)
1587
1588             for symlink in symlinked:
1589                 os.unlink(symlink)
1590
1591         if result == 2:
1592             utils.warn("lintian failed for %s [return code: %s]." % \
1593                 (self.pkg.changes_file, result))
1594             utils.warn(utils.prefix_multi_line_string(output, \
1595                 " [possible output:] "))
1596
1597         def log(*txt):
1598             if self.logger:
1599                 self.logger.log(
1600                     [self.pkg.changes_file, "check_lintian"] + list(txt)
1601                 )
1602
1603         # Generate messages
1604         parsed_tags = parse_lintian_output(output)
1605         self.rejects.extend(
1606             generate_reject_messages(parsed_tags, lintiantags, log=log)
1607         )
1608
1609     ###########################################################################
1610     def check_urgency(self):
1611         cnf = Config()
1612         if self.pkg.changes["architecture"].has_key("source"):
1613             if not self.pkg.changes.has_key("urgency"):
1614                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1615             self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1616             if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1617                 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1618                                      (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1619                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1620
1621     ###########################################################################
1622
1623     # Sanity check the time stamps of files inside debs.
1624     # [Files in the near future cause ugly warnings and extreme time
1625     #  travel can cause errors on extraction]
1626
1627     def check_timestamps(self):
1628         Cnf = Config()
1629
1630         future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1631         past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1632         tar = TarTime(future_cutoff, past_cutoff)
1633
1634         for filename, entry in self.pkg.files.items():
1635             if entry["type"] == "deb":
1636                 tar.reset()
1637                 try:
1638                     deb = apt_inst.DebFile(filename)
1639                     deb.control.go(tar.callback)
1640
1641                     future_files = tar.future_files.keys()
1642                     if future_files:
1643                         num_future_files = len(future_files)
1644                         future_file = future_files[0]
1645                         future_date = tar.future_files[future_file]
1646                         self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1647                                % (filename, num_future_files, future_file, time.ctime(future_date)))
1648
1649                     ancient_files = tar.ancient_files.keys()
1650                     if ancient_files:
1651                         num_ancient_files = len(ancient_files)
1652                         ancient_file = ancient_files[0]
1653                         ancient_date = tar.ancient_files[ancient_file]
1654                         self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1655                                % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1656                 except:
1657                     self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_info()[0], sys.exc_info()[1]))
1658
1659     def check_if_upload_is_sponsored(self, uid_email, uid_name):
1660         for key in "maintaineremail", "changedbyemail", "maintainername", "changedbyname":
1661             if not self.pkg.changes.has_key(key):
1662                 return False
1663         uid_email = '@'.join(uid_email.split('@')[:2])
1664         if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1665             sponsored = False
1666         elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1667             sponsored = False
1668             if uid_name == "":
1669                 sponsored = True
1670         else:
1671             sponsored = True
1672             sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1673             debian_emails = filter(lambda addr: addr.endswith('@debian.org'), sponsor_addresses)
1674             if uid_email not in debian_emails:
1675                 if debian_emails:
1676                     uid_email = debian_emails[0]
1677             if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1678                 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1679                     self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1680                         self.pkg.changes["sponsoremail"] = uid_email
1681
1682         return sponsored
1683
1684
1685     ###########################################################################
1686     # check_signed_by_key checks
1687     ###########################################################################
1688
1689     def check_signed_by_key(self):
1690         """Ensure the .changes is signed by an authorized uploader."""
1691         session = DBConn().session()
1692
1693         # First of all we check that the person has proper upload permissions
1694         # and that this upload isn't blocked
1695         fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1696
1697         if fpr is None:
1698             self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1699             return
1700
1701         # TODO: Check that import-keyring adds UIDs properly
1702         if not fpr.uid:
1703             self.rejects.append("Cannot find uid for fingerprint %s.  Please contact ftpmaster@debian.org" % fpr.fingerprint)
1704             return
1705
1706         # Check that the fingerprint which uploaded has permission to do so
1707         self.check_upload_permissions(fpr, session)
1708
1709         # Check that this package is not in a transition
1710         self.check_transition(session)
1711
1712         session.close()
1713
1714
1715     def check_upload_permissions(self, fpr, session):
1716         # Check any one-off upload blocks
1717         self.check_upload_blocks(fpr, session)
1718
1719         # If the source_acl is None, source is never allowed
1720         if fpr.source_acl is None:
1721             if self.pkg.changes["architecture"].has_key("source"):
1722                 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1723                 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1724                 self.rejects.append(rej)
1725                 return
1726         # Do DM as a special case
1727         # DM is a special case unfortunately, so we check it first
1728         # (keys with no source access get more access than DMs in one
1729         #  way; DMs can only upload for their packages whether source
1730         #  or binary, whereas keys with no access might be able to
1731         #  upload some binaries)
1732         elif fpr.source_acl.access_level == 'dm':
1733             self.check_dm_upload(fpr, session)
1734         else:
1735             # If not a DM, we allow full upload rights
1736             uid_email = "%s@debian.org" % (fpr.uid.uid)
1737             self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1738
1739
1740         # Check binary upload permissions
1741         # By this point we know that DMs can't have got here unless they
1742         # are allowed to deal with the package concerned so just apply
1743         # normal checks
1744         if fpr.binary_acl.access_level == 'full':
1745             return
1746
1747         # Otherwise we're in the map case
1748         tmparches = self.pkg.changes["architecture"].copy()
1749         tmparches.pop('source', None)
1750
1751         for bam in fpr.binary_acl_map:
1752             tmparches.pop(bam.architecture.arch_string, None)
1753
1754         if len(tmparches.keys()) > 0:
1755             if fpr.binary_reject:
1756                 rej = "changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1757                 if len(tmparches.keys()) == 1:
1758                     rej += "\n\narchitecture involved is: %s" % ",".join(tmparches.keys())
1759                 else:
1760                     rej += "\n\narchitectures involved are: %s" % ",".join(tmparches.keys())
1761                 self.rejects.append(rej)
1762             else:
1763                 # TODO: This is where we'll implement reject vs throw away binaries later
1764                 rej = "Uhm.  I'm meant to throw away the binaries now but that's not implemented yet"
1765                 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1766                 rej += "\nFingerprint: %s", (fpr.fingerprint)
1767                 self.rejects.append(rej)
1768
1769
1770     def check_upload_blocks(self, fpr, session):
1771         """Check whether any upload blocks apply to this source, source
1772            version, uid / fpr combination"""
1773
1774         def block_rej_template(fb):
1775             rej = 'Manual upload block in place for package %s' % fb.source
1776             if fb.version is not None:
1777                 rej += ', version %s' % fb.version
1778             return rej
1779
1780         for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1781             # version is None if the block applies to all versions
1782             if fb.version is None or fb.version == self.pkg.changes['version']:
1783                 # Check both fpr and uid - either is enough to cause a reject
1784                 if fb.fpr is not None:
1785                     if fb.fpr.fingerprint == fpr.fingerprint:
1786                         self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1787                 if fb.uid is not None:
1788                     if fb.uid == fpr.uid:
1789                         self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1790
1791
    def check_dm_upload(self, fpr, session):
        """
        Enforce the Debian Maintainer upload restrictions (GR 2007-003) for
        the signing fingerprint *fpr*, appending any violations to
        self.rejects.  Checks, in order: no NEW/BYHAND files, an existing
        source package with DM-Upload-Allowed, no sponsoring by the DM, DM
        listed in Maintainer/Uploaders, and no binary hijacks.
        """
        # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
        ## none of the uploaded packages are NEW
        rej = False
        for f in self.pkg.files.keys():
            if self.pkg.files[f].has_key("byhand"):
                self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
                rej = True
            if self.pkg.files[f].has_key("new"):
                self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
                rej = True

        # Don't bother with the remaining (DB-backed) checks if the upload
        # is already rejected for NEW/BYHAND content.
        if rej:
            return

        r = get_newest_source(self.pkg.changes["source"], session)

        if r is None:
            rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
            self.rejects.append(rej)
            return

        if not r.dm_upload_allowed:
            rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
            self.rejects.append(rej)
            return

        ## the Maintainer: field of the uploaded .changes file corresponds with
        ## the owner of the key used (ie, non-developer maintainers may not sponsor
        ## uploads)
        if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
            self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))

        ## the most recent version of the package uploaded to unstable or
        ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
        ## non-developer maintainers cannot NMU or hijack packages)

        # uploader includes the maintainer
        accept = False
        for uploader in r.uploaders:
            (rfc822, rfc2047, name, email) = uploader.get_split_maintainer()
            # Eww - I hope we never have two people with the same name in Debian
            if email == fpr.uid.uid or name == fpr.uid.name:
                accept = True
                break

        if not accept:
            self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
            return

        ## none of the packages are being taken over from other source packages
        for b in self.pkg.changes["binary"].keys():
            for suite in self.pkg.changes["distribution"].keys():
                for s in get_source_by_package_and_suite(b, suite, session):
                    if s.source != self.pkg.changes["source"]:
                        # NOTE(review): "%s" interpolates the source row object
                        # 's' itself (relying on its str()); 's.source' may
                        # have been intended -- confirm before changing.
                        self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s, suite))
1848
1849
1850
1851     def check_transition(self, session):
1852         cnf = Config()
1853
1854         sourcepkg = self.pkg.changes["source"]
1855
1856         # No sourceful upload -> no need to do anything else, direct return
1857         # We also work with unstable uploads, not experimental or those going to some
1858         # proposed-updates queue
1859         if "source" not in self.pkg.changes["architecture"] or \
1860            "unstable" not in self.pkg.changes["distribution"]:
1861             return
1862
1863         # Also only check if there is a file defined (and existant) with
1864         # checks.
1865         transpath = cnf.get("Dinstall::ReleaseTransitions", "")
1866         if transpath == "" or not os.path.exists(transpath):
1867             return
1868
1869         # Parse the yaml file
1870         sourcefile = file(transpath, 'r')
1871         sourcecontent = sourcefile.read()
1872         try:
1873             transitions = yaml.load(sourcecontent)
1874         except yaml.YAMLError as msg:
1875             # This shouldn't happen, there is a wrapper to edit the file which
1876             # checks it, but we prefer to be safe than ending up rejecting
1877             # everything.
1878             utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1879             return
1880
1881         # Now look through all defined transitions
1882         for trans in transitions:
1883             t = transitions[trans]
1884             source = t["source"]
1885             expected = t["new"]
1886
1887             # Will be None if nothing is in testing.
1888             current = get_source_in_suite(source, "testing", session)
1889             if current is not None:
1890                 compare = apt_pkg.VersionCompare(current.version, expected)
1891
1892             if current is None or compare < 0:
1893                 # This is still valid, the current version in testing is older than
1894                 # the new version we wait for, or there is none in testing yet
1895
1896                 # Check if the source we look at is affected by this.
1897                 if sourcepkg in t['packages']:
1898                     # The source is affected, lets reject it.
1899
1900                     rejectmsg = "%s: part of the %s transition.\n\n" % (
1901                         sourcepkg, trans)
1902
1903                     if current is not None:
1904                         currentlymsg = "at version %s" % (current.version)
1905                     else:
1906                         currentlymsg = "not present in testing"
1907
1908                     rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1909
1910                     rejectmsg += "\n".join(textwrap.wrap("""Your package
1911 is part of a testing transition designed to get %s migrated (it is
1912 currently %s, we need version %s).  This transition is managed by the
1913 Release Team, and %s is the Release-Team member responsible for it.
1914 Please mail debian-release@lists.debian.org or contact %s directly if you
1915 need further assistance.  You might want to upload to experimental until this
1916 transition is done."""
1917                             % (source, currentlymsg, expected,t["rm"], t["rm"])))
1918
1919                     self.rejects.append(rejectmsg)
1920                     return
1921
1922     ###########################################################################
1923     # End check_signed_by_key checks
1924     ###########################################################################
1925
1926     def build_summaries(self):
1927         """ Build a summary of changes the upload introduces. """
1928
1929         (byhand, new, summary, override_summary) = self.pkg.file_summary()
1930
1931         short_summary = summary
1932
1933         # This is for direport's benefit...
1934         f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1935
1936         summary += "\n\nChanges:\n" + f
1937
1938         summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1939
1940         summary += self.announce(short_summary, 0)
1941
1942         return (summary, short_summary)
1943
1944     ###########################################################################
1945
1946     def close_bugs(self, summary, action):
1947         """
1948         Send mail to close bugs as instructed by the closes field in the changes file.
1949         Also add a line to summary if any work was done.
1950
1951         @type summary: string
1952         @param summary: summary text, as given by L{build_summaries}
1953
1954         @type action: bool
1955         @param action: Set to false no real action will be done.
1956
1957         @rtype: string
1958         @return: summary. If action was taken, extended by the list of closed bugs.
1959
1960         """
1961
1962         template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1963
1964         bugs = self.pkg.changes["closes"].keys()
1965
1966         if not bugs:
1967             return summary
1968
1969         bugs.sort()
1970         summary += "Closing bugs: "
1971         for bug in bugs:
1972             summary += "%s " % (bug)
1973             if action:
1974                 self.update_subst()
1975                 self.Subst["__BUG_NUMBER__"] = bug
1976                 if self.pkg.changes["distribution"].has_key("stable"):
1977                     self.Subst["__STABLE_WARNING__"] = """
1978 Note that this package is not part of the released stable Debian
1979 distribution.  It may have dependencies on other unreleased software,
1980 or other instabilities.  Please take care if you wish to install it.
1981 The update will eventually make its way into the next released Debian
1982 distribution."""
1983                 else:
1984                     self.Subst["__STABLE_WARNING__"] = ""
1985                 mail_message = utils.TemplateSubst(self.Subst, template)
1986                 utils.send_mail(mail_message)
1987
1988                 # Clear up after ourselves
1989                 del self.Subst["__BUG_NUMBER__"]
1990                 del self.Subst["__STABLE_WARNING__"]
1991
1992         if action and self.logger:
1993             self.logger.log(["closing bugs"] + bugs)
1994
1995         summary += "\n"
1996
1997         return summary
1998
1999     ###########################################################################
2000
2001     def announce(self, short_summary, action):
2002         """
2003         Send an announce mail about a new upload.
2004
2005         @type short_summary: string
2006         @param short_summary: Short summary text to include in the mail
2007
2008         @type action: bool
2009         @param action: Set to false no real action will be done.
2010
2011         @rtype: string
2012         @return: Textstring about action taken.
2013
2014         """
2015
2016         cnf = Config()
2017
2018         # Skip all of this if not sending mail to avoid confusing people
2019         if cnf.has_key("Dinstall::Options::No-Mail") and cnf["Dinstall::Options::No-Mail"]:
2020             return ""
2021
2022         # Only do announcements for source uploads with a recent dpkg-dev installed
2023         if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
2024            self.pkg.changes["architecture"].has_key("source"):
2025             return ""
2026
2027         announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
2028
2029         lists_todo = {}
2030         summary = ""
2031
2032         # Get a unique list of target lists
2033         for dist in self.pkg.changes["distribution"].keys():
2034             suite = get_suite(dist)
2035             if suite is None: continue
2036             for tgt in suite.announce:
2037                 lists_todo[tgt] = 1
2038
2039         self.Subst["__SHORT_SUMMARY__"] = short_summary
2040
2041         for announce_list in lists_todo.keys():
2042             summary += "Announcing to %s\n" % (announce_list)
2043
2044             if action:
2045                 self.update_subst()
2046                 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
2047                 if cnf.get("Dinstall::TrackingServer") and \
2048                    self.pkg.changes["architecture"].has_key("source"):
2049                     trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
2050                     self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
2051
2052                 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
2053                 utils.send_mail(mail_message)
2054
2055                 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
2056
2057         if cnf.FindB("Dinstall::CloseBugs") and cnf.has_key("Dinstall::BugServer"):
2058             summary = self.close_bugs(summary, action)
2059
2060         del self.Subst["__SHORT_SUMMARY__"]
2061
2062         return summary
2063
2064     ###########################################################################
2065     @session_wrapper
2066     def accept (self, summary, short_summary, session=None):
2067         """
2068         Accept an upload.
2069
2070         This moves all files referenced from the .changes into the pool,
2071         sends the accepted mail, announces to lists, closes bugs and
2072         also checks for override disparities. If enabled it will write out
2073         the version history for the BTS Version Tracking and will finally call
2074         L{queue_build}.
2075
2076         @type summary: string
2077         @param summary: Summary text
2078
2079         @type short_summary: string
2080         @param short_summary: Short summary
2081         """
2082
2083         cnf = Config()
2084         stats = SummaryStats()
2085
2086         print "Installing."
2087         self.logger.log(["installing changes", self.pkg.changes_file])
2088
2089         binaries = []
2090         poolfiles = []
2091
2092         # Add the .dsc file to the DB first
2093         for newfile, entry in self.pkg.files.items():
2094             if entry["type"] == "dsc":
2095                 source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
2096                 for j in pfs:
2097                     poolfiles.append(j)
2098
2099         # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
2100         for newfile, entry in self.pkg.files.items():
2101             if entry["type"] == "deb":
2102                 b, pf = add_deb_to_db(self, newfile, session)
2103                 binaries.append(b)
2104                 poolfiles.append(pf)
2105
2106         # If this is a sourceful diff only upload that is moving
2107         # cross-component we need to copy the .orig files into the new
2108         # component too for the same reasons as above.
2109         # XXX: mhy: I think this should be in add_dsc_to_db
2110         if self.pkg.changes["architecture"].has_key("source"):
2111             for orig_file in self.pkg.orig_files.keys():
2112                 if not self.pkg.orig_files[orig_file].has_key("id"):
2113                     continue # Skip if it's not in the pool
2114                 orig_file_id = self.pkg.orig_files[orig_file]["id"]
2115                 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
2116                     continue # Skip if the location didn't change
2117
2118                 # Do the move
2119                 oldf = get_poolfile_by_id(orig_file_id, session)
2120                 old_filename = os.path.join(oldf.location.path, oldf.filename)
2121                 old_dat = {'size': oldf.filesize,   'md5sum': oldf.md5sum,
2122                            'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
2123
2124                 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
2125
2126                 # TODO: Care about size/md5sum collisions etc
2127                 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
2128
2129                 # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
2130                 if newf is None:
2131                     utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
2132                     newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
2133
2134                     session.flush()
2135
2136                     # Don't reference the old file from this changes
2137                     for p in poolfiles:
2138                         if p.file_id == oldf.file_id:
2139                             poolfiles.remove(p)
2140
2141                     poolfiles.append(newf)
2142
2143                     # Fix up the DSC references
2144                     toremove = []
2145
2146                     for df in source.srcfiles:
2147                         if df.poolfile.file_id == oldf.file_id:
2148                             # Add a new DSC entry and mark the old one for deletion
2149                             # Don't do it in the loop so we don't change the thing we're iterating over
2150                             newdscf = DSCFile()
2151                             newdscf.source_id = source.source_id
2152                             newdscf.poolfile_id = newf.file_id
2153                             session.add(newdscf)
2154
2155                             toremove.append(df)
2156
2157                     for df in toremove:
2158                         session.delete(df)
2159
2160                     # Flush our changes
2161                     session.flush()
2162
2163                     # Make sure that our source object is up-to-date
2164                     session.expire(source)
2165
2166         # Add changelog information to the database
2167         self.store_changelog()
2168
2169         # Install the files into the pool
2170         for newfile, entry in self.pkg.files.items():
2171             destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
2172             utils.move(newfile, destination)
2173             self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
2174             stats.accept_bytes += float(entry["size"])
2175
2176         # Copy the .changes file across for suite which need it.
2177         copy_changes = dict([(x.copychanges, '')
2178                              for x in session.query(Suite).filter(Suite.suite_name.in_(self.pkg.changes["distribution"].keys())).all()
2179                              if x.copychanges is not None])
2180
2181         for dest in copy_changes.keys():
2182             utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
2183
2184         # We're done - commit the database changes
2185         session.commit()
2186         # Our SQL session will automatically start a new transaction after
2187         # the last commit
2188
2189         # Now ensure that the metadata has been added
2190         # This has to be done after we copy the files into the pool
2191         # For source if we have it:
2192         if self.pkg.changes["architecture"].has_key("source"):
2193             import_metadata_into_db(source, session)
2194
2195         # Now for any of our binaries
2196         for b in binaries:
2197             import_metadata_into_db(b, session)
2198
2199         session.commit()
2200
2201         # Move the .changes into the 'done' directory
2202         ye, mo, da = time.gmtime()[0:3]
2203         donedir = os.path.join(cnf["Dir::Done"], str(ye), "%0.2d" % mo, "%0.2d" % da)
2204         if not os.path.isdir(donedir):
2205             os.makedirs(donedir)
2206
2207         utils.move(self.pkg.changes_file,
2208                    os.path.join(donedir, os.path.basename(self.pkg.changes_file)))
2209
2210         if self.pkg.changes["architecture"].has_key("source"):
2211             UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
2212
2213         self.update_subst()
2214         self.Subst["__SUMMARY__"] = summary
2215         mail_message = utils.TemplateSubst(self.Subst,
2216                                            os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
2217         utils.send_mail(mail_message)
2218         self.announce(short_summary, 1)
2219
2220         ## Helper stuff for DebBugs Version Tracking
2221         if cnf.Find("Dir::BTSVersionTrack"):
2222             if self.pkg.changes["architecture"].has_key("source"):
2223                 (fd, temp_filename) = utils.temp_filename(cnf["Dir::BTSVersionTrack"], prefix=".")
2224                 version_history = os.fdopen(fd, 'w')
2225                 version_history.write(self.pkg.dsc["bts changelog"])
2226                 version_history.close()
2227                 filename = "%s/%s" % (cnf["Dir::BTSVersionTrack"],
2228                                       self.pkg.changes_file[:-8]+".versions")
2229                 os.rename(temp_filename, filename)
2230                 os.chmod(filename, 0o644)
2231
2232             # Write out the binary -> source mapping.
2233             (fd, temp_filename) = utils.temp_filename(cnf["Dir::BTSVersionTrack"], prefix=".")
2234             debinfo = os.fdopen(fd, 'w')
2235             for name, entry in sorted(self.pkg.files.items()):
2236                 if entry["type"] == "deb":
2237                     line = " ".join([entry["package"], entry["version"],
2238                                      entry["architecture"], entry["source package"],
2239                                      entry["source version"]])
2240                     debinfo.write(line+"\n")
2241             debinfo.close()
2242             filename = "%s/%s" % (cnf["Dir::BTSVersionTrack"],
2243                                   self.pkg.changes_file[:-8]+".debinfo")
2244             os.rename(temp_filename, filename)
2245             os.chmod(filename, 0o644)
2246
2247         session.commit()
2248
2249         # Set up our copy queues (e.g. buildd queues)
2250         for suite_name in self.pkg.changes["distribution"].keys():
2251             suite = get_suite(suite_name, session)
2252             for q in suite.copy_queues:
2253                 for f in poolfiles:
2254                     q.add_file_from_pool(f)
2255
2256         session.commit()
2257
2258         # Finally...
2259         stats.accept_count += 1
2260
2261     def check_override(self):
2262         """
2263         Checks override entries for validity. Mails "Override disparity" warnings,
2264         if that feature is enabled.
2265
2266         Abandons the check if
2267           - override disparity checks are disabled
2268           - mail sending is disabled
2269         """
2270
2271         cnf = Config()
2272
2273         # Abandon the check if override disparity checks have been disabled
2274         if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
2275             return
2276
2277         summary = self.pkg.check_override()
2278
2279         if summary == "":
2280             return
2281
2282         overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2283
2284         self.update_subst()
2285         self.Subst["__SUMMARY__"] = summary
2286         mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2287         utils.send_mail(mail_message)
2288         del self.Subst["__SUMMARY__"]
2289
2290     ###########################################################################
2291
2292     def remove(self, from_dir=None):
2293         """
2294         Used (for instance) in p-u to remove the package from unchecked
2295
2296         Also removes the package from holding area.
2297         """
2298         if from_dir is None:
2299             from_dir = self.pkg.directory
2300         h = Holding()
2301
2302         for f in self.pkg.files.keys():
2303             os.unlink(os.path.join(from_dir, f))
2304             if os.path.exists(os.path.join(h.holding_dir, f)):
2305                 os.unlink(os.path.join(h.holding_dir, f))
2306
2307         os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2308         if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2309             os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2310
2311     ###########################################################################
2312
2313     def move_to_queue (self, queue):
2314         """
2315         Move files to a destination queue using the permissions in the table
2316         """
2317         h = Holding()
2318         utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2319                    queue.path, perms=int(queue.change_perms, 8))
2320         for f in self.pkg.files.keys():
2321             utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
2322
2323     ###########################################################################
2324
    def force_reject(self, reject_files):
        """
        Forcefully move files from the current directory to the
        reject directory.  If a file of the same name already exists in
        the reject directory, a free name is found by appending a number
        (NOTE(review): an older version of this docstring claimed the
        existing file was moved to the morgue; the code below does not do
        that -- it renames the incoming file instead).

        @type reject_files: dict
        @param reject_files: file dictionary; iterated for its keys, the
            filenames to move

        """

        cnf = Config()

        for file_entry in reject_files:
            # Skip any files which don't exist or which we don't have permission to copy.
            if os.access(file_entry, os.R_OK) == 0:
                continue

            dest_file = os.path.join(cnf["Dir::Reject"], file_entry)

            try:
                # O_EXCL atomically claims the destination name; EEXIST
                # means something already sits there.
                dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0o644)
            except OSError as e:
                # File exists?  Let's find a new name by adding a number
                if e.errno == errno.EEXIST:
                    try:
                        dest_file = utils.find_next_free(dest_file, 255)
                    except NoFreeFilenameError:
                        # Something's either gone badly Pete Tong, or
                        # someone is trying to exploit us.
                        utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Reject"]))
                        return

                    # Make sure we really got it: the free name may have
                    # been taken between find_next_free() and here.
                    try:
                        dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0o644)
                    except OSError as e:
                        # Likewise
                        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
                        return
                else:
                    # Any other OS error (permissions, missing dir, ...)
                    # is unexpected -- let it propagate.
                    raise
            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file_entry, dest_file, 1, perms=0o660)
            os.close(dest_fd)
2372
2373     ###########################################################################
    def do_reject (self, manual=0, reject_message="", notes=""):
        """
        Reject an upload: move its files into the reject directory, write
        a <changes>.reason file there, and send a rejection mail.  If
        C{manual} is true and no reject message was given, spawn an editor
        so the user can write one.

        @type manual: bool
        @param manual: manual or automated rejection

        @type reject_message: string
        @param reject_message: A reject message

        @type notes: sequence
        @param notes: note objects (with author/version/notedate/comment
            attributes) used to pre-fill the editor buffer in the manual case

        @return: 0 on completion; 1 if the user abandoned the rejection
            at the editor prompt (exits the process entirely on 'Quit')

        """
        # If we weren't given a manual rejection message, spawn an
        # editor so the user can add one in...
        if manual and not reject_message:
            (fd, temp_filename) = utils.temp_filename()
            temp_file = os.fdopen(fd, 'w')
            if len(notes) > 0:
                for note in notes:
                    temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
                                    % (note.author, note.version, note.notedate, note.comment))
            temp_file.close()
            editor = os.environ.get("EDITOR","vi")
            answer = 'E'
            # Loop until the user stops choosing (E)dit
            while answer == 'E':
                os.system("%s %s" % (editor, temp_filename))
                temp_fh = utils.open_file(temp_filename)
                reject_message = "".join(temp_fh.readlines())
                temp_fh.close()
                print "Reject message:"
                print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
                prompt = "[R]eject, Edit, Abandon, Quit ?"
                answer = "XXX"
                # Keep asking until the answer is one of the prompt's letters;
                # an empty answer takes the default extracted from the prompt.
                while prompt.find(answer) == -1:
                    answer = utils.our_raw_input(prompt)
                    m = re_default_answer.search(prompt)
                    if answer == "":
                        answer = m.group(1)
                    answer = answer[:1].upper()
            os.unlink(temp_filename)
            if answer == 'A':
                return 1
            elif answer == 'Q':
                sys.exit(0)

        print "Rejecting.\n"

        cnf = Config()

        # <changes basename minus ".changes"> + ".reason", in the reject dir
        reason_filename = self.pkg.changes_file[:-8] + ".reason"
        reason_filename = os.path.join(cnf["Dir::Reject"], reason_filename)
        changesfile = os.path.join(cnf["Dir::Reject"], self.pkg.changes_file)

        # Move all the files into the reject directory
        reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
        self.force_reject(reject_files)

        # Change permissions of the .changes file to be world readable
        os.chmod(changesfile, os.stat(changesfile).st_mode | stat.S_IROTH)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reason_filename):
            os.unlink(reason_filename)
        reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0o644)

        rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")

        self.update_subst()
        if not manual:
            # Automated rejection: the raw reject_message becomes the
            # .reason file and the mail is built from the template.
            self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
            self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
            self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
            os.write(reason_fd, reject_message)
            reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
        else:
            # Build up the rejection email
            user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
            self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
            self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
            self.Subst["__REJECT_MESSAGE__"] = ""
            self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
            reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
            # Write the rejection email out as the <foo>.reason file
            os.write(reason_fd, reject_mail_message)

        # Clean the substitution dict so later mails don't inherit these keys
        del self.Subst["__REJECTOR_ADDRESS__"]
        del self.Subst["__MANUAL_REJECT_MESSAGE__"]
        del self.Subst["__CC__"]

        os.close(reason_fd)

        # Send the rejection mail
        utils.send_mail(reject_mail_message)

        if self.logger:
            self.logger.log(["rejected", self.pkg.changes_file])

        stats = SummaryStats()
        stats.reject_count += 1
        return 0
2477
2478     ################################################################################
2479     def in_override_p(self, package, component, suite, binary_type, filename, session):
2480         """
2481         Check if a package already has override entries in the DB
2482
2483         @type package: string
2484         @param package: package name
2485
2486         @type component: string
2487         @param component: database id of the component
2488
2489         @type suite: int
2490         @param suite: database id of the suite
2491
2492         @type binary_type: string
2493         @param binary_type: type of the package
2494
2495         @type filename: string
2496         @param filename: filename we check
2497
2498         @return: the database result. But noone cares anyway.
2499
2500         """
2501
2502         cnf = Config()
2503
2504         if binary_type == "": # must be source
2505             file_type = "dsc"
2506         else:
2507             file_type = binary_type
2508
2509         # Override suite name; used for example with proposed-updates
2510         oldsuite = get_suite(suite, session)
2511         if (not oldsuite is None) and oldsuite.overridesuite:
2512             suite = oldsuite.overridesuite
2513
2514         result = get_override(package, suite, component, file_type, session)
2515
2516         # If checking for a source package fall back on the binary override type
2517         if file_type == "dsc" and len(result) < 1:
2518             result = get_override(package, suite, component, ['deb', 'udeb'], session)
2519
2520         # Remember the section and priority so we can check them later if appropriate
2521         if len(result) > 0:
2522             result = result[0]
2523             self.pkg.files[filename]["override section"] = result.section.section
2524             self.pkg.files[filename]["override priority"] = result.priority.priority
2525             return result
2526
2527         return None
2528
2529     ################################################################################
2530     def get_anyversion(self, sv_list, suite):
2531         """
2532         @type sv_list: list
2533         @param sv_list: list of (suite, version) tuples to check
2534
2535         @type suite: string
2536         @param suite: suite name
2537
2538         Description: TODO
2539         """
2540         Cnf = Config()
2541         anyversion = None
2542         anysuite = [suite] + [ vc.reference.suite_name for vc in get_version_checks(suite, "Enhances") ]
2543         for (s, v) in sv_list:
2544             if s in [ x.lower() for x in anysuite ]:
2545                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2546                     anyversion = v
2547
2548         return anyversion
2549
2550     ################################################################################
2551
    def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
        """
        Ensure versions are newer than existing packages in target
        suites and that cross-suite version checking rules as
        set out in the conf file are satisfied.

        Appends messages to self.rejects / self.warnings and may mark
        additional suites for propagation via
        self.pkg.changes["propdistribution"].

        @type sv_list: list
        @param sv_list: list of (suite, version) tuples to check

        @type filename: string
        @param filename: name of the file being checked; only used in
            reject/warning messages

        @type new_version: string
        @param new_version: version of the upload being checked

        @type sourceful: bool
        @param sourceful: whether the upload includes source; the
            "must be newer than" rejection is only applied when True
        """

        cnf = Config()

        # Check versions for each target suite
        for target_suite in self.pkg.changes["distribution"].keys():
            # Check we can find the target suite
            # NOTE(review): unlike other get_suite() calls in this file, no
            # session is passed here -- confirm this is intended.
            ts = get_suite(target_suite)
            if ts is None:
                self.rejects.append("Cannot find target suite %s to perform version checks" % target_suite)
                continue

            # Suite names this upload must be newer/older than, from the
            # configured version checks
            must_be_newer_than = [ vc.reference.suite_name for vc in get_version_checks(target_suite, "MustBeNewerThan") ]
            must_be_older_than = [ vc.reference.suite_name for vc in get_version_checks(target_suite, "MustBeOlderThan") ]

            # Enforce "must be newer than target suite" even if conffile omits it
            if target_suite not in must_be_newer_than:
                must_be_newer_than.append(target_suite)

            for (suite, existent_version) in sv_list:
                # <0: new_version older, 0: equal, >0: new_version newer
                vercmp = apt_pkg.VersionCompare(new_version, existent_version)

                # Sourceful uploads must be strictly newer than what the
                # "newer than" suites already carry
                if suite in must_be_newer_than and sourceful and vercmp < 1:
                    self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))

                if suite in must_be_older_than and vercmp > -1:
                    # Conflict with a "must be older than" suite; see if a
                    # distribution-version mapping lets us save the upload
                    cansave = 0

                    if self.pkg.changes.get('distribution-version', {}).has_key(suite):
                        # we really use the other suite, ignoring the conflicting one ...
                        addsuite = self.pkg.changes["distribution-version"][suite]

                        add_version = self.get_anyversion(sv_list, addsuite)
                        target_version = self.get_anyversion(sv_list, target_suite)

                        if not add_version:
                            # not add_version can only happen if we map to a suite
                            # that doesn't enhance the suite we're propup'ing from.
                            # so "propup-ver x a b c; map a d" is a problem only if
                            # d doesn't enhance a.
                            #
                            # i think we could always propagate in this case, rather
                            # than complaining. either way, this isn't a REJECT issue
                            #
                            # And - we really should complain to the dorks who configured dak
                            self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite))
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            cansave = 1
                        elif not target_version:
                            # not targets_version is true when the package is NEW
                            # we could just stick with the "...old version..." REJECT
                            # for this, I think.
                            self.rejects.append("Won't propogate NEW packages.")
                        elif apt_pkg.VersionCompare(new_version, add_version) < 0:
                            # propogation would be redundant. no need to reject though.
                            self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
                            cansave = 1
                        elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
                             apt_pkg.VersionCompare(add_version, target_version) >= 0:
                            # propogate!!
                            self.warnings.append("Propogating upload to %s" % (addsuite))
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            cansave = 1

                    # No mapping rescued us: reject the version conflict
                    if not cansave:
                        self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2634
2635     ################################################################################
2636     def check_binary_against_db(self, filename, session):
2637         # Ensure version is sane
2638         self.cross_suite_version_check( \
2639             get_suite_version_by_package(self.pkg.files[filename]["package"], \
2640                 self.pkg.files[filename]["architecture"], session),
2641             filename, self.pkg.files[filename]["version"], sourceful=False)
2642
2643         # Check for any existing copies of the file
2644         q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2645         q = q.filter_by(version=self.pkg.files[filename]["version"])
2646         q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2647
2648         if q.count() > 0:
2649             self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
2650
2651     ################################################################################
2652
2653     def check_source_against_db(self, filename, session):
2654         source = self.pkg.dsc.get("source")
2655         version = self.pkg.dsc.get("version")
2656
2657         # Ensure version is sane
2658         self.cross_suite_version_check( \
2659             get_suite_version_by_source(source, session), filename, version,
2660             sourceful=True)
2661
2662     ################################################################################
2663     def check_dsc_against_db(self, filename, session):
2664         """
2665
2666         @warning: NB: this function can remove entries from the 'files' index [if
2667          the orig tarball is a duplicate of the one in the archive]; if
2668          you're iterating over 'files' and call this function as part of
2669          the loop, be sure to add a check to the top of the loop to
2670          ensure you haven't just tried to dereference the deleted entry.
2671
2672         """
2673
2674         Cnf = Config()
2675         self.pkg.orig_files = {} # XXX: do we need to clear it?
2676         orig_files = self.pkg.orig_files
2677
2678         # Try and find all files mentioned in the .dsc.  This has
2679         # to work harder to cope with the multiple possible
2680         # locations of an .orig.tar.gz.
2681         # The ordering on the select is needed to pick the newest orig
2682         # when it exists in multiple places.
2683         for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2684             found = None
2685             if self.pkg.files.has_key(dsc_name):
2686                 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2687                 actual_size = int(self.pkg.files[dsc_name]["size"])
2688                 found = "%s in incoming" % (dsc_name)
2689
2690                 # Check the file does not already exist in the archive
2691                 ql = get_poolfile_like_name(dsc_name, session)
2692
2693                 # Strip out anything that isn't '%s' or '/%s$'
2694                 for i in ql:
2695                     if not i.filename.endswith(dsc_name):
2696                         ql.remove(i)
2697
2698                 # "[dak] has not broken them.  [dak] has fixed a
2699                 # brokenness.  Your crappy hack exploited a bug in
2700                 # the old dinstall.
2701                 #
2702                 # "(Come on!  I thought it was always obvious that
2703                 # one just doesn't release different files with
2704                 # the same name and version.)"
2705                 #                        -- ajk@ on d-devel@l.d.o
2706
2707                 if len(ql) > 0:
2708                     # Ignore exact matches for .orig.tar.gz
2709                     match = 0
2710                     if re_is_orig_source.match(dsc_name):
2711                         for i in ql:
2712                             if self.pkg.files.has_key(dsc_name) and \
2713                                int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2714                                self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2715                                 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2716                                 # TODO: Don't delete the entry, just mark it as not needed
2717                                 # This would fix the stupidity of changing something we often iterate over
2718                                 # whilst we're doing it
2719                                 del self.pkg.files[dsc_name]
2720                                 dsc_entry["files id"] = i.file_id
2721                                 if not orig_files.has_key(dsc_name):
2722                                     orig_files[dsc_name] = {}
2723                                 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2724                                 match = 1
2725
2726                                 # Don't bitch that we couldn't find this file later
2727                                 try:
2728                                     self.later_check_files.remove(dsc_name)
2729                                 except ValueError:
2730                                     pass
2731
2732
2733                     if not match:
2734                         self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2735
2736             elif re_is_orig_source.match(dsc_name):
2737                 # Check in the pool
2738                 ql = get_poolfile_like_name(dsc_name, session)
2739
2740                 # Strip out anything that isn't '%s' or '/%s$'
2741                 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2742                 for i in ql:
2743                     if not i.filename.endswith(dsc_name):
2744                         ql.remove(i)
2745
2746                 if len(ql) > 0:
2747                     # Unfortunately, we may get more than one match here if,
2748                     # for example, the package was in potato but had an -sa
2749                     # upload in woody.  So we need to choose the right one.
2750
2751                     # default to something sane in case we don't match any or have only one
2752                     x = ql[0]
2753
2754                     if len(ql) > 1:
2755                         for i in ql:
2756                             old_file = os.path.join(i.location.path, i.filename)
2757                             old_file_fh = utils.open_file(old_file)
2758                             actual_md5 = apt_pkg.md5sum(old_file_fh)
2759                             old_file_fh.close()
2760                             actual_size = os.stat(old_file)[stat.ST_SIZE]
2761                             if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2762                                 x = i
2763
2764                     old_file = os.path.join(i.location.path, i.filename)
2765                     old_file_fh = utils.open_file(old_file)
2766                     actual_md5 = apt_pkg.md5sum(old_file_fh)
2767                     old_file_fh.close()
2768                     actual_size = os.stat(old_file)[stat.ST_SIZE]
2769                     found = old_file
2770                     suite_type = x.location.archive_type
2771                     # need this for updating dsc_files in install()
2772                     dsc_entry["files id"] = x.file_id
2773                     # See install() in process-accepted...
2774                     if not orig_files.has_key(dsc_name):
2775                         orig_files[dsc_name] = {}
2776                     orig_files[dsc_name]["id"] = x.file_id
2777                     orig_files[dsc_name]["path"] = old_file
2778                     orig_files[dsc_name]["location"] = x.location.location_id
2779                 else:
2780                     # TODO: Determine queue list dynamically
2781                     # Not there? Check the queue directories...
2782                     for queue_name in [ "byhand", "new", "proposedupdates", "oldproposedupdates", "embargoed", "unembargoed" ]:
2783                         queue = get_policy_queue(queue_name, session)
2784                         if not queue:
2785                             continue
2786
2787                         in_otherdir = os.path.join(queue.path, dsc_name)
2788
2789                         if os.path.exists(in_otherdir):
2790                             in_otherdir_fh = utils.open_file(in_otherdir)
2791                             actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2792                             in_otherdir_fh.close()
2793                             actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2794                             found = in_otherdir
2795                             if not orig_files.has_key(dsc_name):
2796                                 orig_files[dsc_name] = {}
2797                             orig_files[dsc_name]["path"] = in_otherdir
2798
2799                     if not found:
2800                         self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2801                         continue
2802             else:
2803                 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2804                 continue
2805             if actual_md5 != dsc_entry["md5sum"]:
2806                 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2807             if actual_size != int(dsc_entry["size"]):
2808                 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2809
2810     ################################################################################
2811     # This is used by process-new and process-holding to recheck a changes file
2812     # at the time we're running.  It mainly wraps various other internal functions
2813     # and is similar to accepted_checks - these should probably be tidied up
2814     # and combined
2815     def recheck(self, session):
2816         cnf = Config()
2817         for f in self.pkg.files.keys():
2818             # The .orig.tar.gz can disappear out from under us is it's a
2819             # duplicate of one in the archive.
2820             if not self.pkg.files.has_key(f):
2821                 continue
2822
2823             entry = self.pkg.files[f]
2824
2825             # Check that the source still exists
2826             if entry["type"] == "deb":
2827                 source_version = entry["source version"]
2828                 source_package = entry["source package"]
2829                 if not self.pkg.changes["architecture"].has_key("source") \
2830                    and not source_exists(source_package, source_version, \
2831                     suites = self.pkg.changes["distribution"].keys(), session = session):
2832                     source_epochless_version = re_no_epoch.sub('', source_version)
2833                     dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2834                     found = False
2835                     for queue_name in ["embargoed", "unembargoed", "newstage"]:
2836                         queue = get_policy_queue(queue_name, session)
2837                         if queue and os.path.exists(os.path.join(queue.path, dsc_filename)):
2838                             found = True
2839                     if not found:
2840                         self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2841
2842             # Version and file overwrite checks
2843             if entry["type"] == "deb":
2844                 self.check_binary_against_db(f, session)
2845             elif entry["type"] == "dsc":
2846                 self.check_source_against_db(f, session)
2847                 self.check_dsc_against_db(f, session)
2848
2849     ################################################################################
2850     def accepted_checks(self, overwrite_checks, session):
2851         # Recheck anything that relies on the database; since that's not
2852         # frozen between accept and our run time when called from p-a.
2853
2854         # overwrite_checks is set to False when installing to stable/oldstable
2855
2856         propogate={}
2857         nopropogate={}
2858
2859         # Find the .dsc (again)
2860         dsc_filename = None
2861         for f in self.pkg.files.keys():
2862             if self.pkg.files[f]["type"] == "dsc":
2863                 dsc_filename = f
2864
2865         for checkfile in self.pkg.files.keys():
2866             # The .orig.tar.gz can disappear out from under us is it's a
2867             # duplicate of one in the archive.
2868             if not self.pkg.files.has_key(checkfile):
2869                 continue
2870
2871             entry = self.pkg.files[checkfile]
2872
2873             # Check that the source still exists
2874             if entry["type"] == "deb":
2875                 source_version = entry["source version"]
2876                 source_package = entry["source package"]
2877                 if not self.pkg.changes["architecture"].has_key("source") \
2878                    and not source_exists(source_package, source_version, \
2879                     suites = self.pkg.changes["distribution"].keys(), \
2880                     session = session):
2881                     self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2882
2883             # Version and file overwrite checks
2884             if overwrite_checks:
2885                 if entry["type"] == "deb":
2886                     self.check_binary_against_db(checkfile, session)
2887                 elif entry["type"] == "dsc":
2888                     self.check_source_against_db(checkfile, session)
2889                     self.check_dsc_against_db(dsc_filename, session)
2890
2891             # propogate in the case it is in the override tables:
2892             for suite in self.pkg.changes.get("propdistribution", {}).keys():
2893                 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2894                     propogate[suite] = 1
2895                 else:
2896                     nopropogate[suite] = 1
2897
2898         for suite in propogate.keys():
2899             if suite in nopropogate:
2900                 continue
2901             self.pkg.changes["distribution"][suite] = 1
2902
2903         for checkfile in self.pkg.files.keys():
2904             # Check the package is still in the override tables
2905             for suite in self.pkg.changes["distribution"].keys():
2906                 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2907                     self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2908
2909     ################################################################################
2910     # If any file of an upload has a recent mtime then chances are good
2911     # the file is still being uploaded.
2912
2913     def upload_too_new(self):
2914         cnf = Config()
2915         too_new = False
2916         # Move back to the original directory to get accurate time stamps
2917         cwd = os.getcwd()
2918         os.chdir(self.pkg.directory)
2919         file_list = self.pkg.files.keys()
2920         file_list.extend(self.pkg.dsc_files.keys())
2921         file_list.append(self.pkg.changes_file)
2922         for f in file_list:
2923             try:
2924                 last_modified = time.time()-os.path.getmtime(f)
2925                 if last_modified < int(cnf["Dinstall::SkipTime"]):
2926                     too_new = True
2927                     break
2928             except:
2929                 pass
2930
2931         os.chdir(cwd)
2932         return too_new
2933
    def store_changelog(self):
        """
        Store the changelog text of this upload in the changelogs_text
        table and link it to the matching row of the changes table,
        unless a changelog entry already exists for this
        source/version/architecture combination.

        Binary-only uploads are skipped unless the version looks like a
        bin-NMU.
        """

        # Skip binary-only upload if it is not a bin-NMU
        if not self.pkg.changes['architecture'].has_key('source'):
            from daklib.regexes import re_bin_only_nmu
            if not re_bin_only_nmu.search(self.pkg.changes['version']):
                return

        session = DBConn().session()

        # Check if upload already has a changelog entry
        query = """SELECT changelog_id FROM changes WHERE source = :source
                   AND version = :version AND architecture = :architecture AND changelog_id != 0"""
        if session.execute(query, {'source': self.pkg.changes['source'], \
                                   'version': self.pkg.changes['version'], \
                                   'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
            # NOTE(review): committed even on this no-op path, presumably to
            # end the transaction opened by the SELECT — confirm before changing.
            session.commit()
            return

        # Add current changelog text into changelogs_text table, return created ID
        query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
        ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]

        # Link ID to the upload available in changes table
        query = """UPDATE changes SET changelog_id = :id WHERE source = :source
                   AND version = :version AND architecture = :architecture"""
        session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
                                'version': self.pkg.changes['version'], \
                                'architecture': " ".join(self.pkg.changes['architecture'].keys())})

        # Commit both the INSERT and the UPDATE together
        session.commit()