]> git.decadent.org.uk Git - dak.git/blob - daklib/queue.py
Clean up boolean logic
[dak.git] / daklib / queue.py
1 #!/usr/bin/env python
2 # vim:set et sw=4:
3
4 """
5 Queue utility functions for dak
6
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009, 2010  Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
11 """
12
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
17
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
21 # GNU General Public License for more details.
22
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
26
27 ###############################################################################
28
29 import errno
30 import os
31 import stat
32 import sys
33 import time
34 import apt_inst
35 import apt_pkg
36 import utils
37 import commands
38 import shutil
39 import textwrap
40 from types import *
41 from sqlalchemy.sql.expression import desc
42 from sqlalchemy.orm.exc import NoResultFound
43
44 import yaml
45
46 from dak_exceptions import *
47 from changes import *
48 from regexes import *
49 from config import Config
50 from holding import Holding
51 from urgencylog import UrgencyLog
52 from dbconn import *
53 from summarystats import SummaryStats
54 from utils import parse_changes, check_dsc_files, build_package_set
55 from textutils import fix_maintainer
56 from lintian import parse_lintian_output, generate_reject_messages
57 from contents import UnpackedSource
58
59 ###############################################################################
60
def get_type(f, session):
    """
    Get the file type of C{f}

    @type f: dict
    @param f: file entry from Changes object

    @type session: SQLA Session
    @param session: SQL Alchemy session object

    @rtype: string
    @return: filetype
    """
    # Work out the type from the file entry itself.
    if "dbtype" in f:
        file_type = f["dbtype"]
    elif re_source_ext.match(f["type"]):
        file_type = "dsc"
    elif f["architecture"] == "source" and f["type"] == "unreadable":
        # Unreadable source files are let through with just a warning so
        # that processing can continue; no override validation is possible.
        utils.warn("unreadable source file (will continue and hope for the best)")
        return f["type"]
    else:
        file_type = f["type"]
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (file_type))

    # The type must have a matching override type in the database.
    if get_override_type(file_type, session) is None:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))

    return file_type
93
94 ################################################################################
95
96 # Determine what parts in a .changes are NEW
97
98 def determine_new(filename, changes, files, warn=1, session = None, dsc = None, new = None):
99     """
100     Determine what parts in a C{changes} file are NEW.
101
102     @type filename: str
103     @param filename: changes filename
104
105     @type changes: Upload.Pkg.changes dict
106     @param changes: Changes dictionary
107
108     @type files: Upload.Pkg.files dict
109     @param files: Files dictionary
110
111     @type warn: bool
112     @param warn: Warn if overrides are added for (old)stable
113
114     @type dsc: Upload.Pkg.dsc dict
115     @param dsc: (optional); Dsc dictionary
116
117     @type new: dict
118     @param new: new packages as returned by a previous call to this function, but override information may have changed
119
120     @rtype: dict
121     @return: dictionary of NEW components.
122
123     """
124     # TODO: This should all use the database instead of parsing the changes
125     # file again
126     byhand = {}
127     if new is None:
128         new = {}
129
130     dbchg = get_dbchange(filename, session)
131     if dbchg is None:
132         print "Warning: cannot find changes file in database; won't check byhand"
133
134     # Try to get the Package-Set field from an included .dsc file (if possible).
135     if dsc:
136         for package, entry in build_package_set(dsc, session).items():
137             if not new.has_key(package):
138                 new[package] = entry
139
140     # Build up a list of potentially new things
141     for name, f in files.items():
142         # Keep a record of byhand elements
143         if f["section"] == "byhand":
144             byhand[name] = 1
145             continue
146
147         pkg = f["package"]
148         priority = f["priority"]
149         section = f["section"]
150         file_type = get_type(f, session)
151         component = f["component"]
152
153         if file_type == "dsc":
154             priority = "source"
155
156         if not new.has_key(pkg):
157             new[pkg] = {}
158             new[pkg]["priority"] = priority
159             new[pkg]["section"] = section
160             new[pkg]["type"] = file_type
161             new[pkg]["component"] = component
162             new[pkg]["files"] = []
163         else:
164             old_type = new[pkg]["type"]
165             if old_type != file_type:
166                 # source gets trumped by deb or udeb
167                 if old_type == "dsc":
168                     new[pkg]["priority"] = priority
169                     new[pkg]["section"] = section
170                     new[pkg]["type"] = file_type
171                     new[pkg]["component"] = component
172
173         new[pkg]["files"].append(name)
174
175         if f.has_key("othercomponents"):
176             new[pkg]["othercomponents"] = f["othercomponents"]
177
178     # Fix up the list of target suites
179     cnf = Config()
180     for suite in changes["suite"].keys():
181         oldsuite = get_suite(suite, session)
182         if not oldsuite:
183             print "WARNING: Invalid suite %s found" % suite
184             continue
185
186         if oldsuite.overridesuite:
187             newsuite = get_suite(oldsuite.overridesuite, session)
188
189             if newsuite:
190                 print "INFORMATION: Using overrides from suite %s instead of suite %s" % (
191                     oldsuite.overridesuite, suite)
192                 del changes["suite"][suite]
193                 changes["suite"][oldsuite.overridesuite] = 1
194             else:
195                 print "WARNING: Told to use overridesuite %s for %s but it doesn't exist.  Bugger" % (
196                     oldsuite.overridesuite, suite)
197
198     # Check for unprocessed byhand files
199     if dbchg is not None:
200         for b in byhand.keys():
201             # Find the file entry in the database
202             found = False
203             for f in dbchg.files:
204                 if f.filename == b:
205                     found = True
206                     # If it's processed, we can ignore it
207                     if f.processed:
208                         del byhand[b]
209                     break
210
211             if not found:
212                 print "Warning: Couldn't find BYHAND item %s in the database; assuming unprocessed"
213
214     # Check for new stuff
215     for suite in changes["suite"].keys():
216         for pkg in new.keys():
217             ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
218             if len(ql) > 0:
219                 for file_entry in new[pkg]["files"]:
220                     if files[file_entry].has_key("new"):
221                         del files[file_entry]["new"]
222                 del new[pkg]
223
224     if warn:
225         for s in ['stable', 'oldstable']:
226             if changes["suite"].has_key(s):
227                 print "WARNING: overrides will be added for %s!" % s
228         for pkg in new.keys():
229             if new[pkg].has_key("othercomponents"):
230                 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
231
232     return new, byhand
233
234 ################################################################################
235
def check_valid(new, session = None):
    """
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages can not be udeb
      - source priority can only be assigned to dsc file types

    Invalid entries get their "section id" / "priority id" set to -1.

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.

    """
    for pkg in new.keys():
        section_name = new[pkg]["section"]
        priority_name = new[pkg]["priority"]
        file_type = new[pkg]["type"]

        # Resolve the section name to its database id (-1 if unknown).
        section = get_section(section_name, session)
        if section is None:
            new[pkg]["section id"] = -1
        else:
            new[pkg]["section id"] = section.section_id

        # Resolve the priority name to its database id (-1 if unknown).
        priority = get_priority(priority_name, session)
        if priority is None:
            new[pkg]["priority id"] = -1
        else:
            new[pkg]["priority id"] = priority.priority_id

        # Sanity checks
        di = section_name.find("debian-installer") != -1

        # If d-i, we must be udeb and vice-versa
        if     (di and file_type not in ("udeb", "dsc")) or \
           (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1

        # If dsc we need to be source and vice-versa.
        # BUG FIX: the original compared the Priority ORM object (priority)
        # against the string "source", so the first clause never matched and
        # the second clause matched every dsc, marking all source uploads as
        # having an invalid priority.  Compare the priority *name* instead.
        if (priority_name == "source" and file_type != "dsc") or \
           (priority_name != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1
277
278 ###############################################################################
279
280 # Used by Upload.check_timestamps
# Used by Upload.check_timestamps
class TarTime(object):
    """
    Tar-walk callback holder that records members whose modification time
    falls outside an allowed [past_cutoff, future_cutoff] window.

    future_files / ancient_files map member name -> mtime for offenders.
    """
    def __init__(self, future_cutoff, past_cutoff):
        self.reset()
        self.future_cutoff = future_cutoff    # mtimes above this are "from the future"
        self.past_cutoff = past_cutoff        # mtimes below this are "ancient"

    def reset(self):
        """Clear the recorded offenders."""
        self.future_files = {}
        self.ancient_files = {}

    def callback(self, member, data):
        """
        Record C{member} if its mtime is out of range.  C{data} is unused;
        it is part of the callback signature expected by the tar walker.
        """
        # BUG FIX: the original referenced the undefined names Name and
        # MTime, raising NameError for any out-of-range timestamp.  Use the
        # member's own name/mtime attributes.
        if member.mtime > self.future_cutoff:
            self.future_files[member.name] = member.mtime
        if member.mtime < self.past_cutoff:
            self.ancient_files[member.name] = member.mtime
296
297 ###############################################################################
298
def prod_maintainer(notes, upload):
    """
    Interactively edit a prod message (seeded from C{notes}) and mail it to
    the maintainer of C{upload}.

    @type notes: list
    @param notes: existing note objects; their .comment text seeds the editor

    @type upload: Upload
    @param upload: upload whose Subst map supplies the mail substitutions
    """
    cnf = Config()

    # Here we prepare an editor and get them ready to prod...
    (fd, temp_filename) = utils.temp_filename()
    temp_file = os.fdopen(fd, 'w')
    for note in notes:
        temp_file.write(note.comment)
    temp_file.close()
    editor = os.environ.get("EDITOR","vi")
    answer = 'E'
    # Loop while the user keeps choosing [E]dit; any other valid answer
    # breaks out with the edited message in prod_message.
    while answer == 'E':
        os.system("%s %s" % (editor, temp_filename))
        temp_fh = utils.open_file(temp_filename)
        prod_message = "".join(temp_fh.readlines())
        temp_fh.close()
        print "Prod message:"
        print utils.prefix_multi_line_string(prod_message,"  ",include_blank_lines=1)
        prompt = "[P]rod, Edit, Abandon, Quit ?"
        answer = "XXX"
        # Re-prompt until the answer is one of the letters in the prompt;
        # an empty answer takes the default extracted by re_default_answer.
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            m = re_default_answer.search(prompt)
            if answer == "":
                answer = m.group(1)
            answer = answer[:1].upper()
    os.unlink(temp_filename)
    if answer == 'A':
        return
    elif answer == 'Q':
        # NOTE(review): end() is not defined in this view of the file;
        # presumably a module-level cleanup helper defined elsewhere.
        end()
        sys.exit(0)
    # Otherwise, do the proding...
    user_email_address = utils.whoami() + " <%s>" % (
        cnf["Dinstall::MyAdminAddress"])

    Subst = upload.Subst

    Subst["__FROM_ADDRESS__"] = user_email_address
    Subst["__PROD_MESSAGE__"] = prod_message
    Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]

    prod_mail_message = utils.TemplateSubst(
        Subst,cnf["Dir::Templates"]+"/process-new.prod")

    # Send the prod mail
    utils.send_mail(prod_mail_message)

    print "Sent prodding message"
349 ################################################################################
350
def edit_note(note, upload, session, trainee=False):
    """
    Open an editor for a new NEW-queue note on C{upload} and store it as a
    NewComment row via C{session}.

    @param note: unused in the visible code — TODO confirm it can be dropped
    @param upload: Upload whose source/version the comment is attached to
    @param session: SQLA session used to persist the comment
    @param trainee: whether the author is acting as a trainee
    """
    # Write the current data to a temporary file
    (fd, temp_filename) = utils.temp_filename()
    editor = os.environ.get("EDITOR","vi")
    answer = 'E'
    # Loop while the user keeps choosing [E]dit.
    while answer == 'E':
        os.system("%s %s" % (editor, temp_filename))
        temp_file = utils.open_file(temp_filename)
        newnote = temp_file.read().rstrip()
        temp_file.close()
        print "New Note:"
        print utils.prefix_multi_line_string(newnote,"  ")
        prompt = "[D]one, Edit, Abandon, Quit ?"
        answer = "XXX"
        # Re-prompt until a letter from the prompt is given; empty input
        # takes the default extracted by re_default_answer.
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            m = re_default_answer.search(prompt)
            if answer == "":
                answer = m.group(1)
            answer = answer[:1].upper()
    os.unlink(temp_filename)
    if answer == 'A':
        return
    elif answer == 'Q':
        # NOTE(review): end() is not defined in this view of the file;
        # presumably a module-level cleanup helper defined elsewhere.
        end()
        sys.exit(0)

    # Persist the note against the upload's source package and version.
    comment = NewComment()
    comment.package = upload.pkg.changes["source"]
    comment.version = upload.pkg.changes["version"]
    comment.comment = newnote
    comment.author  = utils.whoami()
    comment.trainee = trainee
    session.add(comment)
    session.commit()
386
387 ###############################################################################
388
389 # suite names DMs can upload to
390 dm_suites = ['unstable', 'experimental']
391
def get_newest_source(source, session):
    """Return the newest DBSource for C{source} in dm_suites, or None."""
    ## the most recent version of the package uploaded to unstable or
    ## experimental includes the field "DM-Upload-Allowed: yes" in the source
    ## section of its control file
    query = session.query(DBSource)
    query = query.filter_by(source = source)
    query = query.filter(DBSource.suites.any(Suite.suite_name.in_(dm_suites)))
    query = query.order_by(desc('source.version'))
    return query.first()
401
def get_suite_version_by_source(source, session):
    """Return a list of (suite_name, version) tuples for source package C{source}."""
    query = session.query(Suite.suite_name, DBSource.version)
    query = query.join(Suite.sources).filter_by(source = source)
    return query.all()
407
def get_source_by_package_and_suite(package, suite_name, session):
    '''
    returns a DBSource query filtered by DBBinary.package and this package's
    suite_name
    '''
    query = session.query(DBSource)
    query = query.join(DBSource.binaries).filter_by(package = package)
    query = query.join(DBBinary.suites).filter_by(suite_name = suite_name)
    return query
416
def get_suite_version_by_package(package, arch_string, session):
    '''
    returns a list of tuples (suite_name, version) for binary package and
    arch_string
    '''
    query = session.query(Suite.suite_name, DBBinary.version)
    query = query.join(Suite.binaries).filter_by(package = package)
    query = query.join(DBBinary.architecture)
    # Arch "all" binaries are valid for every architecture.
    query = query.filter(Architecture.arch_string.in_([arch_string, 'all']))
    return query.all()
426
427 class Upload(object):
428     """
429     Everything that has to do with an upload processed.
430
431     """
432     def __init__(self):
433         self.logger = None
434         self.pkg = Changes()
435         self.reset()
436
437     ###########################################################################
438
    def reset (self):
        """ Reset a number of internal variables."""

        # Initialize the substitution template map
        cnf = Config()
        self.Subst = {}
        self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
        # BugServer is optional in the configuration, hence the guard.
        if cnf.has_key("Dinstall::BugServer"):
            self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
        self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
        self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]

        # Per-upload message accumulators (see package_info()).
        self.rejects = []
        self.warnings = []
        self.notes = []

        # Files whose checks are deferred until later in processing.
        self.later_check_files = []

        self.pkg.reset()
458
459     def package_info(self):
460         """
461         Format various messages from this Upload to send to the maintainer.
462         """
463
464         msgs = (
465             ('Reject Reasons', self.rejects),
466             ('Warnings', self.warnings),
467             ('Notes', self.notes),
468         )
469
470         msg = ''
471         for title, messages in msgs:
472             if messages:
473                 msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
474         msg += '\n\n'
475
476         return msg
477
478     ###########################################################################
    def update_subst(self):
        """ Set up the per-package template substitution mappings """

        cnf = Config()

        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not self.pkg.changes.has_key("architecture") or not \
           isinstance(self.pkg.changes["architecture"], dict):
            self.pkg.changes["architecture"] = { "Unknown" : "" }

        # and maintainer2047 may not exist.
        if not self.pkg.changes.has_key("maintainer2047"):
            self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]

        self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
        self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if self.pkg.changes["architecture"].has_key("source") and \
           self.pkg.changes["changedby822"] != "" and \
           (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):

            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
        else:
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")

        # Process policy doesn't set the fingerprint field and I don't want to make it
        # do it for now as I don't want to have to deal with the case where we accepted
        # the package into PU-NEW, but the fingerprint has gone away from the keyring in
        # the meantime so the package will be remarked as rejectable.  Urgh.
        # TODO: Fix this properly
        if self.pkg.changes.has_key('fingerprint'):
            session = DBConn().session()
            fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
            # NOTE(review): fpr.uid is dereferenced unconditionally here;
            # presumably a known fingerprint always has a uid — confirm.
            if fpr and self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
                if self.pkg.changes.has_key("sponsoremail"):
                    self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
            session.close()

        # Bcc the package tracking system if one is configured.
        if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
            self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if cnf.get("Dinstall::OverrideMaintainer"):
            self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
            self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]

        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
        self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
        self.Subst["__SUITE__"] = ", ".join(self.pkg.changes["distribution"])
535
536     ###########################################################################
    def load_changes(self, filename):
        """
        Load a changes file and setup a dictionary around it. Also checks for mandantory
        fields  within.

        @type filename: string
        @param filename: Changes filename, full path.

        @rtype: boolean
        @return: whether the changes file was valid or not.  We may want to
                 reject even if this is True (see what gets put in self.rejects).
                 This is simply to prevent us even trying things later which will
                 fail because we couldn't properly parse the file.
        """
        Cnf = Config()
        self.pkg.changes_file = filename

        # Parse the .changes field into a dictionary
        try:
            self.pkg.changes.update(parse_changes(filename))
        except CantOpenError:
            self.rejects.append("%s: can't read file." % (filename))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except ChangesUnicodeError:
            self.rejects.append("%s: changes file not proper utf-8" % (filename))
            return False

        # Parse the Files field from the .changes into another dictionary
        try:
            self.pkg.files.update(utils.build_file_list(self.pkg.changes))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (filename, format))
            return False

        # Check for mandatory fields
        for i in ("distribution", "source", "binary", "architecture",
                  "version", "maintainer", "files", "changes", "description"):
            if not self.pkg.changes.has_key(i):
                # Avoid undefined errors later
                self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
                return False

        # Strip a source version in brackets from the source field
        if re_strip_srcver.search(self.pkg.changes["source"]):
            self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])

        # Ensure the source field is a valid package name.
        if not re_valid_pkg_name.match(self.pkg.changes["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))

        # Split multi-value fields into a lower-level dictionary
        # (each whitespace-separated token becomes a key mapped to 1).
        for i in ("architecture", "distribution", "binary", "closes"):
            o = self.pkg.changes.get(i, "")
            if o != "":
                del self.pkg.changes[i]

            self.pkg.changes[i] = {}

            for j in o.split():
                self.pkg.changes[i][j] = 1

        # Fix the Maintainer: field to be RFC822/2047 compatible
        try:
            (self.pkg.changes["maintainer822"],
             self.pkg.changes["maintainer2047"],
             self.pkg.changes["maintainername"],
             self.pkg.changes["maintaineremail"]) = \
                   fix_maintainer (self.pkg.changes["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["maintainer"], msg))

        # ...likewise for the Changed-By: field if it exists.
        try:
            (self.pkg.changes["changedby822"],
             self.pkg.changes["changedby2047"],
             self.pkg.changes["changedbyname"],
             self.pkg.changes["changedbyemail"]) = \
                   fix_maintainer (self.pkg.changes.get("changed-by", ""))
        except ParseMaintError, msg:
            # On failure, fall back to empty values so later code can rely
            # on the keys existing.
            self.pkg.changes["changedby822"] = ""
            self.pkg.changes["changedby2047"] = ""
            self.pkg.changes["changedbyname"] = ""
            self.pkg.changes["changedbyemail"] = ""

            self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["changed-by"], msg))

        # Ensure all the values in Closes: are numbers
        if self.pkg.changes.has_key("closes"):
            for i in self.pkg.changes["closes"].keys():
                if re_isanum.match (i) == None:
                    self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))

        # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
        self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
        self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])

        # Check the .changes is non-empty
        if not self.pkg.files:
            self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
            return False

        # Changes was syntactically valid even if we'll reject
        return True
648
649     ###########################################################################
650
    def check_distributions(self):
        "Check and map the Distribution field"

        Cnf = Config()

        # Handle suite mappings
        for m in Cnf.ValueList("SuiteMappings"):
            args = m.split()
            mtype = args[0]
            if mtype == "map" or mtype == "silent-map":
                # Unconditionally rename suite `source' to `dest' in the
                # target list; "silent-map" skips the user-visible note.
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    del self.pkg.changes["distribution"][source]
                    self.pkg.changes["distribution"][dest] = 1
                    if mtype != "silent-map":
                        self.notes.append("Mapping %s to %s." % (source, dest))
                if self.pkg.changes.has_key("distribution-version"):
                    if self.pkg.changes["distribution-version"].has_key(source):
                        self.pkg.changes["distribution-version"][source]=dest
            elif mtype == "map-unreleased":
                # Map `source' to `dest' only when the upload contains an
                # architecture not built in `source'.
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    for arch in self.pkg.changes["architecture"].keys():
                        if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
                            self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
                            del self.pkg.changes["distribution"][source]
                            self.pkg.changes["distribution"][dest] = 1
                            break
            elif mtype == "ignore":
                # Silently drop the suite from the target list (warn only).
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    del self.pkg.changes["distribution"][suite]
                    self.warnings.append("Ignoring %s as a target suite." % (suite))
            elif mtype == "reject":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    self.rejects.append("Uploads to %s are not accepted." % (suite))
            elif mtype == "propup-version":
                # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
                #
                # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
                if self.pkg.changes["distribution"].has_key(args[1]):
                    self.pkg.changes.setdefault("distribution-version", {})
                    for suite in args[2:]:
                        self.pkg.changes["distribution-version"][suite] = suite

        # Ensure there is (still) a target distribution
        if len(self.pkg.changes["distribution"].keys()) < 1:
            self.rejects.append("No valid distribution remaining.")

        # Ensure target distributions exist
        for suite in self.pkg.changes["distribution"].keys():
            if not Cnf.has_key("Suite::%s" % (suite)):
                self.rejects.append("Unknown distribution `%s'." % (suite))
705
706     ###########################################################################
707
708     def binary_file_checks(self, f, session):
709         cnf = Config()
710         entry = self.pkg.files[f]
711
712         # Extract package control information
713         deb_file = utils.open_file(f)
714         try:
715             control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
716         except:
717             self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
718             deb_file.close()
719             # Can't continue, none of the checks on control would work.
720             return
721
722         # Check for mandantory "Description:"
723         deb_file.seek(0)
724         try:
725             apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
726         except:
727             self.rejects.append("%s: Missing Description in binary package" % (f))
728             return
729
730         deb_file.close()
731
732         # Check for mandatory fields
733         for field in [ "Package", "Architecture", "Version" ]:
734             if control.Find(field) == None:
735                 # Can't continue
736                 self.rejects.append("%s: No %s field in control." % (f, field))
737                 return
738
739         # Ensure the package name matches the one give in the .changes
740         if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
741             self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
742
743         # Validate the package field
744         package = control.Find("Package")
745         if not re_valid_pkg_name.match(package):
746             self.rejects.append("%s: invalid package name '%s'." % (f, package))
747
748         # Validate the version field
749         version = control.Find("Version")
750         if not re_valid_version.match(version):
751             self.rejects.append("%s: invalid version number '%s'." % (f, version))
752
753         # Ensure the architecture of the .deb is one we know about.
754         default_suite = cnf.get("Dinstall::DefaultSuite", "unstable")
755         architecture = control.Find("Architecture")
756         upload_suite = self.pkg.changes["distribution"].keys()[0]
757
758         if      architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session = session)] \
759             and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session = session)]:
760             self.rejects.append("Unknown architecture '%s'." % (architecture))
761
762         # Ensure the architecture of the .deb is one of the ones
763         # listed in the .changes.
764         if not self.pkg.changes["architecture"].has_key(architecture):
765             self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
766
767         # Sanity-check the Depends field
768         depends = control.Find("Depends")
769         if depends == '':
770             self.rejects.append("%s: Depends field is empty." % (f))
771
772         # Sanity-check the Provides field
773         provides = control.Find("Provides")
774         if provides:
775             provide = re_spacestrip.sub('', provides)
776             if provide == '':
777                 self.rejects.append("%s: Provides field is empty." % (f))
778             prov_list = provide.split(",")
779             for prov in prov_list:
780                 if not re_valid_pkg_name.match(prov):
781                     self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
782
783         # If there is a Built-Using field, we need to check we can find the
784         # exact source version
785         built_using = control.Find("Built-Using")
786         if built_using:
787             try:
788                 entry["built-using"] = []
789                 for dep in apt_pkg.parse_depends(built_using):
790                     bu_s, bu_v, bu_e = dep[0]
791                     # Check that it's an exact match dependency and we have
792                     # some form of version
793                     if bu_e != "=" or len(bu_v) < 1:
794                         self.rejects.append("%s: Built-Using contains non strict dependency (%s %s %s)" % (f, bu_s, bu_e, bu_v))
795                     else:
796                         # Find the source id for this version
797                         bu_so = get_sources_from_name(bu_s, version=bu_v, session = session)
798                         if len(bu_so) != 1:
799                             self.rejects.append("%s: Built-Using (%s = %s): Cannot find source package" % (f, bu_s, bu_v))
800                         else:
801                             entry["built-using"].append( (bu_so[0].source, bu_so[0].version, ) )
802
803             except ValueError, e:
804                 self.rejects.append("%s: Cannot parse Built-Using field: %s" % (f, str(e)))
805
806
807         # Check the section & priority match those given in the .changes (non-fatal)
808         if     control.Find("Section") and entry["section"] != "" \
809            and entry["section"] != control.Find("Section"):
810             self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
811                                 (f, control.Find("Section", ""), entry["section"]))
812         if control.Find("Priority") and entry["priority"] != "" \
813            and entry["priority"] != control.Find("Priority"):
814             self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
815                                 (f, control.Find("Priority", ""), entry["priority"]))
816
817         entry["package"] = package
818         entry["architecture"] = architecture
819         entry["version"] = version
820         entry["maintainer"] = control.Find("Maintainer", "")
821
822         if f.endswith(".udeb"):
823             self.pkg.files[f]["dbtype"] = "udeb"
824         elif f.endswith(".deb"):
825             self.pkg.files[f]["dbtype"] = "deb"
826         else:
827             self.rejects.append("%s is neither a .deb or a .udeb." % (f))
828
829         entry["source"] = control.Find("Source", entry["package"])
830
831         # Get the source version
832         source = entry["source"]
833         source_version = ""
834
835         if source.find("(") != -1:
836             m = re_extract_src_version.match(source)
837             source = m.group(1)
838             source_version = m.group(2)
839
840         if not source_version:
841             source_version = self.pkg.files[f]["version"]
842
843         entry["source package"] = source
844         entry["source version"] = source_version
845
846         # Ensure the filename matches the contents of the .deb
847         m = re_isadeb.match(f)
848
849         #  package name
850         file_package = m.group(1)
851         if entry["package"] != file_package:
852             self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
853                                 (f, file_package, entry["dbtype"], entry["package"]))
854         epochless_version = re_no_epoch.sub('', control.Find("Version"))
855
856         #  version
857         file_version = m.group(2)
858         if epochless_version != file_version:
859             self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
860                                 (f, file_version, entry["dbtype"], epochless_version))
861
862         #  architecture
863         file_architecture = m.group(3)
864         if entry["architecture"] != file_architecture:
865             self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
866                                 (f, file_architecture, entry["dbtype"], entry["architecture"]))
867
868         # Check for existent source
869         source_version = entry["source version"]
870         source_package = entry["source package"]
871         if self.pkg.changes["architecture"].has_key("source"):
872             if source_version != self.pkg.changes["version"]:
873                 self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
874                                     (source_version, f, self.pkg.changes["version"]))
875         else:
876             # Check in the SQL database
877             if not source_exists(source_package, source_version, suites = \
878                 self.pkg.changes["distribution"].keys(), session = session):
879                 # Check in one of the other directories
880                 source_epochless_version = re_no_epoch.sub('', source_version)
881                 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
882                 if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
883                     entry["byhand"] = 1
884                 elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
885                     entry["new"] = 1
886                 else:
887                     dsc_file_exists = False
888                     for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
889                         if cnf.has_key("Dir::Queue::%s" % (myq)):
890                             if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
891                                 dsc_file_exists = True
892                                 break
893
894                     if not dsc_file_exists:
895                         self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
896
897         # Check the version and for file overwrites
898         self.check_binary_against_db(f, session)
899
900     def source_file_checks(self, f, session):
901         entry = self.pkg.files[f]
902
903         m = re_issource.match(f)
904         if not m:
905             return
906
907         entry["package"] = m.group(1)
908         entry["version"] = m.group(2)
909         entry["type"] = m.group(3)
910
911         # Ensure the source package name matches the Source filed in the .changes
912         if self.pkg.changes["source"] != entry["package"]:
913             self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
914
915         # Ensure the source version matches the version in the .changes file
916         if re_is_orig_source.match(f):
917             changes_version = self.pkg.changes["chopversion2"]
918         else:
919             changes_version = self.pkg.changes["chopversion"]
920
921         if changes_version != entry["version"]:
922             self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))
923
924         # Ensure the .changes lists source in the Architecture field
925         if not self.pkg.changes["architecture"].has_key("source"):
926             self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))
927
928         # Check the signature of a .dsc file
929         if entry["type"] == "dsc":
930             # check_signature returns either:
931             #  (None, [list, of, rejects]) or (signature, [])
932             (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
933             for j in rejects:
934                 self.rejects.append(j)
935
936         entry["architecture"] = "source"
937
938     def per_suite_file_checks(self, f, suite, session):
939         cnf = Config()
940         entry = self.pkg.files[f]
941
942         # Skip byhand
943         if entry.has_key("byhand"):
944             return
945
946         # Check we have fields we need to do these checks
947         oktogo = True
948         for m in ['component', 'package', 'priority', 'size', 'md5sum']:
949             if not entry.has_key(m):
950                 self.rejects.append("file '%s' does not have field %s set" % (f, m))
951                 oktogo = False
952
953         if not oktogo:
954             return
955
956         # Handle component mappings
957         for m in cnf.ValueList("ComponentMappings"):
958             (source, dest) = m.split()
959             if entry["component"] == source:
960                 entry["original component"] = source
961                 entry["component"] = dest
962
963         # Ensure the component is valid for the target suite
964         if cnf.has_key("Suite:%s::Components" % (suite)) and \
965            entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
966             self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
967             return
968
969         # Validate the component
970         if not get_component(entry["component"], session):
971             self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
972             return
973
974         # See if the package is NEW
975         if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
976             entry["new"] = 1
977
978         # Validate the priority
979         if entry["priority"].find('/') != -1:
980             self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))
981
982         # Determine the location
983         location = cnf["Dir::Pool"]
984         l = get_location(location, entry["component"], session=session)
985         if l is None:
986             self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
987             entry["location id"] = -1
988         else:
989             entry["location id"] = l.location_id
990
991         # Check the md5sum & size against existing files (if any)
992         entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])
993
994         found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
995                                          entry["size"], entry["md5sum"], entry["location id"])
996
997         if found is None:
998             self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
999         elif found is False and poolfile is not None:
1000             self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
1001         else:
1002             if poolfile is None:
1003                 entry["files id"] = None
1004             else:
1005                 entry["files id"] = poolfile.file_id
1006
1007         # Check for packages that have moved from one component to another
1008         entry['suite'] = suite
1009         arch_list = [entry["architecture"], 'all']
1010         component = get_component_by_package_suite(self.pkg.files[f]['package'], \
1011             [suite], arch_list = arch_list, session = session)
1012         if component is not None:
1013             entry["othercomponents"] = component
1014
1015     def check_files(self, action=True):
1016         file_keys = self.pkg.files.keys()
1017         holding = Holding()
1018         cnf = Config()
1019
1020         if action:
1021             cwd = os.getcwd()
1022             os.chdir(self.pkg.directory)
1023             for f in file_keys:
1024                 ret = holding.copy_to_holding(f)
1025                 if ret is not None:
1026                     self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)
1027
1028             os.chdir(cwd)
1029
1030         # check we already know the changes file
1031         # [NB: this check must be done post-suite mapping]
1032         base_filename = os.path.basename(self.pkg.changes_file)
1033
1034         session = DBConn().session()
1035
1036         try:
1037             dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
1038             # if in the pool or in a queue other than unchecked, reject
1039             if (dbc.in_queue is None) \
1040                    or (dbc.in_queue is not None
1041                        and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
1042                 self.rejects.append("%s file already known to dak" % base_filename)
1043         except NoResultFound, e:
1044             # not known, good
1045             pass
1046
1047         has_binaries = False
1048         has_source = False
1049
1050         for f, entry in self.pkg.files.items():
1051             # Ensure the file does not already exist in one of the accepted directories
1052             for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
1053                 if not cnf.has_key("Dir::Queue::%s" % (d)): continue
1054                 if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
1055                     self.rejects.append("%s file already exists in the %s directory." % (f, d))
1056
1057             if not re_taint_free.match(f):
1058                 self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))
1059
1060             # Check the file is readable
1061             if os.access(f, os.R_OK) == 0:
1062                 # When running in -n, copy_to_holding() won't have
1063                 # generated the reject_message, so we need to.
1064                 if action:
1065                     if os.path.exists(f):
1066                         self.rejects.append("Can't read `%s'. [permission denied]" % (f))
1067                     else:
1068                         # Don't directly reject, mark to check later to deal with orig's
1069                         # we can find in the pool
1070                         self.later_check_files.append(f)
1071                 entry["type"] = "unreadable"
1072                 continue
1073
1074             # If it's byhand skip remaining checks
1075             if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
1076                 entry["byhand"] = 1
1077                 entry["type"] = "byhand"
1078
1079             # Checks for a binary package...
1080             elif re_isadeb.match(f):
1081                 has_binaries = True
1082                 entry["type"] = "deb"
1083
1084                 # This routine appends to self.rejects/warnings as appropriate
1085                 self.binary_file_checks(f, session)
1086
1087             # Checks for a source package...
1088             elif re_issource.match(f):
1089                 has_source = True
1090
1091                 # This routine appends to self.rejects/warnings as appropriate
1092                 self.source_file_checks(f, session)
1093
1094             # Not a binary or source package?  Assume byhand...
1095             else:
1096                 entry["byhand"] = 1
1097                 entry["type"] = "byhand"
1098
1099             # Per-suite file checks
1100             entry["oldfiles"] = {}
1101             for suite in self.pkg.changes["distribution"].keys():
1102                 self.per_suite_file_checks(f, suite, session)
1103
1104         session.close()
1105
1106         # If the .changes file says it has source, it must have source.
1107         if self.pkg.changes["architecture"].has_key("source"):
1108             if not has_source:
1109                 self.rejects.append("no source found and Architecture line in changes mention source.")
1110
1111             if (not has_binaries) and (not cnf.FindB("Dinstall::AllowSourceOnlyUploads")):
1112                 self.rejects.append("source only uploads are not supported.")
1113
1114     ###########################################################################
1115
1116     def __dsc_filename(self):
1117         """
1118         Returns: (Status, Dsc_Filename)
1119         where
1120           Status: Boolean; True when there was no error, False otherwise
1121           Dsc_Filename: String; name of the dsc file if Status is True, reason for the error otherwise
1122         """
1123         dsc_filename = None
1124
1125         # find the dsc
1126         for name, entry in self.pkg.files.items():
1127             if entry.has_key("type") and entry["type"] == "dsc":
1128                 if dsc_filename:
1129                     return False, "cannot process a .changes file with multiple .dsc's."
1130                 else:
1131                     dsc_filename = name
1132
1133         if not dsc_filename:
1134             return False, "source uploads must contain a dsc file"
1135
1136         return True, dsc_filename
1137
    def load_dsc(self, action=True, signing_rules=1):
        """
        Find and load the dsc from self.pkg.files into self.dsc

        Returns: (Status, Reason)
        where
          Status: Boolean; True when there was no error, False otherwise
          Reason: String; When Status is False this describes the error
        """

        # find the dsc
        (status, dsc_filename) = self.__dsc_filename()
        if not status:
            # If status is false, dsc_filename has the reason
            return False, dsc_filename

        try:
            self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=signing_rules, dsc_file=1))
        except CantOpenError:
            # NOTE(review): when action is True this error is swallowed and
            # we fall through to the success return -- presumably the file
            # is expected to appear in holding later; confirm intentional.
            if not action:
                return False, "%s: can't read file." % (dsc_filename)
        except ParseChangesError, line:
            return False, "%s: parse error, can't grok: %s." % (dsc_filename, line)
        except InvalidDscError, line:
            return False, "%s: syntax error on line %s." % (dsc_filename, line)
        except ChangesUnicodeError:
            return False, "%s: dsc file not proper utf-8." % (dsc_filename)

        return True, None
1167
1168     ###########################################################################
1169
    def check_dsc(self, action=True, session=None):
        """
        Returns bool indicating whether or not the source changes are valid.

        Loads the .dsc, then validates: mandatory fields, source name and
        version syntax, source format allowed per target suite, Maintainer
        and Build-Depends parseability, version agreement with the
        .changes, the Files field contents, and the database cross-checks.
        Problems are appended to self.rejects.
        """
        # Ensure there is source to check
        if not self.pkg.changes["architecture"].has_key("source"):
            return True

        (status, reason) = self.load_dsc(action=action)
        if not status:
            self.rejects.append(reason)
            return False
        (status, dsc_filename) = self.__dsc_filename()
        if not status:
            # If status is false, dsc_filename has the reason
            self.rejects.append(dsc_filename)
            return False

        # Build up the file list of files mentioned by the .dsc
        try:
            self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
        except NoFilesFieldError:
            self.rejects.append("%s: no Files: field." % (dsc_filename))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
            return False

        # Enforce mandatory fields
        for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
            if not self.pkg.dsc.has_key(i):
                self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
                return False

        # Validate the source and version fields
        if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
        if not re_valid_version.match(self.pkg.dsc["version"]):
            self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))

        # Only a limited list of source formats are allowed in each suite
        for dist in self.pkg.changes["distribution"].keys():
            allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
            if self.pkg.dsc["format"] not in allowed:
                self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))

        # Validate the Maintainer field
        try:
            # We ignore the return value
            fix_maintainer(self.pkg.dsc["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                                 % (dsc_filename, self.pkg.dsc["maintainer"], msg))

        # Validate the build-depends field(s)
        for field_name in [ "build-depends", "build-depends-indep" ]:
            field = self.pkg.dsc.get(field_name)
            if field:
                # Have apt try to parse them...
                try:
                    apt_pkg.ParseSrcDepends(field)
                except:
                    self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))

        # Ensure the version number in the .dsc matches the version number in the .changes
        epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        changes_version = self.pkg.files[dsc_filename]["version"]

        if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
            self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))

        # Ensure the Files field contain only what's expected
        self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))

        # Ensure source is newer than existing source in target suites
        # NOTE(review): this opens a fresh session, shadowing the `session`
        # parameter used above -- confirm that is intentional.
        session = DBConn().session()
        self.check_source_against_db(dsc_filename, session)
        self.check_dsc_against_db(dsc_filename, session)

        dbchg = get_dbchange(self.pkg.changes_file, session)

        # Finally, check if we're missing any files
        for f in self.later_check_files:
            # NOTE(review): looks like leftover debug output -- confirm.
            print 'XXX: %s' % f
            # Check if we've already processed this file if we have a dbchg object
            ok = False
            if dbchg:
                for pf in dbchg.files:
                    if pf.filename == f and pf.processed:
                        self.notes.append('%s was already processed so we can go ahead' % f)
                        ok = True
                        del self.pkg.files[f]
            if not ok:
                self.rejects.append("Could not find file %s references in changes" % f)

        session.close()

        return (len(self.rejects) == 0)
1269
1270     ###########################################################################
1271
    def get_changelog_versions(self, source_dir):
        """Extracts the source package and (optionally) grabs the
        version history out of debian/changelog for the BTS.

        Expects the caller to have chdir'd into a scratch directory;
        symlinks the source files there, unpacks the .dsc and, when
        Dir::Queue::BTSVersionTrack is configured, stores the matching
        changelog lines in self.pkg.dsc["bts changelog"].  Problems are
        appended to self.rejects.
        """

        cnf = Config()

        # Find the .dsc (again)
        dsc_filename = None
        for f in self.pkg.files.keys():
            if self.pkg.files[f]["type"] == "dsc":
                dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            return

        # Create a symlink mirror of the source files in our temporary directory
        for f in self.pkg.files.keys():
            m = re_issource.match(f)
            if m:
                src = os.path.join(source_dir, f)
                # If a file is missing for whatever reason, give up.
                if not os.path.exists(src):
                    return
                # NOTE(review): ftype is assigned but never used here.
                ftype = m.group(3)
                # Orig tarballs already present in the pool are linked in
                # the next loop instead.
                if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
                   self.pkg.orig_files[f].has_key("path"):
                    continue
                dest = os.path.join(os.getcwd(), f)
                os.symlink(src, dest)

        # If the orig files are not a part of the upload, create symlinks to the
        # existing copies.
        for orig_file in self.pkg.orig_files.keys():
            if not self.pkg.orig_files[orig_file].has_key("path"):
                continue
            dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
            os.symlink(self.pkg.orig_files[orig_file]["path"], dest)

        # Extract the source
        try:
            unpacked = UnpackedSource(dsc_filename)
        except:
            self.rejects.append("'dpkg-source -x' failed for %s." % dsc_filename)
            return

        # Version tracking is optional; stop here if it's not configured.
        if not cnf.Find("Dir::Queue::BTSVersionTrack"):
            return

        # Get the upstream version
        upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        if re_strip_revision.search(upstr_version):
            upstr_version = re_strip_revision.sub('', upstr_version)

        # Ensure the changelog file exists
        changelog_file = unpacked.get_changelog_file()
        if changelog_file is None:
            self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
            return

        # Parse the changelog
        self.pkg.dsc["bts changelog"] = ""
        for line in changelog_file.readlines():
            m = re_changelog_versions.match(line)
            if m:
                self.pkg.dsc["bts changelog"] += line
        changelog_file.close()
        unpacked.cleanup()

        # Check we found at least one revision in the changelog
        if not self.pkg.dsc["bts changelog"]:
            self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1344
1345     def check_source(self):
1346         # Bail out if:
1347         #    a) there's no source
1348         if not self.pkg.changes["architecture"].has_key("source"):
1349             return
1350
1351         tmpdir = utils.temp_dirname()
1352
1353         # Move into the temporary directory
1354         cwd = os.getcwd()
1355         os.chdir(tmpdir)
1356
1357         # Get the changelog version history
1358         self.get_changelog_versions(cwd)
1359
1360         # Move back and cleanup the temporary tree
1361         os.chdir(cwd)
1362
1363         try:
1364             shutil.rmtree(tmpdir)
1365         except OSError, e:
1366             if e.errno != errno.EACCES:
1367                 print "foobar"
1368                 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1369
1370             self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1371             # We probably have u-r or u-w directories so chmod everything
1372             # and try again.
1373             cmd = "chmod -R u+rwx %s" % (tmpdir)
1374             result = os.system(cmd)
1375             if result != 0:
1376                 utils.fubar("'%s' failed with result %s." % (cmd, result))
1377             shutil.rmtree(tmpdir)
1378         except Exception, e:
1379             print "foobar2 (%s)" % e
1380             utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1381
1382     ###########################################################################
1383     def ensure_hashes(self):
1384         # Make sure we recognise the format of the Files: field in the .changes
1385         format = self.pkg.changes.get("format", "0.0").split(".", 1)
1386         if len(format) == 2:
1387             format = int(format[0]), int(format[1])
1388         else:
1389             format = int(float(format[0])), 0
1390
1391         # We need to deal with the original changes blob, as the fields we need
1392         # might not be in the changes dict serialised into the .dak anymore.
1393         orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1394
1395         # Copy the checksums over to the current changes dict.  This will keep
1396         # the existing modifications to it intact.
1397         for field in orig_changes:
1398             if field.startswith('checksums-'):
1399                 self.pkg.changes[field] = orig_changes[field]
1400
1401         # Check for unsupported hashes
1402         for j in utils.check_hash_fields(".changes", self.pkg.changes):
1403             self.rejects.append(j)
1404
1405         for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1406             self.rejects.append(j)
1407
1408         # We have to calculate the hash if we have an earlier changes version than
1409         # the hash appears in rather than require it exist in the changes file
1410         for hashname, hashfunc, version in utils.known_hashes:
1411             # TODO: Move _ensure_changes_hash into this class
1412             for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1413                 self.rejects.append(j)
1414             if "source" in self.pkg.changes["architecture"]:
1415                 # TODO: Move _ensure_dsc_hash into this class
1416                 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1417                     self.rejects.append(j)
1418
1419     def check_hashes(self):
1420         for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1421             self.rejects.append(m)
1422
1423         for m in utils.check_size(".changes", self.pkg.files):
1424             self.rejects.append(m)
1425
1426         for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1427             self.rejects.append(m)
1428
1429         for m in utils.check_size(".dsc", self.pkg.dsc_files):
1430             self.rejects.append(m)
1431
1432         self.ensure_hashes()
1433
1434     ###########################################################################
1435
    def ensure_orig(self, target_dir='.', session=None):
        """
        Ensures that all orig files mentioned in the changes file are present
        in target_dir. If they do not exist, they are symlinked into place.

        A list containing the symlinks that were created is returned (so they
        can be removed).

        @type target_dir: string
        @param target_dir: directory the orig files must end up in

        @type session: SQLAlchemy session or None
        @param session: optional session to reuse; when None a fresh one is
            opened (and closed) per orig file looked up in the pool
        """

        symlinked = []
        cnf = Config()

        for filename, entry in self.pkg.dsc_files.iteritems():
            if not re_is_orig_source.match(filename):
                # File is not an orig; ignore
                continue

            if os.path.exists(filename):
                # File exists, no need to continue
                continue

            def symlink_if_valid(path):
                # Symlink path into target_dir, but only when its size and
                # md5sum match what the .dsc expects for this orig file.
                # NOTE: closes over the loop's `entry` and over `symlinked`.
                f = utils.open_file(path)
                md5sum = apt_pkg.md5sum(f)
                f.close()

                fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
                expected = (int(entry['size']), entry['md5sum'])

                if fingerprint != expected:
                    return False

                dest = os.path.join(target_dir, filename)

                os.symlink(path, dest)
                symlinked.append(dest)

                return True

            # Only open (and later close) our own session when the caller
            # did not supply one.
            session_ = session
            if session is None:
                session_ = DBConn().session()

            found = False

            # Look in the pool
            for poolfile in get_poolfile_like_name('%s' % filename, session_):
                poolfile_path = os.path.join(
                    poolfile.location.path, poolfile.filename
                )

                if symlink_if_valid(poolfile_path):
                    found = True
                    break

            if session is None:
                session_.close()

            if found:
                continue

            # Look in some other queues for the file
            queues = ('New', 'Byhand', 'ProposedUpdates',
                'OldProposedUpdates', 'Embargoed', 'Unembargoed')

            for queue in queues:
                if not cnf.get('Dir::Queue::%s' % queue):
                    # Queue not configured on this installation; skip it.
                    continue

                queuefile_path = os.path.join(
                    cnf['Dir::Queue::%s' % queue], filename
                )

                if not os.path.exists(queuefile_path):
                    # Does not exist in this queue
                    continue

                if symlink_if_valid(queuefile_path):
                    break

        return symlinked
1517
1518     ###########################################################################
1519
    def check_lintian(self):
        """
        Extends self.rejects by checking the output of lintian against tags
        specified in Dinstall::LintianTags.
        """

        cnf = Config()

        # Don't reject binary uploads
        if not self.pkg.changes['architecture'].has_key('source'):
            return

        # Only check some distributions
        for dist in ('unstable', 'experimental'):
            if dist in self.pkg.changes['distribution']:
                break
        else:
            return

        # If we do not have a tagfile, don't do anything
        tagfile = cnf.get("Dinstall::LintianTags")
        if not tagfile:
            return

        # Parse the yaml file
        sourcefile = file(tagfile, 'r')
        sourcecontent = sourcefile.read()
        sourcefile.close()

        try:
            # NOTE(review): yaml.load without SafeLoader can execute arbitrary
            # constructors; presumably acceptable because the tag file is
            # operator-controlled configuration, not user input — confirm.
            lintiantags = yaml.load(sourcecontent)['lintian']
        except yaml.YAMLError, msg:
            # fubar exits the process, so the return below is defensive only.
            utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
            return

        # Try and find all orig mentioned in the .dsc
        symlinked = self.ensure_orig()

        # Setup the input file for lintian: one tag per line, across all
        # severity groups in the tag file.
        fd, temp_filename = utils.temp_filename()
        temptagfile = os.fdopen(fd, 'w')
        for tags in lintiantags.values():
            temptagfile.writelines(['%s\n' % x for x in tags])
        temptagfile.close()

        try:
            cmd = "lintian --show-overrides --tags-from-file %s %s" % \
                (temp_filename, self.pkg.changes_file)

            result, output = commands.getstatusoutput(cmd)
        finally:
            # Remove our tempfile and any symlinks we created
            os.unlink(temp_filename)

            for symlink in symlinked:
                os.unlink(symlink)

        if result == 2:
            # lintian itself failed; warn, but still parse whatever output
            # it managed to produce.
            utils.warn("lintian failed for %s [return code: %s]." % \
                (self.pkg.changes_file, result))
            utils.warn(utils.prefix_multi_line_string(output, \
                " [possible output:] "))

        def log(*txt):
            # Forward lintian reject details to the upload logger, if any.
            if self.logger:
                self.logger.log(
                    [self.pkg.changes_file, "check_lintian"] + list(txt)
                )

        # Generate messages
        parsed_tags = parse_lintian_output(output)
        self.rejects.extend(
            generate_reject_messages(parsed_tags, lintiantags, log=log)
        )
1594
1595     ###########################################################################
1596     def check_urgency(self):
1597         cnf = Config()
1598         if self.pkg.changes["architecture"].has_key("source"):
1599             if not self.pkg.changes.has_key("urgency"):
1600                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1601             self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1602             if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1603                 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1604                                      (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1605                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1606
1607     ###########################################################################
1608
1609     # Sanity check the time stamps of files inside debs.
1610     # [Files in the near future cause ugly warnings and extreme time
1611     #  travel can cause errors on extraction]
1612
1613     def check_timestamps(self):
1614         Cnf = Config()
1615
1616         future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1617         past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1618         tar = TarTime(future_cutoff, past_cutoff)
1619
1620         for filename, entry in self.pkg.files.items():
1621             if entry["type"] == "deb":
1622                 tar.reset()
1623                 try:
1624                     deb = apt_inst.DebFile(filename)
1625                     deb.control.go(tar.callback)
1626
1627                     future_files = tar.future_files.keys()
1628                     if future_files:
1629                         num_future_files = len(future_files)
1630                         future_file = future_files[0]
1631                         future_date = tar.future_files[future_file]
1632                         self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1633                                % (filename, num_future_files, future_file, time.ctime(future_date)))
1634
1635                     ancient_files = tar.ancient_files.keys()
1636                     if ancient_files:
1637                         num_ancient_files = len(ancient_files)
1638                         ancient_file = ancient_files[0]
1639                         ancient_date = tar.ancient_files[ancient_file]
1640                         self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1641                                % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1642                 except:
1643                     self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1644
1645     def check_if_upload_is_sponsored(self, uid_email, uid_name):
1646         if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1647             sponsored = False
1648         elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1649             sponsored = False
1650             if uid_name == "":
1651                 sponsored = True
1652         else:
1653             sponsored = True
1654             if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1655                 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1656                 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1657                     self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1658                         self.pkg.changes["sponsoremail"] = uid_email
1659
1660         return sponsored
1661
1662
1663     ###########################################################################
1664     # check_signed_by_key checks
1665     ###########################################################################
1666
1667     def check_signed_by_key(self):
1668         """Ensure the .changes is signed by an authorized uploader."""
1669         session = DBConn().session()
1670
1671         # First of all we check that the person has proper upload permissions
1672         # and that this upload isn't blocked
1673         fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1674
1675         if fpr is None:
1676             self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1677             return
1678
1679         # TODO: Check that import-keyring adds UIDs properly
1680         if not fpr.uid:
1681             self.rejects.append("Cannot find uid for fingerprint %s.  Please contact ftpmaster@debian.org" % fpr.fingerprint)
1682             return
1683
1684         # Check that the fingerprint which uploaded has permission to do so
1685         self.check_upload_permissions(fpr, session)
1686
1687         # Check that this package is not in a transition
1688         self.check_transition(session)
1689
1690         session.close()
1691
1692
1693     def check_upload_permissions(self, fpr, session):
1694         # Check any one-off upload blocks
1695         self.check_upload_blocks(fpr, session)
1696
1697         # If the source_acl is None, source is never allowed
1698         if fpr.source_acl is None:
1699             if self.pkg.changes["architecture"].has_key("source"):
1700                 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1701                 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1702                 self.rejects.append(rej)
1703                 return
1704         # Do DM as a special case
1705         # DM is a special case unfortunately, so we check it first
1706         # (keys with no source access get more access than DMs in one
1707         #  way; DMs can only upload for their packages whether source
1708         #  or binary, whereas keys with no access might be able to
1709         #  upload some binaries)
1710         elif fpr.source_acl.access_level == 'dm':
1711             self.check_dm_upload(fpr, session)
1712         else:
1713             # If not a DM, we allow full upload rights
1714             uid_email = "%s@debian.org" % (fpr.uid.uid)
1715             self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1716
1717
1718         # Check binary upload permissions
1719         # By this point we know that DMs can't have got here unless they
1720         # are allowed to deal with the package concerned so just apply
1721         # normal checks
1722         if fpr.binary_acl.access_level == 'full':
1723             return
1724
1725         # Otherwise we're in the map case
1726         tmparches = self.pkg.changes["architecture"].copy()
1727         tmparches.pop('source', None)
1728
1729         for bam in fpr.binary_acl_map:
1730             tmparches.pop(bam.architecture.arch_string, None)
1731
1732         if len(tmparches.keys()) > 0:
1733             if fpr.binary_reject:
1734                 rej = "changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1735                 if len(tmparches.keys()) == 1:
1736                     rej += "\n\narchitecture involved is: %s" % ",".join(tmparches.keys())
1737                 else:
1738                     rej += "\n\narchitectures involved are: %s" % ",".join(tmparches.keys())
1739                 self.rejects.append(rej)
1740             else:
1741                 # TODO: This is where we'll implement reject vs throw away binaries later
1742                 rej = "Uhm.  I'm meant to throw away the binaries now but that's not implemented yet"
1743                 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1744                 rej += "\nFingerprint: %s", (fpr.fingerprint)
1745                 self.rejects.append(rej)
1746
1747
1748     def check_upload_blocks(self, fpr, session):
1749         """Check whether any upload blocks apply to this source, source
1750            version, uid / fpr combination"""
1751
1752         def block_rej_template(fb):
1753             rej = 'Manual upload block in place for package %s' % fb.source
1754             if fb.version is not None:
1755                 rej += ', version %s' % fb.version
1756             return rej
1757
1758         for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1759             # version is None if the block applies to all versions
1760             if fb.version is None or fb.version == self.pkg.changes['version']:
1761                 # Check both fpr and uid - either is enough to cause a reject
1762                 if fb.fpr is not None:
1763                     if fb.fpr.fingerprint == fpr.fingerprint:
1764                         self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1765                 if fb.uid is not None:
1766                     if fb.uid == fpr.uid:
1767                         self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1768
1769
    def check_dm_upload(self, fpr, session):
        """Enforce the Debian Maintainer upload restrictions for fpr.

        Checks, in order: no NEW/BYHAND files; the source package already
        exists and its newest version carries DM-Upload-Allowed: yes; the
        upload is not sponsored; the uploader appears in Maintainer or
        Uploaders; and no binaries are taken over from another source
        package.  Each violation is appended to self.rejects.
        """
        # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
        ## none of the uploaded packages are NEW
        rej = False
        for f in self.pkg.files.keys():
            if self.pkg.files[f].has_key("byhand"):
                self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
                rej = True
            if self.pkg.files[f].has_key("new"):
                self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
                rej = True

        if rej:
            return

        # Newest existing version in unstable/experimental; None if absent.
        r = get_newest_source(self.pkg.changes["source"], session)

        if r is None:
            rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
            self.rejects.append(rej)
            return

        if not r.dm_upload_allowed:
            rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
            self.rejects.append(rej)
            return

        ## the Maintainer: field of the uploaded .changes file corresponds with
        ## the owner of the key used (ie, non-developer maintainers may not sponsor
        ## uploads)
        if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
            self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))

        ## the most recent version of the package uploaded to unstable or
        ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
        ## non-developer maintainers cannot NMU or hijack packages)

        # uploader includes the maintainer
        accept = False
        for uploader in r.uploaders:
            (rfc822, rfc2047, name, email) = uploader.get_split_maintainer()
            # Eww - I hope we never have two people with the same name in Debian
            if email == fpr.uid.uid or name == fpr.uid.name:
                accept = True
                break

        if not accept:
            self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
            return

        ## none of the packages are being taken over from other source packages
        for b in self.pkg.changes["binary"].keys():
            for suite in self.pkg.changes["distribution"].keys():
                for s in get_source_by_package_and_suite(b, suite, session):
                    if s.source != self.pkg.changes["source"]:
                        self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s, suite))
1826
1827
1828
    def check_transition(self, session):
        """Reject sourceful unstable uploads of packages participating in an
        ongoing testing transition (from the release transitions file) until
        the transition's target version has reached testing."""
        cnf = Config()

        sourcepkg = self.pkg.changes["source"]

        # No sourceful upload -> no need to do anything else, direct return
        # We also work with unstable uploads, not experimental or those going to some
        # proposed-updates queue
        if "source" not in self.pkg.changes["architecture"] or \
           "unstable" not in self.pkg.changes["distribution"]:
            return

        # Also only check if there is a file defined (and existant) with
        # checks.
        transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
        if transpath == "" or not os.path.exists(transpath):
            return

        # Parse the yaml file
        sourcefile = file(transpath, 'r')
        sourcecontent = sourcefile.read()
        try:
            transitions = yaml.load(sourcecontent)
        except yaml.YAMLError, msg:
            # This shouldn't happen, there is a wrapper to edit the file which
            # checks it, but we prefer to be safe than ending up rejecting
            # everything.
            utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
            return

        # Now look through all defined transitions
        for trans in transitions:
            t = transitions[trans]
            source = t["source"]
            expected = t["new"]

            # Will be None if nothing is in testing.
            current = get_source_in_suite(source, "testing", session)
            if current is not None:
                compare = apt_pkg.VersionCompare(current.version, expected)

            # NOTE: compare is only bound when current is not None; the
            # short-circuit `or` below relies on that ordering.
            if current is None or compare < 0:
                # This is still valid, the current version in testing is older than
                # the new version we wait for, or there is none in testing yet

                # Check if the source we look at is affected by this.
                if sourcepkg in t['packages']:
                    # The source is affected, lets reject it.

                    rejectmsg = "%s: part of the %s transition.\n\n" % (
                        sourcepkg, trans)

                    if current is not None:
                        currentlymsg = "at version %s" % (current.version)
                    else:
                        currentlymsg = "not present in testing"

                    rejectmsg += "Transition description: %s\n\n" % (t["reason"])

                    rejectmsg += "\n".join(textwrap.wrap("""Your package
is part of a testing transition designed to get %s migrated (it is
currently %s, we need version %s).  This transition is managed by the
Release Team, and %s is the Release-Team member responsible for it.
Please mail debian-release@lists.debian.org or contact %s directly if you
need further assistance.  You might want to upload to experimental until this
transition is done."""
                            % (source, currentlymsg, expected,t["rm"], t["rm"])))

                    self.rejects.append(rejectmsg)
                    return
1899
1900     ###########################################################################
1901     # End check_signed_by_key checks
1902     ###########################################################################
1903
1904     def build_summaries(self):
1905         """ Build a summary of changes the upload introduces. """
1906
1907         (byhand, new, summary, override_summary) = self.pkg.file_summary()
1908
1909         short_summary = summary
1910
1911         # This is for direport's benefit...
1912         f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1913
1914         if byhand or new:
1915             summary += "Changes: " + f
1916
1917         summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1918
1919         summary += self.announce(short_summary, 0)
1920
1921         return (summary, short_summary)
1922
1923     ###########################################################################
1924
1925     def close_bugs(self, summary, action):
1926         """
1927         Send mail to close bugs as instructed by the closes field in the changes file.
1928         Also add a line to summary if any work was done.
1929
1930         @type summary: string
1931         @param summary: summary text, as given by L{build_summaries}
1932
1933         @type action: bool
1934         @param action: Set to false no real action will be done.
1935
1936         @rtype: string
1937         @return: summary. If action was taken, extended by the list of closed bugs.
1938
1939         """
1940
1941         template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1942
1943         bugs = self.pkg.changes["closes"].keys()
1944
1945         if not bugs:
1946             return summary
1947
1948         bugs.sort()
1949         summary += "Closing bugs: "
1950         for bug in bugs:
1951             summary += "%s " % (bug)
1952             if action:
1953                 self.update_subst()
1954                 self.Subst["__BUG_NUMBER__"] = bug
1955                 if self.pkg.changes["distribution"].has_key("stable"):
1956                     self.Subst["__STABLE_WARNING__"] = """
1957 Note that this package is not part of the released stable Debian
1958 distribution.  It may have dependencies on other unreleased software,
1959 or other instabilities.  Please take care if you wish to install it.
1960 The update will eventually make its way into the next released Debian
1961 distribution."""
1962                 else:
1963                     self.Subst["__STABLE_WARNING__"] = ""
1964                 mail_message = utils.TemplateSubst(self.Subst, template)
1965                 utils.send_mail(mail_message)
1966
1967                 # Clear up after ourselves
1968                 del self.Subst["__BUG_NUMBER__"]
1969                 del self.Subst["__STABLE_WARNING__"]
1970
1971         if action and self.logger:
1972             self.logger.log(["closing bugs"] + bugs)
1973
1974         summary += "\n"
1975
1976         return summary
1977
1978     ###########################################################################
1979
    def announce(self, short_summary, action):
        """
        Send an announce mail about a new upload.

        @type short_summary: string
        @param short_summary: Short summary text to include in the mail

        @type action: bool
        @param action: Set to false no real action will be done.

        @rtype: string
        @return: Textstring about action taken.

        """

        cnf = Config()
        announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')

        # Only do announcements for source uploads with a recent dpkg-dev installed
        if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
           self.pkg.changes["architecture"].has_key("source"):
            return ""

        # Track which announce lists we have already mailed so a multi-suite
        # upload generates only one mail per list.
        lists_done = {}
        summary = ""

        self.Subst["__SHORT_SUMMARY__"] = short_summary

        for dist in self.pkg.changes["distribution"].keys():
            # Skip distributions with no matching suite or no announce list.
            suite = get_suite(dist)
            if suite is None: continue
            announce_list = suite.announce
            if announce_list == "" or lists_done.has_key(announce_list):
                continue

            lists_done[announce_list] = 1
            summary += "Announcing to %s\n" % (announce_list)

            if action:
                self.update_subst()
                self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
                # Bcc the package tracking server on sourceful uploads so it
                # can follow the package.
                if cnf.get("Dinstall::TrackingServer") and \
                   self.pkg.changes["architecture"].has_key("source"):
                    trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
                    self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto

                mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
                utils.send_mail(mail_message)

                del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]

        # Bug-close mails are sent from here too so their lines end up in
        # the same summary.
        if cnf.FindB("Dinstall::CloseBugs") and cnf.has_key("Dinstall::BugServer"):
            summary = self.close_bugs(summary, action)

        del self.Subst["__SHORT_SUMMARY__"]

        return summary
2037
2038     ###########################################################################
2039     @session_wrapper
2040     def accept (self, summary, short_summary, session=None):
2041         """
2042         Accept an upload.
2043
2044         This moves all files referenced from the .changes into the pool,
2045         sends the accepted mail, announces to lists, closes bugs and
2046         also checks for override disparities. If enabled it will write out
2047         the version history for the BTS Version Tracking and will finally call
2048         L{queue_build}.
2049
2050         @type summary: string
2051         @param summary: Summary text
2052
2053         @type short_summary: string
2054         @param short_summary: Short summary
2055         """
2056
2057         cnf = Config()
2058         stats = SummaryStats()
2059
2060         print "Installing."
2061         self.logger.log(["installing changes", self.pkg.changes_file])
2062
2063         binaries = []
2064         poolfiles = []
2065
2066         # Add the .dsc file to the DB first
2067         for newfile, entry in self.pkg.files.items():
2068             if entry["type"] == "dsc":
2069                 source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
2070                 for j in pfs:
2071                     poolfiles.append(j)
2072
2073         # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
2074         for newfile, entry in self.pkg.files.items():
2075             if entry["type"] == "deb":
2076                 b, pf = add_deb_to_db(self, newfile, session)
2077                 binaries.append(b)
2078                 poolfiles.append(pf)
2079
2080         # If this is a sourceful diff only upload that is moving
2081         # cross-component we need to copy the .orig files into the new
2082         # component too for the same reasons as above.
2083         # XXX: mhy: I think this should be in add_dsc_to_db
2084         if self.pkg.changes["architecture"].has_key("source"):
2085             for orig_file in self.pkg.orig_files.keys():
2086                 if not self.pkg.orig_files[orig_file].has_key("id"):
2087                     continue # Skip if it's not in the pool
2088                 orig_file_id = self.pkg.orig_files[orig_file]["id"]
2089                 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
2090                     continue # Skip if the location didn't change
2091
2092                 # Do the move
2093                 oldf = get_poolfile_by_id(orig_file_id, session)
2094                 old_filename = os.path.join(oldf.location.path, oldf.filename)
2095                 old_dat = {'size': oldf.filesize,   'md5sum': oldf.md5sum,
2096                            'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
2097
2098                 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
2099
2100                 # TODO: Care about size/md5sum collisions etc
2101                 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
2102
2103                 # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
2104                 if newf is None:
2105                     utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
2106                     newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
2107
2108                     session.flush()
2109
2110                     # Don't reference the old file from this changes
2111                     for p in poolfiles:
2112                         if p.file_id == oldf.file_id:
2113                             poolfiles.remove(p)
2114
2115                     poolfiles.append(newf)
2116
2117                     # Fix up the DSC references
2118                     toremove = []
2119
2120                     for df in source.srcfiles:
2121                         if df.poolfile.file_id == oldf.file_id:
2122                             # Add a new DSC entry and mark the old one for deletion
2123                             # Don't do it in the loop so we don't change the thing we're iterating over
2124                             newdscf = DSCFile()
2125                             newdscf.source_id = source.source_id
2126                             newdscf.poolfile_id = newf.file_id
2127                             session.add(newdscf)
2128
2129                             toremove.append(df)
2130
2131                     for df in toremove:
2132                         session.delete(df)
2133
2134                     # Flush our changes
2135                     session.flush()
2136
2137                     # Make sure that our source object is up-to-date
2138                     session.expire(source)
2139
2140         # Add changelog information to the database
2141         self.store_changelog()
2142
2143         # Install the files into the pool
2144         for newfile, entry in self.pkg.files.items():
2145             destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
2146             utils.move(newfile, destination)
2147             self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
2148             stats.accept_bytes += float(entry["size"])
2149
2150         # Copy the .changes file across for suite which need it.
2151         copy_changes = dict([(x.copychanges, '')
2152                              for x in session.query(Suite).filter(Suite.suite_name.in_(self.pkg.changes["distribution"].keys())).all()
2153                              if x.copychanges is not None])
2154
2155         for dest in copy_changes.keys():
2156             utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
2157
2158         # We're done - commit the database changes
2159         session.commit()
2160         # Our SQL session will automatically start a new transaction after
2161         # the last commit
2162
2163         # Now ensure that the metadata has been added
2164         # This has to be done after we copy the files into the pool
2165         # For source if we have it:
2166         if self.pkg.changes["architecture"].has_key("source"):
2167             import_metadata_into_db(source, session)
2168
2169         # Now for any of our binaries
2170         for b in binaries:
2171             import_metadata_into_db(b, session)
2172
2173         session.commit()
2174
2175         # Move the .changes into the 'done' directory
2176         ye, mo, da = time.gmtime()[0:3]
2177         donedir = os.path.join(cnf["Dir::Queue::Done"], str(ye), "%0.2d" % mo, "%0.2d" % da)
2178         if not os.path.isdir(donedir):
2179             os.makedirs(donedir)
2180
2181         utils.move(self.pkg.changes_file,
2182                    os.path.join(donedir, os.path.basename(self.pkg.changes_file)))
2183
2184         if self.pkg.changes["architecture"].has_key("source"):
2185             UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
2186
2187         self.update_subst()
2188         self.Subst["__SUMMARY__"] = summary
2189         mail_message = utils.TemplateSubst(self.Subst,
2190                                            os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
2191         utils.send_mail(mail_message)
2192         self.announce(short_summary, 1)
2193
2194         ## Helper stuff for DebBugs Version Tracking
2195         if cnf.Find("Dir::Queue::BTSVersionTrack"):
2196             if self.pkg.changes["architecture"].has_key("source"):
2197                 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2198                 version_history = os.fdopen(fd, 'w')
2199                 version_history.write(self.pkg.dsc["bts changelog"])
2200                 version_history.close()
2201                 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2202                                       self.pkg.changes_file[:-8]+".versions")
2203                 os.rename(temp_filename, filename)
2204                 os.chmod(filename, 0644)
2205
2206             # Write out the binary -> source mapping.
2207             (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2208             debinfo = os.fdopen(fd, 'w')
2209             for name, entry in sorted(self.pkg.files.items()):
2210                 if entry["type"] == "deb":
2211                     line = " ".join([entry["package"], entry["version"],
2212                                      entry["architecture"], entry["source package"],
2213                                      entry["source version"]])
2214                     debinfo.write(line+"\n")
2215             debinfo.close()
2216             filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2217                                   self.pkg.changes_file[:-8]+".debinfo")
2218             os.rename(temp_filename, filename)
2219             os.chmod(filename, 0644)
2220
2221         session.commit()
2222
2223         # Set up our copy queues (e.g. buildd queues)
2224         for suite_name in self.pkg.changes["distribution"].keys():
2225             suite = get_suite(suite_name, session)
2226             for q in suite.copy_queues:
2227                 for f in poolfiles:
2228                     q.add_file_from_pool(f)
2229
2230         session.commit()
2231
2232         # Finally...
2233         stats.accept_count += 1
2234
2235     def check_override(self):
2236         """
2237         Checks override entries for validity. Mails "Override disparity" warnings,
2238         if that feature is enabled.
2239
2240         Abandons the check if
2241           - override disparity checks are disabled
2242           - mail sending is disabled
2243         """
2244
2245         cnf = Config()
2246
2247         # Abandon the check if override disparity checks have been disabled
2248         if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
2249             return
2250
2251         summary = self.pkg.check_override()
2252
2253         if summary == "":
2254             return
2255
2256         overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2257
2258         self.update_subst()
2259         self.Subst["__SUMMARY__"] = summary
2260         mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2261         utils.send_mail(mail_message)
2262         del self.Subst["__SUMMARY__"]
2263
2264     ###########################################################################
2265
2266     def remove(self, from_dir=None):
2267         """
2268         Used (for instance) in p-u to remove the package from unchecked
2269
2270         Also removes the package from holding area.
2271         """
2272         if from_dir is None:
2273             from_dir = self.pkg.directory
2274         h = Holding()
2275
2276         for f in self.pkg.files.keys():
2277             os.unlink(os.path.join(from_dir, f))
2278             if os.path.exists(os.path.join(h.holding_dir, f)):
2279                 os.unlink(os.path.join(h.holding_dir, f))
2280
2281         os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2282         if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2283             os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2284
2285     ###########################################################################
2286
2287     def move_to_queue (self, queue):
2288         """
2289         Move files to a destination queue using the permissions in the table
2290         """
2291         h = Holding()
2292         utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2293                    queue.path, perms=int(queue.change_perms, 8))
2294         for f in self.pkg.files.keys():
2295             utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
2296
2297     ###########################################################################
2298
    def force_reject(self, reject_files):
        """
        Forcefully move files from the current directory to the
        reject directory.  If a file of the same name already exists in
        the reject directory, a new free name is found by appending a
        number (via utils.find_next_free) so the existing file is left
        untouched.

        @type reject_files: dict
        @param reject_files: file dictionary

        """

        cnf = Config()

        for file_entry in reject_files:
            # Skip any files which don't exist or which we don't have permission to copy.
            if os.access(file_entry, os.R_OK) == 0:
                continue

            dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)

            # O_EXCL makes the open fail if the destination already exists,
            # so a successful open atomically claims the name for us.
            try:
                dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
            except OSError, e:
                # File exists?  Let's find a new name by adding a number
                if e.errno == errno.EEXIST:
                    try:
                        dest_file = utils.find_next_free(dest_file, 255)
                    except NoFreeFilenameError:
                        # Something's either gone badly Pete Tong, or
                        # someone is trying to exploit us.
                        utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))
                        return

                    # Make sure we really got it
                    try:
                        dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
                    except OSError, e:
                        # Likewise
                        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
                        return
                else:
                    # Any other OSError (permissions, missing directory, ...)
                    # is unexpected; let it propagate.
                    raise
            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file_entry, dest_file, 1, perms=0660)
            os.close(dest_fd)
2346
2347     ###########################################################################
    def do_reject (self, manual=0, reject_message="", notes=""):
        """
        Reject an upload: move its files into the reject directory,
        write a <name>.reason file there and send a rejection mail.

        If C{manual} is true and no C{reject_message} was supplied,
        spawn an editor so the user can write one.

        @type manual: bool
        @param manual: manual or automated rejection

        @type reject_message: string
        @param reject_message: A reject message

        @type notes: list
        @param notes: note objects (with author/version/notedate/comment
            attributes) pre-filled into the editor buffer for a manual
            rejection

        @return: 0 on success; 1 if a manual rejection was abandoned
            (exits the process entirely if the user chooses Quit)

        """
        # If we weren't given a manual rejection message, spawn an
        # editor so the user can add one in...
        if manual and not reject_message:
            (fd, temp_filename) = utils.temp_filename()
            temp_file = os.fdopen(fd, 'w')
            if len(notes) > 0:
                for note in notes:
                    temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
                                    % (note.author, note.version, note.notedate, note.comment))
            temp_file.close()
            editor = os.environ.get("EDITOR","vi")
            answer = 'E'
            # Keep re-opening the editor until the user picks something
            # other than (E)dit.
            while answer == 'E':
                os.system("%s %s" % (editor, temp_filename))
                temp_fh = utils.open_file(temp_filename)
                reject_message = "".join(temp_fh.readlines())
                temp_fh.close()
                print "Reject message:"
                print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
                prompt = "[R]eject, Edit, Abandon, Quit ?"
                answer = "XXX"
                # An empty answer selects the default (the bracketed
                # letter extracted from the prompt by re_default_answer).
                while prompt.find(answer) == -1:
                    answer = utils.our_raw_input(prompt)
                    m = re_default_answer.search(prompt)
                    if answer == "":
                        answer = m.group(1)
                    answer = answer[:1].upper()
            os.unlink(temp_filename)
            if answer == 'A':
                return 1
            elif answer == 'Q':
                sys.exit(0)

        print "Rejecting.\n"

        cnf = Config()

        # Strip the trailing ".changes" (8 characters) to build the
        # <name>.reason filename in the reject directory.
        reason_filename = self.pkg.changes_file[:-8] + ".reason"
        reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)

        # Move all the files into the reject directory
        reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
        self.force_reject(reject_files)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reason_filename):
            os.unlink(reason_filename)
        reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)

        rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")

        self.update_subst()
        if not manual:
            # Automated rejection: the raw reject message goes into the
            # .reason file, and the mail is built from the template.
            self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
            self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
            self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
            os.write(reason_fd, reject_message)
            reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
        else:
            # Build up the rejection email
            user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
            self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
            self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
            self.Subst["__REJECT_MESSAGE__"] = ""
            self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
            reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
            # Write the rejection email out as the <foo>.reason file
            os.write(reason_fd, reject_mail_message)

        del self.Subst["__REJECTOR_ADDRESS__"]
        del self.Subst["__MANUAL_REJECT_MESSAGE__"]
        del self.Subst["__CC__"]

        os.close(reason_fd)

        # Send the rejection mail
        utils.send_mail(reject_mail_message)

        if self.logger:
            self.logger.log(["rejected", self.pkg.changes_file])

        stats = SummaryStats()
        stats.reject_count += 1
        return 0
2447
2448     ################################################################################
2449     def in_override_p(self, package, component, suite, binary_type, filename, session):
2450         """
2451         Check if a package already has override entries in the DB
2452
2453         @type package: string
2454         @param package: package name
2455
2456         @type component: string
2457         @param component: database id of the component
2458
2459         @type suite: int
2460         @param suite: database id of the suite
2461
2462         @type binary_type: string
2463         @param binary_type: type of the package
2464
2465         @type filename: string
2466         @param filename: filename we check
2467
2468         @return: the database result. But noone cares anyway.
2469
2470         """
2471
2472         cnf = Config()
2473
2474         if binary_type == "": # must be source
2475             file_type = "dsc"
2476         else:
2477             file_type = binary_type
2478
2479         # Override suite name; used for example with proposed-updates
2480         oldsuite = get_suite(suite, session)
2481         if (not oldsuite is None) and oldsuite.overridesuite:
2482             suite = oldsuite.overridesuite
2483
2484         result = get_override(package, suite, component, file_type, session)
2485
2486         # If checking for a source package fall back on the binary override type
2487         if file_type == "dsc" and len(result) < 1:
2488             result = get_override(package, suite, component, ['deb', 'udeb'], session)
2489
2490         # Remember the section and priority so we can check them later if appropriate
2491         if len(result) > 0:
2492             result = result[0]
2493             self.pkg.files[filename]["override section"] = result.section.section
2494             self.pkg.files[filename]["override priority"] = result.priority.priority
2495             return result
2496
2497         return None
2498
2499     ################################################################################
2500     def get_anyversion(self, sv_list, suite):
2501         """
2502         @type sv_list: list
2503         @param sv_list: list of (suite, version) tuples to check
2504
2505         @type suite: string
2506         @param suite: suite name
2507
2508         Description: TODO
2509         """
2510         Cnf = Config()
2511         anyversion = None
2512         anysuite = [suite] + [ vc.reference.suite_name for vc in get_version_checks(suite, "Enhances") ]
2513         for (s, v) in sv_list:
2514             if s in [ x.lower() for x in anysuite ]:
2515                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2516                     anyversion = v
2517
2518         return anyversion
2519
2520     ################################################################################
2521
    def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
        """
        Ensure versions are newer than existing packages in target
        suites and that cross-suite version checking rules as
        set out in the conf file are satisfied.

        @type sv_list: list
        @param sv_list: list of (suite, version) tuples to check

        @type filename: string
        @param filename: name of the file being checked; only used in
            the reject/warning messages

        @type new_version: string
        @param new_version: version of the upload being checked

        @type sourceful: bool
        @param sourceful: whether the upload includes source;
            "must be newer than" violations are only rejected for
            sourceful uploads
        """

        cnf = Config()

        # Check versions for each target suite
        for target_suite in self.pkg.changes["distribution"].keys():
            # Check we can find the target suite
            ts = get_suite(target_suite)
            if ts is None:
                self.rejects.append("Cannot find target suite %s to perform version checks" % target_suite)
                continue

            # Suites this upload must be strictly newer/older than,
            # taken from the configured version checks.
            must_be_newer_than = [ vc.reference.suite_name for vc in get_version_checks(target_suite, "MustBeNewerThan") ]
            must_be_older_than = [ vc.reference.suite_name for vc in get_version_checks(target_suite, "MustBeOlderThan") ]

            # Enforce "must be newer than target suite" even if conffile omits it
            if target_suite not in must_be_newer_than:
                must_be_newer_than.append(target_suite)

            for (suite, existent_version) in sv_list:
                # vercmp < 0: new_version older; 0: equal; > 0: newer
                vercmp = apt_pkg.VersionCompare(new_version, existent_version)

                if suite in must_be_newer_than and sourceful and vercmp < 1:
                    self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))

                if suite in must_be_older_than and vercmp > -1:
                    # cansave is set when a distribution-version mapping
                    # lets us propagate the upload to another suite
                    # instead of rejecting it outright.
                    cansave = 0

                    if self.pkg.changes.get('distribution-version', {}).has_key(suite):
                        # we really use the other suite, ignoring the conflicting one ...
                        addsuite = self.pkg.changes["distribution-version"][suite]

                        add_version = self.get_anyversion(sv_list, addsuite)
                        target_version = self.get_anyversion(sv_list, target_suite)

                        if not add_version:
                            # not add_version can only happen if we map to a suite
                            # that doesn't enhance the suite we're propup'ing from.
                            # so "propup-ver x a b c; map a d" is a problem only if
                            # d doesn't enhance a.
                            #
                            # i think we could always propagate in this case, rather
                            # than complaining. either way, this isn't a REJECT issue
                            #
                            # And - we really should complain to the dorks who configured dak
                            self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite))
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            cansave = 1
                        elif not target_version:
                            # not targets_version is true when the package is NEW
                            # we could just stick with the "...old version..." REJECT
                            # for this, I think.
                            self.rejects.append("Won't propogate NEW packages.")
                        elif apt_pkg.VersionCompare(new_version, add_version) < 0:
                            # propogation would be redundant. no need to reject though.
                            self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
                            cansave = 1
                        elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
                             apt_pkg.VersionCompare(add_version, target_version) >= 0:
                            # propogate!!
                            self.warnings.append("Propogating upload to %s" % (addsuite))
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            cansave = 1

                    if not cansave:
                        self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2605     ################################################################################
2606     def check_binary_against_db(self, filename, session):
2607         # Ensure version is sane
2608         self.cross_suite_version_check( \
2609             get_suite_version_by_package(self.pkg.files[filename]["package"], \
2610                 self.pkg.files[filename]["architecture"], session),
2611             filename, self.pkg.files[filename]["version"], sourceful=False)
2612
2613         # Check for any existing copies of the file
2614         q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2615         q = q.filter_by(version=self.pkg.files[filename]["version"])
2616         q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2617
2618         if q.count() > 0:
2619             self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
2620
2621     ################################################################################
2622
2623     def check_source_against_db(self, filename, session):
2624         source = self.pkg.dsc.get("source")
2625         version = self.pkg.dsc.get("version")
2626
2627         # Ensure version is sane
2628         self.cross_suite_version_check( \
2629             get_suite_version_by_source(source, session), filename, version,
2630             sourceful=True)
2631
2632     ################################################################################
2633     def check_dsc_against_db(self, filename, session):
2634         """
2635
2636         @warning: NB: this function can remove entries from the 'files' index [if
2637          the orig tarball is a duplicate of the one in the archive]; if
2638          you're iterating over 'files' and call this function as part of
2639          the loop, be sure to add a check to the top of the loop to
2640          ensure you haven't just tried to dereference the deleted entry.
2641
2642         """
2643
2644         Cnf = Config()
2645         self.pkg.orig_files = {} # XXX: do we need to clear it?
2646         orig_files = self.pkg.orig_files
2647
2648         # Try and find all files mentioned in the .dsc.  This has
2649         # to work harder to cope with the multiple possible
2650         # locations of an .orig.tar.gz.
2651         # The ordering on the select is needed to pick the newest orig
2652         # when it exists in multiple places.
2653         for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2654             found = None
2655             if self.pkg.files.has_key(dsc_name):
2656                 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2657                 actual_size = int(self.pkg.files[dsc_name]["size"])
2658                 found = "%s in incoming" % (dsc_name)
2659
2660                 # Check the file does not already exist in the archive
2661                 ql = get_poolfile_like_name(dsc_name, session)
2662
2663                 # Strip out anything that isn't '%s' or '/%s$'
2664                 for i in ql:
2665                     if not i.filename.endswith(dsc_name):
2666                         ql.remove(i)
2667
2668                 # "[dak] has not broken them.  [dak] has fixed a
2669                 # brokenness.  Your crappy hack exploited a bug in
2670                 # the old dinstall.
2671                 #
2672                 # "(Come on!  I thought it was always obvious that
2673                 # one just doesn't release different files with
2674                 # the same name and version.)"
2675                 #                        -- ajk@ on d-devel@l.d.o
2676
2677                 if len(ql) > 0:
2678                     # Ignore exact matches for .orig.tar.gz
2679                     match = 0
2680                     if re_is_orig_source.match(dsc_name):
2681                         for i in ql:
2682                             if self.pkg.files.has_key(dsc_name) and \
2683                                int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2684                                self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2685                                 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2686                                 # TODO: Don't delete the entry, just mark it as not needed
2687                                 # This would fix the stupidity of changing something we often iterate over
2688                                 # whilst we're doing it
2689                                 del self.pkg.files[dsc_name]
2690                                 dsc_entry["files id"] = i.file_id
2691                                 if not orig_files.has_key(dsc_name):
2692                                     orig_files[dsc_name] = {}
2693                                 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2694                                 match = 1
2695
2696                                 # Don't bitch that we couldn't find this file later
2697                                 try:
2698                                     self.later_check_files.remove(dsc_name)
2699                                 except ValueError:
2700                                     pass
2701
2702
2703                     if not match:
2704                         self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2705
2706             elif re_is_orig_source.match(dsc_name):
2707                 # Check in the pool
2708                 ql = get_poolfile_like_name(dsc_name, session)
2709
2710                 # Strip out anything that isn't '%s' or '/%s$'
2711                 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2712                 for i in ql:
2713                     if not i.filename.endswith(dsc_name):
2714                         ql.remove(i)
2715
2716                 if len(ql) > 0:
2717                     # Unfortunately, we may get more than one match here if,
2718                     # for example, the package was in potato but had an -sa
2719                     # upload in woody.  So we need to choose the right one.
2720
2721                     # default to something sane in case we don't match any or have only one
2722                     x = ql[0]
2723
2724                     if len(ql) > 1:
2725                         for i in ql:
2726                             old_file = os.path.join(i.location.path, i.filename)
2727                             old_file_fh = utils.open_file(old_file)
2728                             actual_md5 = apt_pkg.md5sum(old_file_fh)
2729                             old_file_fh.close()
2730                             actual_size = os.stat(old_file)[stat.ST_SIZE]
2731                             if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2732                                 x = i
2733
2734                     old_file = os.path.join(i.location.path, i.filename)
2735                     old_file_fh = utils.open_file(old_file)
2736                     actual_md5 = apt_pkg.md5sum(old_file_fh)
2737                     old_file_fh.close()
2738                     actual_size = os.stat(old_file)[stat.ST_SIZE]
2739                     found = old_file
2740                     suite_type = x.location.archive_type
2741                     # need this for updating dsc_files in install()
2742                     dsc_entry["files id"] = x.file_id
2743                     # See install() in process-accepted...
2744                     if not orig_files.has_key(dsc_name):
2745                         orig_files[dsc_name] = {}
2746                     orig_files[dsc_name]["id"] = x.file_id
2747                     orig_files[dsc_name]["path"] = old_file
2748                     orig_files[dsc_name]["location"] = x.location.location_id
2749                 else:
2750                     # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2751                     # Not there? Check the queue directories...
2752                     for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2753                         if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2754                             continue
2755                         in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2756                         if os.path.exists(in_otherdir):
2757                             in_otherdir_fh = utils.open_file(in_otherdir)
2758                             actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2759                             in_otherdir_fh.close()
2760                             actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2761                             found = in_otherdir
2762                             if not orig_files.has_key(dsc_name):
2763                                 orig_files[dsc_name] = {}
2764                             orig_files[dsc_name]["path"] = in_otherdir
2765
2766                     if not found:
2767                         self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2768                         continue
2769             else:
2770                 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2771                 continue
2772             if actual_md5 != dsc_entry["md5sum"]:
2773                 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2774             if actual_size != int(dsc_entry["size"]):
2775                 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2776
2777     ################################################################################
2778     # This is used by process-new and process-holding to recheck a changes file
2779     # at the time we're running.  It mainly wraps various other internal functions
2780     # and is similar to accepted_checks - these should probably be tidied up
2781     # and combined
2782     def recheck(self, session):
2783         cnf = Config()
2784         for f in self.pkg.files.keys():
2785             # The .orig.tar.gz can disappear out from under us is it's a
2786             # duplicate of one in the archive.
2787             if not self.pkg.files.has_key(f):
2788                 continue
2789
2790             entry = self.pkg.files[f]
2791
2792             # Check that the source still exists
2793             if entry["type"] == "deb":
2794                 source_version = entry["source version"]
2795                 source_package = entry["source package"]
2796                 if not self.pkg.changes["architecture"].has_key("source") \
2797                    and not source_exists(source_package, source_version, \
2798                     suites = self.pkg.changes["distribution"].keys(), session = session):
2799                     source_epochless_version = re_no_epoch.sub('', source_version)
2800                     dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2801                     found = False
2802                     for q in ["Embargoed", "Unembargoed", "Newstage"]:
2803                         if cnf.has_key("Dir::Queue::%s" % (q)):
2804                             if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
2805                                 found = True
2806                     if not found:
2807                         self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2808
2809             # Version and file overwrite checks
2810             if entry["type"] == "deb":
2811                 self.check_binary_against_db(f, session)
2812             elif entry["type"] == "dsc":
2813                 self.check_source_against_db(f, session)
2814                 self.check_dsc_against_db(f, session)
2815
2816     ################################################################################
2817     def accepted_checks(self, overwrite_checks, session):
2818         # Recheck anything that relies on the database; since that's not
2819         # frozen between accept and our run time when called from p-a.
2820
2821         # overwrite_checks is set to False when installing to stable/oldstable
2822
2823         propogate={}
2824         nopropogate={}
2825
2826         # Find the .dsc (again)
2827         dsc_filename = None
2828         for f in self.pkg.files.keys():
2829             if self.pkg.files[f]["type"] == "dsc":
2830                 dsc_filename = f
2831
2832         for checkfile in self.pkg.files.keys():
2833             # The .orig.tar.gz can disappear out from under us is it's a
2834             # duplicate of one in the archive.
2835             if not self.pkg.files.has_key(checkfile):
2836                 continue
2837
2838             entry = self.pkg.files[checkfile]
2839
2840             # Check that the source still exists
2841             if entry["type"] == "deb":
2842                 source_version = entry["source version"]
2843                 source_package = entry["source package"]
2844                 if not self.pkg.changes["architecture"].has_key("source") \
2845                    and not source_exists(source_package, source_version, \
2846                     suites = self.pkg.changes["distribution"].keys(), \
2847                     session = session):
2848                     self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2849
2850             # Version and file overwrite checks
2851             if overwrite_checks:
2852                 if entry["type"] == "deb":
2853                     self.check_binary_against_db(checkfile, session)
2854                 elif entry["type"] == "dsc":
2855                     self.check_source_against_db(checkfile, session)
2856                     self.check_dsc_against_db(dsc_filename, session)
2857
2858             # propogate in the case it is in the override tables:
2859             for suite in self.pkg.changes.get("propdistribution", {}).keys():
2860                 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2861                     propogate[suite] = 1
2862                 else:
2863                     nopropogate[suite] = 1
2864
2865         for suite in propogate.keys():
2866             if suite in nopropogate:
2867                 continue
2868             self.pkg.changes["distribution"][suite] = 1
2869
2870         for checkfile in self.pkg.files.keys():
2871             # Check the package is still in the override tables
2872             for suite in self.pkg.changes["distribution"].keys():
2873                 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2874                     self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2875
2876     ################################################################################
2877     # If any file of an upload has a recent mtime then chances are good
2878     # the file is still being uploaded.
2879
2880     def upload_too_new(self):
2881         cnf = Config()
2882         too_new = False
2883         # Move back to the original directory to get accurate time stamps
2884         cwd = os.getcwd()
2885         os.chdir(self.pkg.directory)
2886         file_list = self.pkg.files.keys()
2887         file_list.extend(self.pkg.dsc_files.keys())
2888         file_list.append(self.pkg.changes_file)
2889         for f in file_list:
2890             try:
2891                 last_modified = time.time()-os.path.getmtime(f)
2892                 if last_modified < int(cnf["Dinstall::SkipTime"]):
2893                     too_new = True
2894                     break
2895             except:
2896                 pass
2897
2898         os.chdir(cwd)
2899         return too_new
2900
2901     def store_changelog(self):
2902
2903         # Skip binary-only upload if it is not a bin-NMU
2904         if not self.pkg.changes['architecture'].has_key('source'):
2905             from daklib.regexes import re_bin_only_nmu
2906             if not re_bin_only_nmu.search(self.pkg.changes['version']):
2907                 return
2908
2909         session = DBConn().session()
2910
2911         # Check if upload already has a changelog entry
2912         query = """SELECT changelog_id FROM changes WHERE source = :source
2913                    AND version = :version AND architecture = :architecture AND changelog_id != 0"""
2914         if session.execute(query, {'source': self.pkg.changes['source'], \
2915                                    'version': self.pkg.changes['version'], \
2916                                    'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
2917             session.commit()
2918             return
2919
2920         # Add current changelog text into changelogs_text table, return created ID
2921         query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
2922         ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]
2923
2924         # Link ID to the upload available in changes table
2925         query = """UPDATE changes SET changelog_id = :id WHERE source = :source
2926                    AND version = :version AND architecture = :architecture"""
2927         session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
2928                                 'version': self.pkg.changes['version'], \
2929                                 'architecture': " ".join(self.pkg.changes['architecture'].keys())})
2930
2931         session.commit()