]> git.decadent.org.uk Git - dak.git/blob - daklib/queue.py
Don't say we're sending mail when we're not
[dak.git] / daklib / queue.py
1 #!/usr/bin/env python
2 # vim:set et sw=4:
3
4 """
5 Queue utility functions for dak
6
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009, 2010  Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
11 """
12
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
17
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
21 # GNU General Public License for more details.
22
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
26
27 ###############################################################################
28
29 import errno
30 import os
31 import stat
32 import sys
33 import time
34 import apt_inst
35 import apt_pkg
36 import utils
37 import commands
38 import shutil
39 import textwrap
40 from types import *
41 from sqlalchemy.sql.expression import desc
42 from sqlalchemy.orm.exc import NoResultFound
43
44 import yaml
45
46 from dak_exceptions import *
47 from changes import *
48 from regexes import *
49 from config import Config
50 from holding import Holding
51 from urgencylog import UrgencyLog
52 from dbconn import *
53 from summarystats import SummaryStats
54 from utils import parse_changes, check_dsc_files, build_package_set
55 from textutils import fix_maintainer
56 from lintian import parse_lintian_output, generate_reject_messages
57 from contents import UnpackedSource
58
59 ###############################################################################
60
def get_type(f, session):
    """
    Determine and validate the override file type of C{f}.

    @type f: dict
    @param f: file entry from Changes object

    @type session: SQLA Session
    @param session: SQL Alchemy session object

    @rtype: string
    @return: filetype

    """
    # Work out the raw type first.
    if f.has_key("dbtype"):
        filetype = f["dbtype"]
    elif re_source_ext.match(f["type"]):
        filetype = "dsc"
    elif f['architecture'] == 'source' and f["type"] == 'unreadable':
        # Best-effort path: pass the raw type straight through, unvalidated.
        utils.warn('unreadable source file (will continue and hope for the best)')
        return f["type"]
    else:
        filetype = f["type"]
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (filetype))

    # Make sure an override type of that name actually exists.
    if get_override_type(filetype, session) is None:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (filetype))

    return filetype
93
94 ################################################################################
95
96 # Determine what parts in a .changes are NEW
97
98 def determine_new(filename, changes, files, warn=1, session = None, dsc = None, new = None):
99     """
100     Determine what parts in a C{changes} file are NEW.
101
102     @type filename: str
103     @param filename: changes filename
104
105     @type changes: Upload.Pkg.changes dict
106     @param changes: Changes dictionary
107
108     @type files: Upload.Pkg.files dict
109     @param files: Files dictionary
110
111     @type warn: bool
112     @param warn: Warn if overrides are added for (old)stable
113
114     @type dsc: Upload.Pkg.dsc dict
115     @param dsc: (optional); Dsc dictionary
116
117     @type new: dict
118     @param new: new packages as returned by a previous call to this function, but override information may have changed
119
120     @rtype: dict
121     @return: dictionary of NEW components.
122
123     """
124     # TODO: This should all use the database instead of parsing the changes
125     # file again
126     byhand = {}
127     if new is None:
128         new = {}
129
130     dbchg = get_dbchange(filename, session)
131     if dbchg is None:
132         print "Warning: cannot find changes file in database; won't check byhand"
133
134     # Try to get the Package-Set field from an included .dsc file (if possible).
135     if dsc:
136         for package, entry in build_package_set(dsc, session).items():
137             if not new.has_key(package):
138                 new[package] = entry
139
140     # Build up a list of potentially new things
141     for name, f in files.items():
142         # Keep a record of byhand elements
143         if f["section"] == "byhand":
144             byhand[name] = 1
145             continue
146
147         pkg = f["package"]
148         priority = f["priority"]
149         section = f["section"]
150         file_type = get_type(f, session)
151         component = f["component"]
152
153         if file_type == "dsc":
154             priority = "source"
155
156         if not new.has_key(pkg):
157             new[pkg] = {}
158             new[pkg]["priority"] = priority
159             new[pkg]["section"] = section
160             new[pkg]["type"] = file_type
161             new[pkg]["component"] = component
162             new[pkg]["files"] = []
163         else:
164             old_type = new[pkg]["type"]
165             if old_type != file_type:
166                 # source gets trumped by deb or udeb
167                 if old_type == "dsc":
168                     new[pkg]["priority"] = priority
169                     new[pkg]["section"] = section
170                     new[pkg]["type"] = file_type
171                     new[pkg]["component"] = component
172
173         new[pkg]["files"].append(name)
174
175         if f.has_key("othercomponents"):
176             new[pkg]["othercomponents"] = f["othercomponents"]
177
178     # Fix up the list of target suites
179     cnf = Config()
180     for suite in changes["suite"].keys():
181         oldsuite = get_suite(suite, session)
182         if not oldsuite:
183             print "WARNING: Invalid suite %s found" % suite
184             continue
185
186         if oldsuite.overridesuite:
187             newsuite = get_suite(oldsuite.overridesuite, session)
188
189             if newsuite:
190                 print "INFORMATION: Using overrides from suite %s instead of suite %s" % (
191                     oldsuite.overridesuite, suite)
192                 del changes["suite"][suite]
193                 changes["suite"][oldsuite.overridesuite] = 1
194             else:
195                 print "WARNING: Told to use overridesuite %s for %s but it doesn't exist.  Bugger" % (
196                     oldsuite.overridesuite, suite)
197
198     # Check for unprocessed byhand files
199     if dbchg is not None:
200         for b in byhand.keys():
201             # Find the file entry in the database
202             found = False
203             for f in dbchg.files:
204                 if f.filename == b:
205                     found = True
206                     # If it's processed, we can ignore it
207                     if f.processed:
208                         del byhand[b]
209                     break
210
211             if not found:
212                 print "Warning: Couldn't find BYHAND item %s in the database; assuming unprocessed"
213
214     # Check for new stuff
215     for suite in changes["suite"].keys():
216         for pkg in new.keys():
217             ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
218             if len(ql) > 0:
219                 for file_entry in new[pkg]["files"]:
220                     if files[file_entry].has_key("new"):
221                         del files[file_entry]["new"]
222                 del new[pkg]
223
224     if warn:
225         for s in ['stable', 'oldstable']:
226             if changes["suite"].has_key(s):
227                 print "WARNING: overrides will be added for %s!" % s
228         for pkg in new.keys():
229             if new[pkg].has_key("othercomponents"):
230                 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
231
232     return new, byhand
233
234 ################################################################################
235
def check_valid(new, session = None):
    """
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages can not be udeb
      - source priority can only be assigned to dsc file types

    An invalid section/priority is signalled by setting the package's
    "section id" / "priority id" entry to -1.

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.

    """
    for pkg in new.keys():
        section_name = new[pkg]["section"]
        priority_name = new[pkg]["priority"]
        file_type = new[pkg]["type"]

        # Look up the section; unknown sections are marked with -1.
        section = get_section(section_name, session)
        if section is None:
            new[pkg]["section id"] = -1
        else:
            new[pkg]["section id"] = section.section_id

        # Look up the priority; unknown priorities are marked with -1.
        priority = get_priority(priority_name, session)
        if priority is None:
            new[pkg]["priority id"] = -1
        else:
            new[pkg]["priority id"] = priority.priority_id

        # Sanity checks
        di = section_name.find("debian-installer") != -1

        # If d-i, we must be udeb and vice-versa
        if     (di and file_type not in ("udeb", "dsc")) or \
           (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1

        # If dsc we need to be source and vice-versa.
        # Fixed: compare the priority *name*; the original compared the
        # Priority ORM object returned by get_priority() to the string
        # "source", which is never equal, so the check degenerated to
        # flagging every dsc and never catching wrong priorities on dscs.
        if (priority_name == "source" and file_type != "dsc") or \
           (priority_name != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1
277
278 ###############################################################################
279
# Used by Upload.check_timestamps
class TarTime(object):
    """
    Tar-walk callback helper that records members whose mtime lies
    outside [past_cutoff, future_cutoff].

    After a walk, C{future_files} maps member names to mtimes newer than
    C{future_cutoff} and C{ancient_files} maps names to mtimes older than
    C{past_cutoff}.
    """
    def __init__(self, future_cutoff, past_cutoff):
        self.reset()
        self.future_cutoff = future_cutoff
        self.past_cutoff = past_cutoff

    def reset(self):
        # name -> mtime maps of offending members found so far.
        self.future_files = {}
        self.ancient_files = {}

    def callback(self, member, data):
        # Fixed: the original assigned self.future_files[Name] = MTime with
        # undefined names Name/MTime, raising NameError on the first
        # out-of-range member.  Use the tar member's own attributes.
        if member.mtime > self.future_cutoff:
            self.future_files[member.name] = member.mtime
        if member.mtime < self.past_cutoff:
            self.ancient_files[member.name] = member.mtime
296
297 ###############################################################################
298
def prod_maintainer(notes, upload):
    """
    Interactively compose and send a "prod" mail about C{upload}, seeding
    the editor buffer with the comment text of each note in C{notes}.

    Prompts [P]rod, Edit, Abandon, Quit: returns without mailing on
    Abandon and exits the process on Quit.
    """
    cnf = Config()

    # Here we prepare an editor and get them ready to prod...
    (fd, temp_filename) = utils.temp_filename()
    temp_file = os.fdopen(fd, 'w')
    for note in notes:
        temp_file.write(note.comment)
    temp_file.close()
    editor = os.environ.get("EDITOR","vi")
    answer = 'E'
    # Re-open the editor until the user picks something other than (E)dit.
    while answer == 'E':
        os.system("%s %s" % (editor, temp_filename))
        temp_fh = utils.open_file(temp_filename)
        prod_message = "".join(temp_fh.readlines())
        temp_fh.close()
        print "Prod message:"
        print utils.prefix_multi_line_string(prod_message,"  ",include_blank_lines=1)
        prompt = "[P]rod, Edit, Abandon, Quit ?"
        answer = "XXX"
        # Keep asking until the reply is a letter from the prompt; an empty
        # reply selects the default (bracketed) answer.
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            m = re_default_answer.search(prompt)
            if answer == "":
                answer = m.group(1)
            answer = answer[:1].upper()
    os.unlink(temp_filename)
    if answer == 'A':
        return
    elif answer == 'Q':
        end()
        sys.exit(0)
    # Otherwise, do the proding...
    user_email_address = utils.whoami() + " <%s>" % (
        cnf["Dinstall::MyAdminAddress"])

    Subst = upload.Subst

    # Fill in the mail-specific substitutions on top of the upload's map.
    Subst["__FROM_ADDRESS__"] = user_email_address
    Subst["__PROD_MESSAGE__"] = prod_message
    Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]

    prod_mail_message = utils.TemplateSubst(
        Subst,cnf["Dir::Templates"]+"/process-new.prod")

    # Send the prod mail
    utils.send_mail(prod_mail_message)

    print "Sent prodding message"
348
349 ################################################################################
350
def edit_note(note, upload, session, trainee=False):
    """
    Interactively edit a note for C{upload} in $EDITOR and store it as a
    NewComment row via C{session}.

    Prompts [D]one, Edit, Abandon, Quit: returns without saving on
    Abandon and exits the process on Quit.

    NOTE(review): the C{note} argument is currently unused -- the editor
    starts from an empty temporary file.
    """
    # Write the current data to a temporary file
    (fd, temp_filename) = utils.temp_filename()
    editor = os.environ.get("EDITOR","vi")
    answer = 'E'
    # Re-open the editor until the user picks something other than (E)dit.
    while answer == 'E':
        os.system("%s %s" % (editor, temp_filename))
        temp_file = utils.open_file(temp_filename)
        newnote = temp_file.read().rstrip()
        temp_file.close()
        print "New Note:"
        print utils.prefix_multi_line_string(newnote,"  ")
        prompt = "[D]one, Edit, Abandon, Quit ?"
        answer = "XXX"
        # An empty reply selects the default (bracketed) answer.
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            m = re_default_answer.search(prompt)
            if answer == "":
                answer = m.group(1)
            answer = answer[:1].upper()
    os.unlink(temp_filename)
    if answer == 'A':
        return
    elif answer == 'Q':
        end()
        sys.exit(0)

    # Persist the note against the upload's source package and version.
    comment = NewComment()
    comment.package = upload.pkg.changes["source"]
    comment.version = upload.pkg.changes["version"]
    comment.comment = newnote
    comment.author  = utils.whoami()
    comment.trainee = trainee
    session.add(comment)
    session.commit()
386
387 ###############################################################################
388
# Suite names that Debian Maintainers (DMs) are allowed to upload to.
dm_suites = ['unstable', 'experimental']
391
def get_newest_source(source, session):
    """Return the newest DBSource object for C{source} within dm_suites."""
    ## the most recent version of the package uploaded to unstable or
    ## experimental includes the field "DM-Upload-Allowed: yes" in the source
    ## section of its control file
    query = session.query(DBSource). \
        filter(DBSource.source == source). \
        filter(DBSource.suites.any(Suite.suite_name.in_(dm_suites))). \
        order_by(desc('source.version'))
    return query.first()
401
def get_suite_version_by_source(source, session):
    """Return a list of (suite_name, version) tuples for a source package."""
    query = session.query(Suite.suite_name, DBSource.version)
    query = query.join(Suite.sources).filter_by(source = source)
    return query.all()
407
def get_source_by_package_and_suite(package, suite_name, session):
    '''
    Return a DBSource query restricted to sources building binary
    C{package} that appear in suite C{suite_name}.
    '''
    query = session.query(DBSource)
    query = query.join(DBSource.binaries).filter_by(package = package)
    return query.join(DBBinary.suites).filter_by(suite_name = suite_name)
416
def get_suite_version_by_package(package, arch_string, session):
    '''
    Return a list of (suite_name, version) tuples for binary C{package}
    built for C{arch_string} (architecture 'all' is always included).
    '''
    wanted_arches = [arch_string, 'all']
    query = session.query(Suite.suite_name, DBBinary.version)
    query = query.join(Suite.binaries).filter_by(package = package)
    query = query.join(DBBinary.architecture)
    return query.filter(Architecture.arch_string.in_(wanted_arches)).all()
426
427 class Upload(object):
428     """
429     Everything that has to do with an upload processed.
430
431     """
    def __init__(self):
        # No logger until a caller installs one.
        self.logger = None
        # Parsed .changes data and related per-upload state.
        self.pkg = Changes()
        # Initialise the Subst map and the rejects/warnings/notes lists.
        self.reset()
436
437     ###########################################################################
438
439     def reset (self):
440         """ Reset a number of internal variables."""
441
442         # Initialize the substitution template map
443         cnf = Config()
444         self.Subst = {}
445         self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
446         if cnf.has_key("Dinstall::BugServer"):
447             self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
448         self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
449         self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
450
451         self.rejects = []
452         self.warnings = []
453         self.notes = []
454
455         self.later_check_files = []
456
457         self.pkg.reset()
458
459     def package_info(self):
460         """
461         Format various messages from this Upload to send to the maintainer.
462         """
463
464         msgs = (
465             ('Reject Reasons', self.rejects),
466             ('Warnings', self.warnings),
467             ('Notes', self.notes),
468         )
469
470         msg = ''
471         for title, messages in msgs:
472             if messages:
473                 msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
474         msg += '\n\n'
475
476         return msg
477
478     ###########################################################################
    def update_subst(self):
        """
        Set up the per-package template substitution mappings.

        Fills self.Subst with __ARCHITECTURE__, __CHANGES_FILENAME__,
        __FILE_CONTENTS__, __MAINTAINER*__, __REJECT_MESSAGE__, __SOURCE__,
        __VERSION__ and __SUITE__ based on self.pkg.changes and the dak
        configuration.
        """

        cnf = Config()

        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not self.pkg.changes.has_key("architecture") or not \
           isinstance(self.pkg.changes["architecture"], dict):
            self.pkg.changes["architecture"] = { "Unknown" : "" }

        # and maintainer2047 may not exist.
        if not self.pkg.changes.has_key("maintainer2047"):
            self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]

        self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
        self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if self.pkg.changes["architecture"].has_key("source") and \
           self.pkg.changes["changedby822"] != "" and \
           (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):

            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
        else:
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")

        # Process policy doesn't set the fingerprint field and I don't want to make it
        # do it for now as I don't want to have to deal with the case where we accepted
        # the package into PU-NEW, but the fingerprint has gone away from the keyring in
        # the meantime so the package will be remarked as rejectable.  Urgh.
        # TODO: Fix this properly
        if self.pkg.changes.has_key('fingerprint'):
            session = DBConn().session()
            fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
            # Sponsored uploads also get a copy sent to the sponsor address.
            if fpr and self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
                if self.pkg.changes.has_key("sponsoremail"):
                    self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
            session.close()

        # Bcc the package tracking system, if one is configured.
        if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
            self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if cnf.get("Dinstall::OverrideMaintainer"):
            self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
            self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]

        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
        self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
        self.Subst["__SUITE__"] = ", ".join(self.pkg.changes["distribution"])
535
536     ###########################################################################
    def load_changes(self, filename):
        """
        Load a changes file and setup a dictionary around it. Also checks for
        mandatory fields within.

        @type filename: string
        @param filename: Changes filename, full path.

        @rtype: boolean
        @return: whether the changes file was valid or not.  We may want to
                 reject even if this is True (see what gets put in self.rejects).
                 This is simply to prevent us even trying things later which will
                 fail because we couldn't properly parse the file.
        """
        # NOTE(review): Cnf appears unused in this method.
        Cnf = Config()
        self.pkg.changes_file = filename

        # Parse the .changes field into a dictionary
        try:
            self.pkg.changes.update(parse_changes(filename))
        except CantOpenError:
            self.rejects.append("%s: can't read file." % (filename))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except ChangesUnicodeError:
            self.rejects.append("%s: changes file not proper utf-8" % (filename))
            return False

        # Parse the Files field from the .changes into another dictionary
        try:
            self.pkg.files.update(utils.build_file_list(self.pkg.changes))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (filename, format))
            return False

        # Check for mandatory fields
        for i in ("distribution", "source", "binary", "architecture",
                  "version", "maintainer", "files", "changes", "description"):
            if not self.pkg.changes.has_key(i):
                # Avoid undefined errors later
                self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
                return False

        # Strip a source version in brackets from the source field
        if re_strip_srcver.search(self.pkg.changes["source"]):
            self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])

        # Ensure the source field is a valid package name.
        if not re_valid_pkg_name.match(self.pkg.changes["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))

        # Split multi-value fields into a lower-level dictionary
        for i in ("architecture", "distribution", "binary", "closes"):
            o = self.pkg.changes.get(i, "")
            if o != "":
                del self.pkg.changes[i]

            self.pkg.changes[i] = {}

            # Each whitespace-separated value becomes a key mapped to 1.
            for j in o.split():
                self.pkg.changes[i][j] = 1

        # Fix the Maintainer: field to be RFC822/2047 compatible
        try:
            (self.pkg.changes["maintainer822"],
             self.pkg.changes["maintainer2047"],
             self.pkg.changes["maintainername"],
             self.pkg.changes["maintaineremail"]) = \
                   fix_maintainer (self.pkg.changes["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["maintainer"], msg))

        # ...likewise for the Changed-By: field if it exists.
        try:
            (self.pkg.changes["changedby822"],
             self.pkg.changes["changedby2047"],
             self.pkg.changes["changedbyname"],
             self.pkg.changes["changedbyemail"]) = \
                   fix_maintainer (self.pkg.changes.get("changed-by", ""))
        except ParseMaintError, msg:
            # Fall back to empty values so later code can rely on these keys existing.
            self.pkg.changes["changedby822"] = ""
            self.pkg.changes["changedby2047"] = ""
            self.pkg.changes["changedbyname"] = ""
            self.pkg.changes["changedbyemail"] = ""

            self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["changed-by"], msg))

        # Ensure all the values in Closes: are numbers
        if self.pkg.changes.has_key("closes"):
            for i in self.pkg.changes["closes"].keys():
                if re_isanum.match (i) == None:
                    self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))

        # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
        self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
        self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])

        # Check the .changes is non-empty
        if not self.pkg.files:
            self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
            return False

        # Changes was syntactically valid even if we'll reject
        return True
648
649     ###########################################################################
650
    def check_distributions(self):
        """
        Check and map the Distribution field.

        Applies the configured SuiteMappings (map, silent-map,
        map-unreleased, ignore, reject, propup-version) to
        self.pkg.changes["distribution"], then verifies at least one valid,
        known target distribution remains.  Problems are appended to
        self.rejects/self.warnings/self.notes.
        """

        Cnf = Config()

        # Handle suite mappings
        for m in Cnf.ValueList("SuiteMappings"):
            args = m.split()
            mtype = args[0]
            if mtype == "map" or mtype == "silent-map":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    del self.pkg.changes["distribution"][source]
                    self.pkg.changes["distribution"][dest] = 1
                    if mtype != "silent-map":
                        self.notes.append("Mapping %s to %s." % (source, dest))
                # Keep any distribution-version entry in step with the mapping.
                if self.pkg.changes.has_key("distribution-version"):
                    if self.pkg.changes["distribution-version"].has_key(source):
                        self.pkg.changes["distribution-version"][source]=dest
            elif mtype == "map-unreleased":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    # Remap as soon as one upload architecture is not part of
                    # the source suite.
                    for arch in self.pkg.changes["architecture"].keys():
                        if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
                            self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
                            del self.pkg.changes["distribution"][source]
                            self.pkg.changes["distribution"][dest] = 1
                            break
            elif mtype == "ignore":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    del self.pkg.changes["distribution"][suite]
                    self.warnings.append("Ignoring %s as a target suite." % (suite))
            elif mtype == "reject":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    self.rejects.append("Uploads to %s are not accepted." % (suite))
            elif mtype == "propup-version":
                # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
                #
                # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
                if self.pkg.changes["distribution"].has_key(args[1]):
                    self.pkg.changes.setdefault("distribution-version", {})
                    for suite in args[2:]:
                        self.pkg.changes["distribution-version"][suite] = suite

        # Ensure there is (still) a target distribution
        if len(self.pkg.changes["distribution"].keys()) < 1:
            self.rejects.append("No valid distribution remaining.")

        # Ensure target distributions exist
        for suite in self.pkg.changes["distribution"].keys():
            if not get_suite(suite.lower()):
                self.rejects.append("Unknown distribution `%s'." % (suite))
705
706     ###########################################################################
707
708     def binary_file_checks(self, f, session):
709         cnf = Config()
710         entry = self.pkg.files[f]
711
712         # Extract package control information
713         deb_file = utils.open_file(f)
714         try:
715             control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
716         except:
717             self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
718             deb_file.close()
719             # Can't continue, none of the checks on control would work.
720             return
721
722         # Check for mandantory "Description:"
723         deb_file.seek(0)
724         try:
725             apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
726         except:
727             self.rejects.append("%s: Missing Description in binary package" % (f))
728             return
729
730         deb_file.close()
731
732         # Check for mandatory fields
733         for field in [ "Package", "Architecture", "Version" ]:
734             if control.Find(field) == None:
735                 # Can't continue
736                 self.rejects.append("%s: No %s field in control." % (f, field))
737                 return
738
739         # Ensure the package name matches the one give in the .changes
740         if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
741             self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
742
743         # Validate the package field
744         package = control.Find("Package")
745         if not re_valid_pkg_name.match(package):
746             self.rejects.append("%s: invalid package name '%s'." % (f, package))
747
748         # Validate the version field
749         version = control.Find("Version")
750         if not re_valid_version.match(version):
751             self.rejects.append("%s: invalid version number '%s'." % (f, version))
752
753         # Ensure the architecture of the .deb is one we know about.
754         default_suite = cnf.get("Dinstall::DefaultSuite", "unstable")
755         architecture = control.Find("Architecture")
756         upload_suite = self.pkg.changes["distribution"].keys()[0]
757
758         if      architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session = session)] \
759             and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session = session)]:
760             self.rejects.append("Unknown architecture '%s'." % (architecture))
761
762         # Ensure the architecture of the .deb is one of the ones
763         # listed in the .changes.
764         if not self.pkg.changes["architecture"].has_key(architecture):
765             self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
766
767         # Sanity-check the Depends field
768         depends = control.Find("Depends")
769         if depends == '':
770             self.rejects.append("%s: Depends field is empty." % (f))
771
772         # Sanity-check the Provides field
773         provides = control.Find("Provides")
774         if provides:
775             provide = re_spacestrip.sub('', provides)
776             if provide == '':
777                 self.rejects.append("%s: Provides field is empty." % (f))
778             prov_list = provide.split(",")
779             for prov in prov_list:
780                 if not re_valid_pkg_name.match(prov):
781                     self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
782
783         # If there is a Built-Using field, we need to check we can find the
784         # exact source version
785         built_using = control.Find("Built-Using")
786         if built_using:
787             try:
788                 entry["built-using"] = []
789                 for dep in apt_pkg.parse_depends(built_using):
790                     bu_s, bu_v, bu_e = dep[0]
791                     # Check that it's an exact match dependency and we have
792                     # some form of version
793                     if bu_e != "=" or len(bu_v) < 1:
794                         self.rejects.append("%s: Built-Using contains non strict dependency (%s %s %s)" % (f, bu_s, bu_e, bu_v))
795                     else:
796                         # Find the source id for this version
797                         bu_so = get_sources_from_name(bu_s, version=bu_v, session = session)
798                         if len(bu_so) != 1:
799                             self.rejects.append("%s: Built-Using (%s = %s): Cannot find source package" % (f, bu_s, bu_v))
800                         else:
801                             entry["built-using"].append( (bu_so[0].source, bu_so[0].version, ) )
802
803             except ValueError, e:
804                 self.rejects.append("%s: Cannot parse Built-Using field: %s" % (f, str(e)))
805
806
807         # Check the section & priority match those given in the .changes (non-fatal)
808         if     control.Find("Section") and entry["section"] != "" \
809            and entry["section"] != control.Find("Section"):
810             self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
811                                 (f, control.Find("Section", ""), entry["section"]))
812         if control.Find("Priority") and entry["priority"] != "" \
813            and entry["priority"] != control.Find("Priority"):
814             self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
815                                 (f, control.Find("Priority", ""), entry["priority"]))
816
817         entry["package"] = package
818         entry["architecture"] = architecture
819         entry["version"] = version
820         entry["maintainer"] = control.Find("Maintainer", "")
821
822         if f.endswith(".udeb"):
823             self.pkg.files[f]["dbtype"] = "udeb"
824         elif f.endswith(".deb"):
825             self.pkg.files[f]["dbtype"] = "deb"
826         else:
827             self.rejects.append("%s is neither a .deb or a .udeb." % (f))
828
829         entry["source"] = control.Find("Source", entry["package"])
830
831         # Get the source version
832         source = entry["source"]
833         source_version = ""
834
835         if source.find("(") != -1:
836             m = re_extract_src_version.match(source)
837             source = m.group(1)
838             source_version = m.group(2)
839
840         if not source_version:
841             source_version = self.pkg.files[f]["version"]
842
843         entry["source package"] = source
844         entry["source version"] = source_version
845
846         # Ensure the filename matches the contents of the .deb
847         m = re_isadeb.match(f)
848
849         #  package name
850         file_package = m.group(1)
851         if entry["package"] != file_package:
852             self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
853                                 (f, file_package, entry["dbtype"], entry["package"]))
854         epochless_version = re_no_epoch.sub('', control.Find("Version"))
855
856         #  version
857         file_version = m.group(2)
858         if epochless_version != file_version:
859             self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
860                                 (f, file_version, entry["dbtype"], epochless_version))
861
862         #  architecture
863         file_architecture = m.group(3)
864         if entry["architecture"] != file_architecture:
865             self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
866                                 (f, file_architecture, entry["dbtype"], entry["architecture"]))
867
868         # Check for existent source
869         source_version = entry["source version"]
870         source_package = entry["source package"]
871         if self.pkg.changes["architecture"].has_key("source"):
872             if source_version != self.pkg.changes["version"]:
873                 self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
874                                     (source_version, f, self.pkg.changes["version"]))
875         else:
876             # Check in the SQL database
877             if not source_exists(source_package, source_version, suites = \
878                 self.pkg.changes["distribution"].keys(), session = session):
879                 # Check in one of the other directories
880                 source_epochless_version = re_no_epoch.sub('', source_version)
881                 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
882
883                 byhand_dir = get_policy_queue('byhand', session).path
884                 new_dir = get_policy_queue('new', session).path
885
886                 if os.path.exists(os.path.join(byhand_dir, dsc_filename)):
887                     entry["byhand"] = 1
888                 elif os.path.exists(os.path.join(new_dir, dsc_filename)):
889                     entry["new"] = 1
890                 else:
891                     dsc_file_exists = False
892                     # TODO: Don't hardcode this list: use all relevant queues
893                     #       The question is how to determine what is relevant
894                     for queue_name in ["embargoed", "unembargoed", "proposedupdates", "oldproposedupdates"]:
895                         queue = get_policy_queue(queue_name, session)
896                         if queue:
897                             if os.path.exists(os.path.join(queue.path, dsc_filename)):
898                                 dsc_file_exists = True
899                                 break
900
901                     if not dsc_file_exists:
902                         self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
903
904         # Check the version and for file overwrites
905         self.check_binary_against_db(f, session)
906
907     def source_file_checks(self, f, session):
908         entry = self.pkg.files[f]
909
910         m = re_issource.match(f)
911         if not m:
912             return
913
914         entry["package"] = m.group(1)
915         entry["version"] = m.group(2)
916         entry["type"] = m.group(3)
917
918         # Ensure the source package name matches the Source filed in the .changes
919         if self.pkg.changes["source"] != entry["package"]:
920             self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
921
922         # Ensure the source version matches the version in the .changes file
923         if re_is_orig_source.match(f):
924             changes_version = self.pkg.changes["chopversion2"]
925         else:
926             changes_version = self.pkg.changes["chopversion"]
927
928         if changes_version != entry["version"]:
929             self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))
930
931         # Ensure the .changes lists source in the Architecture field
932         if not self.pkg.changes["architecture"].has_key("source"):
933             self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))
934
935         # Check the signature of a .dsc file
936         if entry["type"] == "dsc":
937             # check_signature returns either:
938             #  (None, [list, of, rejects]) or (signature, [])
939             (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
940             for j in rejects:
941                 self.rejects.append(j)
942
943         entry["architecture"] = "source"
944
945     def per_suite_file_checks(self, f, suite, session):
946         cnf = Config()
947         entry = self.pkg.files[f]
948
949         # Skip byhand
950         if entry.has_key("byhand"):
951             return
952
953         # Check we have fields we need to do these checks
954         oktogo = True
955         for m in ['component', 'package', 'priority', 'size', 'md5sum']:
956             if not entry.has_key(m):
957                 self.rejects.append("file '%s' does not have field %s set" % (f, m))
958                 oktogo = False
959
960         if not oktogo:
961             return
962
963         # Handle component mappings
964         for m in cnf.ValueList("ComponentMappings"):
965             (source, dest) = m.split()
966             if entry["component"] == source:
967                 entry["original component"] = source
968                 entry["component"] = dest
969
970         # Ensure the component is valid for the target suite
971         if entry["component"] not in get_component_names(session):
972             self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
973             return
974
975         # Validate the component
976         if not get_component(entry["component"], session):
977             self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
978             return
979
980         # See if the package is NEW
981         if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
982             entry["new"] = 1
983
984         # Validate the priority
985         if entry["priority"].find('/') != -1:
986             self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))
987
988         # Determine the location
989         location = cnf["Dir::Pool"]
990         l = get_location(location, entry["component"], session=session)
991         if l is None:
992             self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
993             entry["location id"] = -1
994         else:
995             entry["location id"] = l.location_id
996
997         # Check the md5sum & size against existing files (if any)
998         entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])
999
1000         found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
1001                                          entry["size"], entry["md5sum"], entry["location id"])
1002
1003         if found is None:
1004             self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
1005         elif found is False and poolfile is not None:
1006             self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
1007         else:
1008             if poolfile is None:
1009                 entry["files id"] = None
1010             else:
1011                 entry["files id"] = poolfile.file_id
1012
1013         # Check for packages that have moved from one component to another
1014         entry['suite'] = suite
1015         arch_list = [entry["architecture"], 'all']
1016         component = get_component_by_package_suite(self.pkg.files[f]['package'], \
1017             [suite], arch_list = arch_list, session = session)
1018         if component is not None:
1019             entry["othercomponents"] = component
1020
    def check_files(self, action=True):
        """Check every file listed in the .changes.

        Copies the files into the holding area (when action is True),
        rejects files already known to dak or already present in a policy
        queue, classifies each file (byhand / deb / source) and dispatches
        to the type-specific and per-suite checks.  Problems are appended
        to self.rejects / self.warnings.

        @type action: boolean
        @param action: when False (no-action mode) files are not copied to
            holding
        """
        file_keys = self.pkg.files.keys()
        holding = Holding()
        cnf = Config()

        if action:
            # copy_to_holding needs to run from the upload directory
            cwd = os.getcwd()
            os.chdir(self.pkg.directory)
            for f in file_keys:
                # A failed copy is not fatal here; the file may still be
                # located in the DB / pool further down the pipeline.
                ret = holding.copy_to_holding(f)
                if ret is not None:
                    self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)

            os.chdir(cwd)

        # check we already know the changes file
        # [NB: this check must be done post-suite mapping]
        base_filename = os.path.basename(self.pkg.changes_file)

        session = DBConn().session()

        try:
            dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
            # if in the pool or in a queue other than unchecked, reject
            if (dbc.in_queue is None) \
                   or (dbc.in_queue is not None
                       and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
                self.rejects.append("%s file already known to dak" % base_filename)
        except NoResultFound, e:
            # not known, good
            pass

        has_binaries = False
        has_source = False

        for f, entry in self.pkg.files.items():
            # Ensure the file does not already exist in one of the accepted directories
            # TODO: Dynamically generate this list
            for queue_name in [ "byhand", "new", "proposedupdates", "oldproposedupdates", "embargoed", "unembargoed" ]:
                queue = get_policy_queue(queue_name, session)
                if queue and os.path.exists(os.path.join(queue.path, f)):
                    self.rejects.append("%s file already exists in the %s queue." % (f, queue_name))

            if not re_taint_free.match(f):
                self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))

            # Check the file is readable
            if os.access(f, os.R_OK) == 0:
                # When running in -n, copy_to_holding() won't have
                # generated the reject_message, so we need to.
                # NOTE(review): the comment above talks about -n (action
                # False), yet the branch below only runs when action is
                # True -- confirm which is intended before changing.
                if action:
                    if os.path.exists(f):
                        self.rejects.append("Can't read `%s'. [permission denied]" % (f))
                    else:
                        # Don't directly reject, mark to check later to deal with orig's
                        # we can find in the pool
                        self.later_check_files.append(f)
                entry["type"] = "unreadable"
                continue

            # If it's byhand skip remaining checks
            if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Checks for a binary package...
            elif re_isadeb.match(f):
                has_binaries = True
                entry["type"] = "deb"

                # This routine appends to self.rejects/warnings as appropriate
                self.binary_file_checks(f, session)

            # Checks for a source package...
            elif re_issource.match(f):
                has_source = True

                # This routine appends to self.rejects/warnings as appropriate
                self.source_file_checks(f, session)

            # Not a binary or source package?  Assume byhand...
            else:
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Per-suite file checks
            entry["oldfiles"] = {}
            for suite in self.pkg.changes["distribution"].keys():
                self.per_suite_file_checks(f, suite, session)

        session.close()

        # If the .changes file says it has source, it must have source.
        if self.pkg.changes["architecture"].has_key("source"):
            if not has_source:
                self.rejects.append("no source found and Architecture line in changes mention source.")

            if (not has_binaries) and (not cnf.FindB("Dinstall::AllowSourceOnlyUploads")):
                self.rejects.append("source only uploads are not supported.")
1120
1121     ###########################################################################
1122
1123     def __dsc_filename(self):
1124         """
1125         Returns: (Status, Dsc_Filename)
1126         where
1127           Status: Boolean; True when there was no error, False otherwise
1128           Dsc_Filename: String; name of the dsc file if Status is True, reason for the error otherwise
1129         """
1130         dsc_filename = None
1131
1132         # find the dsc
1133         for name, entry in self.pkg.files.items():
1134             if entry.has_key("type") and entry["type"] == "dsc":
1135                 if dsc_filename:
1136                     return False, "cannot process a .changes file with multiple .dsc's."
1137                 else:
1138                     dsc_filename = name
1139
1140         if not dsc_filename:
1141             return False, "source uploads must contain a dsc file"
1142
1143         return True, dsc_filename
1144
    def load_dsc(self, action=True, signing_rules=1):
        """
        Find and load the dsc from self.pkg.files into self.dsc

        Returns: (Status, Reason)
        where
          Status: Boolean; True when there was no error, False otherwise
          Reason: String; When Status is False this describes the error

        NOTE(review): when action is True, a CantOpenError from
        parse_changes is NOT reported as a failure -- the except clause
        falls through to the final success return.  Presumably the file is
        expected to turn up in holding by then; confirm before changing.
        """

        # find the dsc
        (status, dsc_filename) = self.__dsc_filename()
        if not status:
            # If status is false, dsc_filename has the reason
            return False, dsc_filename

        try:
            self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=signing_rules, dsc_file=1))
        except CantOpenError:
            # Only fatal in no-action mode; see the note in the docstring.
            if not action:
                return False, "%s: can't read file." % (dsc_filename)
        except ParseChangesError, line:
            return False, "%s: parse error, can't grok: %s." % (dsc_filename, line)
        except InvalidDscError, line:
            return False, "%s: syntax error on line %s." % (dsc_filename, line)
        except ChangesUnicodeError:
            return False, "%s: dsc file not proper utf-8." % (dsc_filename)

        return True, None
1174
1175     ###########################################################################
1176
1177     def check_dsc(self, action=True, session=None):
1178         """Returns bool indicating whether or not the source changes are valid"""
1179         # Ensure there is source to check
1180         if not self.pkg.changes["architecture"].has_key("source"):
1181             return True
1182
1183         (status, reason) = self.load_dsc(action=action)
1184         if not status:
1185             self.rejects.append(reason)
1186             return False
1187         (status, dsc_filename) = self.__dsc_filename()
1188         if not status:
1189             # If status is false, dsc_filename has the reason
1190             self.rejects.append(dsc_filename)
1191             return False
1192
1193         # Build up the file list of files mentioned by the .dsc
1194         try:
1195             self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
1196         except NoFilesFieldError:
1197             self.rejects.append("%s: no Files: field." % (dsc_filename))
1198             return False
1199         except UnknownFormatError, format:
1200             self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
1201             return False
1202         except ParseChangesError, line:
1203             self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
1204             return False
1205
1206         # Enforce mandatory fields
1207         for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
1208             if not self.pkg.dsc.has_key(i):
1209                 self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
1210                 return False
1211
1212         # Validate the source and version fields
1213         if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
1214             self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
1215         if not re_valid_version.match(self.pkg.dsc["version"]):
1216             self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))
1217
1218         # Only a limited list of source formats are allowed in each suite
1219         for dist in self.pkg.changes["distribution"].keys():
1220             allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
1221             if self.pkg.dsc["format"] not in allowed:
1222                 self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))
1223
1224         # Validate the Maintainer field
1225         try:
1226             # We ignore the return value
1227             fix_maintainer(self.pkg.dsc["maintainer"])
1228         except ParseMaintError, msg:
1229             self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
1230                                  % (dsc_filename, self.pkg.dsc["maintainer"], msg))
1231
1232         # Validate the build-depends field(s)
1233         for field_name in [ "build-depends", "build-depends-indep" ]:
1234             field = self.pkg.dsc.get(field_name)
1235             if field:
1236                 # Have apt try to parse them...
1237                 try:
1238                     apt_pkg.ParseSrcDepends(field)
1239                 except:
1240                     self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))
1241
1242         # Ensure the version number in the .dsc matches the version number in the .changes
1243         epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1244         changes_version = self.pkg.files[dsc_filename]["version"]
1245
1246         if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
1247             self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
1248
1249         # Ensure the Files field contain only what's expected
1250         self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))
1251
1252         # Ensure source is newer than existing source in target suites
1253         session = DBConn().session()
1254         self.check_source_against_db(dsc_filename, session)
1255         self.check_dsc_against_db(dsc_filename, session)
1256
1257         dbchg = get_dbchange(self.pkg.changes_file, session)
1258
1259         # Finally, check if we're missing any files
1260         for f in self.later_check_files:
1261             print 'XXX: %s' % f
1262             # Check if we've already processed this file if we have a dbchg object
1263             ok = False
1264             if dbchg:
1265                 for pf in dbchg.files:
1266                     if pf.filename == f and pf.processed:
1267                         self.notes.append('%s was already processed so we can go ahead' % f)
1268                         ok = True
1269                         del self.pkg.files[f]
1270             if not ok:
1271                 self.rejects.append("Could not find file %s references in changes" % f)
1272
1273         session.close()
1274
1275         return (len(self.rejects) == 0)
1276
1277     ###########################################################################
1278
1279     def get_changelog_versions(self, source_dir):
1280         """Extracts a the source package and (optionally) grabs the
1281         version history out of debian/changelog for the BTS."""
1282
1283         cnf = Config()
1284
1285         # Find the .dsc (again)
1286         dsc_filename = None
1287         for f in self.pkg.files.keys():
1288             if self.pkg.files[f]["type"] == "dsc":
1289                 dsc_filename = f
1290
1291         # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1292         if not dsc_filename:
1293             return
1294
1295         # Create a symlink mirror of the source files in our temporary directory
1296         for f in self.pkg.files.keys():
1297             m = re_issource.match(f)
1298             if m:
1299                 src = os.path.join(source_dir, f)
1300                 # If a file is missing for whatever reason, give up.
1301                 if not os.path.exists(src):
1302                     return
1303                 ftype = m.group(3)
1304                 if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
1305                    self.pkg.orig_files[f].has_key("path"):
1306                     continue
1307                 dest = os.path.join(os.getcwd(), f)
1308                 os.symlink(src, dest)
1309
1310         # If the orig files are not a part of the upload, create symlinks to the
1311         # existing copies.
1312         for orig_file in self.pkg.orig_files.keys():
1313             if not self.pkg.orig_files[orig_file].has_key("path"):
1314                 continue
1315             dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
1316             os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
1317
1318         # Extract the source
1319         try:
1320             unpacked = UnpackedSource(dsc_filename)
1321         except Exception, e:
1322             self.rejects.append("'dpkg-source -x' failed for %s. (%s)" % (dsc_filename, str(e)))
1323             return
1324
1325         if not cnf.Find("Dir::BTSVersionTrack"):
1326             return
1327
1328         # Get the upstream version
1329         upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1330         if re_strip_revision.search(upstr_version):
1331             upstr_version = re_strip_revision.sub('', upstr_version)
1332
1333         # Ensure the changelog file exists
1334         changelog_file = unpacked.get_changelog_file()
1335         if changelog_file is None:
1336             self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
1337             return
1338
1339         # Parse the changelog
1340         self.pkg.dsc["bts changelog"] = ""
1341         for line in changelog_file.readlines():
1342             m = re_changelog_versions.match(line)
1343             if m:
1344                 self.pkg.dsc["bts changelog"] += line
1345         changelog_file.close()
1346         unpacked.cleanup()
1347
1348         # Check we found at least one revision in the changelog
1349         if not self.pkg.dsc["bts changelog"]:
1350             self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1351
1352     def check_source(self):
1353         # Bail out if:
1354         #    a) there's no source
1355         if not self.pkg.changes["architecture"].has_key("source"):
1356             return
1357
1358         tmpdir = utils.temp_dirname()
1359
1360         # Move into the temporary directory
1361         cwd = os.getcwd()
1362         os.chdir(tmpdir)
1363
1364         # Get the changelog version history
1365         self.get_changelog_versions(cwd)
1366
1367         # Move back and cleanup the temporary tree
1368         os.chdir(cwd)
1369
1370         try:
1371             shutil.rmtree(tmpdir)
1372         except OSError, e:
1373             if e.errno != errno.EACCES:
1374                 print "foobar"
1375                 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1376
1377             self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1378             # We probably have u-r or u-w directories so chmod everything
1379             # and try again.
1380             cmd = "chmod -R u+rwx %s" % (tmpdir)
1381             result = os.system(cmd)
1382             if result != 0:
1383                 utils.fubar("'%s' failed with result %s." % (cmd, result))
1384             shutil.rmtree(tmpdir)
1385         except Exception, e:
1386             print "foobar2 (%s)" % e
1387             utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1388
1389     ###########################################################################
1390     def ensure_hashes(self):
1391         # Make sure we recognise the format of the Files: field in the .changes
1392         format = self.pkg.changes.get("format", "0.0").split(".", 1)
1393         if len(format) == 2:
1394             format = int(format[0]), int(format[1])
1395         else:
1396             format = int(float(format[0])), 0
1397
1398         # We need to deal with the original changes blob, as the fields we need
1399         # might not be in the changes dict serialised into the .dak anymore.
1400         orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1401
1402         # Copy the checksums over to the current changes dict.  This will keep
1403         # the existing modifications to it intact.
1404         for field in orig_changes:
1405             if field.startswith('checksums-'):
1406                 self.pkg.changes[field] = orig_changes[field]
1407
1408         # Check for unsupported hashes
1409         for j in utils.check_hash_fields(".changes", self.pkg.changes):
1410             self.rejects.append(j)
1411
1412         for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1413             self.rejects.append(j)
1414
1415         # We have to calculate the hash if we have an earlier changes version than
1416         # the hash appears in rather than require it exist in the changes file
1417         for hashname, hashfunc, version in utils.known_hashes:
1418             # TODO: Move _ensure_changes_hash into this class
1419             for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1420                 self.rejects.append(j)
1421             if "source" in self.pkg.changes["architecture"]:
1422                 # TODO: Move _ensure_dsc_hash into this class
1423                 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1424                     self.rejects.append(j)
1425
1426     def check_hashes(self):
1427         for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1428             self.rejects.append(m)
1429
1430         for m in utils.check_size(".changes", self.pkg.files):
1431             self.rejects.append(m)
1432
1433         for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1434             self.rejects.append(m)
1435
1436         for m in utils.check_size(".dsc", self.pkg.dsc_files):
1437             self.rejects.append(m)
1438
1439         self.ensure_hashes()
1440
1441     ###########################################################################
1442
    def ensure_orig(self, target_dir='.', session=None):
        """
        Ensures that all orig files mentioned in the changes file are present
        in target_dir. If they do not exist, they are symlinked into place.

        A list containing the symlinks that were created is returned (so they
        can be removed).

        @type target_dir: string
        @param target_dir: directory the orig files should be linked into

        @type session: SQLAlchemy session or None
        @param session: session to reuse for the pool lookup; when None, a
            temporary session is opened and closed per orig file
        """

        symlinked = []
        cnf = Config()

        for filename, entry in self.pkg.dsc_files.iteritems():
            if not re_is_orig_source.match(filename):
                # File is not an orig; ignore
                continue

            if os.path.exists(filename):
                # File exists, no need to continue
                continue

            # Symlink 'path' into target_dir only if its size and md5sum
            # match what the .dsc declares for this orig file; records the
            # created link in 'symlinked' so the caller can undo it.
            def symlink_if_valid(path):
                f = utils.open_file(path)
                md5sum = apt_pkg.md5sum(f)
                f.close()

                fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
                expected = (int(entry['size']), entry['md5sum'])

                if fingerprint != expected:
                    return False

                dest = os.path.join(target_dir, filename)

                os.symlink(path, dest)
                symlinked.append(dest)

                return True

            # Reuse the caller's session if given, else open a throwaway one.
            session_ = session
            if session is None:
                session_ = DBConn().session()

            found = False

            # Look in the pool
            for poolfile in get_poolfile_like_name('%s' % filename, session_):
                poolfile_path = os.path.join(
                    poolfile.location.path, poolfile.filename
                )

                if symlink_if_valid(poolfile_path):
                    found = True
                    break

            if session is None:
                session_.close()

            if found:
                continue

            # Look in some other queues for the file
            queue_names = ['new', 'byhand',
                           'proposedupdates', 'oldproposedupdates',
                           'embargoed', 'unembargoed']

            for queue_name in queue_names:
                # NOTE(review): this passes the caller's 'session', which may
                # be None here (the throwaway session_ above is already
                # closed) — presumably get_policy_queue copes with None by
                # opening its own session; confirm.
                queue = get_policy_queue(queue_name, session)
                if not queue:
                    continue

                queuefile_path = os.path.join(queue.path, filename)

                if not os.path.exists(queuefile_path):
                    # Does not exist in this queue
                    continue

                if symlink_if_valid(queuefile_path):
                    break

        return symlinked
1524
1525     ###########################################################################
1526
    def check_lintian(self):
        """
        Extends self.rejects by checking the output of lintian against tags
        specified in Dinstall::LintianTags.
        """

        cnf = Config()

        # Don't reject binary uploads
        if not self.pkg.changes['architecture'].has_key('source'):
            return

        # Only check some distributions
        for dist in ('unstable', 'experimental'):
            if dist in self.pkg.changes['distribution']:
                break
        else:
            return

        # If we do not have a tagfile, don't do anything
        tagfile = cnf.get("Dinstall::LintianTags")
        if not tagfile:
            return

        # Parse the yaml file
        sourcefile = file(tagfile, 'r')
        sourcecontent = sourcefile.read()
        sourcefile.close()

        try:
            # NOTE(review): yaml.load uses the full (unsafe) loader; the tags
            # file is operator-controlled configuration, but yaml.safe_load
            # would be the cautious choice.
            lintiantags = yaml.load(sourcecontent)['lintian']
        except yaml.YAMLError, msg:
            utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
            return

        # Try and find all orig mentioned in the .dsc
        symlinked = self.ensure_orig()

        # Setup the input file for lintian: one tag name per line, handed to
        # lintian via --tags-from-file below.
        fd, temp_filename = utils.temp_filename()
        temptagfile = os.fdopen(fd, 'w')
        for tags in lintiantags.values():
            temptagfile.writelines(['%s\n' % x for x in tags])
        temptagfile.close()

        try:
            cmd = "lintian --show-overrides --tags-from-file %s %s" % \
                (temp_filename, self.pkg.changes_file)

            result, output = commands.getstatusoutput(cmd)
        finally:
            # Remove our tempfile and any symlinks we created
            os.unlink(temp_filename)

            for symlink in symlinked:
                os.unlink(symlink)

        # A non-zero status does not abort the check: we warn, then still
        # parse whatever output lintian managed to produce.
        if result == 2:
            utils.warn("lintian failed for %s [return code: %s]." % \
                (self.pkg.changes_file, result))
            utils.warn(utils.prefix_multi_line_string(output, \
                " [possible output:] "))

        # Log via the upload's logger, tagging every entry with the changes
        # file and the name of this check.
        def log(*txt):
            if self.logger:
                self.logger.log(
                    [self.pkg.changes_file, "check_lintian"] + list(txt)
                )

        # Generate messages
        parsed_tags = parse_lintian_output(output)
        self.rejects.extend(
            generate_reject_messages(parsed_tags, lintiantags, log=log)
        )
1601
1602     ###########################################################################
1603     def check_urgency(self):
1604         cnf = Config()
1605         if self.pkg.changes["architecture"].has_key("source"):
1606             if not self.pkg.changes.has_key("urgency"):
1607                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1608             self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1609             if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1610                 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1611                                      (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1612                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1613
1614     ###########################################################################
1615
1616     # Sanity check the time stamps of files inside debs.
1617     # [Files in the near future cause ugly warnings and extreme time
1618     #  travel can cause errors on extraction]
1619
1620     def check_timestamps(self):
1621         Cnf = Config()
1622
1623         future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1624         past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1625         tar = TarTime(future_cutoff, past_cutoff)
1626
1627         for filename, entry in self.pkg.files.items():
1628             if entry["type"] == "deb":
1629                 tar.reset()
1630                 try:
1631                     deb = apt_inst.DebFile(filename)
1632                     deb.control.go(tar.callback)
1633
1634                     future_files = tar.future_files.keys()
1635                     if future_files:
1636                         num_future_files = len(future_files)
1637                         future_file = future_files[0]
1638                         future_date = tar.future_files[future_file]
1639                         self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1640                                % (filename, num_future_files, future_file, time.ctime(future_date)))
1641
1642                     ancient_files = tar.ancient_files.keys()
1643                     if ancient_files:
1644                         num_ancient_files = len(ancient_files)
1645                         ancient_file = ancient_files[0]
1646                         ancient_date = tar.ancient_files[ancient_file]
1647                         self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1648                                % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1649                 except:
1650                     self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1651
1652     def check_if_upload_is_sponsored(self, uid_email, uid_name):
1653         if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1654             sponsored = False
1655         elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1656             sponsored = False
1657             if uid_name == "":
1658                 sponsored = True
1659         else:
1660             sponsored = True
1661             if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1662                 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1663                 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1664                     self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1665                         self.pkg.changes["sponsoremail"] = uid_email
1666
1667         return sponsored
1668
1669
1670     ###########################################################################
1671     # check_signed_by_key checks
1672     ###########################################################################
1673
1674     def check_signed_by_key(self):
1675         """Ensure the .changes is signed by an authorized uploader."""
1676         session = DBConn().session()
1677
1678         # First of all we check that the person has proper upload permissions
1679         # and that this upload isn't blocked
1680         fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1681
1682         if fpr is None:
1683             self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1684             return
1685
1686         # TODO: Check that import-keyring adds UIDs properly
1687         if not fpr.uid:
1688             self.rejects.append("Cannot find uid for fingerprint %s.  Please contact ftpmaster@debian.org" % fpr.fingerprint)
1689             return
1690
1691         # Check that the fingerprint which uploaded has permission to do so
1692         self.check_upload_permissions(fpr, session)
1693
1694         # Check that this package is not in a transition
1695         self.check_transition(session)
1696
1697         session.close()
1698
1699
1700     def check_upload_permissions(self, fpr, session):
1701         # Check any one-off upload blocks
1702         self.check_upload_blocks(fpr, session)
1703
1704         # If the source_acl is None, source is never allowed
1705         if fpr.source_acl is None:
1706             if self.pkg.changes["architecture"].has_key("source"):
1707                 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1708                 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1709                 self.rejects.append(rej)
1710                 return
1711         # Do DM as a special case
1712         # DM is a special case unfortunately, so we check it first
1713         # (keys with no source access get more access than DMs in one
1714         #  way; DMs can only upload for their packages whether source
1715         #  or binary, whereas keys with no access might be able to
1716         #  upload some binaries)
1717         elif fpr.source_acl.access_level == 'dm':
1718             self.check_dm_upload(fpr, session)
1719         else:
1720             # If not a DM, we allow full upload rights
1721             uid_email = "%s@debian.org" % (fpr.uid.uid)
1722             self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1723
1724
1725         # Check binary upload permissions
1726         # By this point we know that DMs can't have got here unless they
1727         # are allowed to deal with the package concerned so just apply
1728         # normal checks
1729         if fpr.binary_acl.access_level == 'full':
1730             return
1731
1732         # Otherwise we're in the map case
1733         tmparches = self.pkg.changes["architecture"].copy()
1734         tmparches.pop('source', None)
1735
1736         for bam in fpr.binary_acl_map:
1737             tmparches.pop(bam.architecture.arch_string, None)
1738
1739         if len(tmparches.keys()) > 0:
1740             if fpr.binary_reject:
1741                 rej = "changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1742                 if len(tmparches.keys()) == 1:
1743                     rej += "\n\narchitecture involved is: %s" % ",".join(tmparches.keys())
1744                 else:
1745                     rej += "\n\narchitectures involved are: %s" % ",".join(tmparches.keys())
1746                 self.rejects.append(rej)
1747             else:
1748                 # TODO: This is where we'll implement reject vs throw away binaries later
1749                 rej = "Uhm.  I'm meant to throw away the binaries now but that's not implemented yet"
1750                 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1751                 rej += "\nFingerprint: %s", (fpr.fingerprint)
1752                 self.rejects.append(rej)
1753
1754
1755     def check_upload_blocks(self, fpr, session):
1756         """Check whether any upload blocks apply to this source, source
1757            version, uid / fpr combination"""
1758
1759         def block_rej_template(fb):
1760             rej = 'Manual upload block in place for package %s' % fb.source
1761             if fb.version is not None:
1762                 rej += ', version %s' % fb.version
1763             return rej
1764
1765         for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1766             # version is None if the block applies to all versions
1767             if fb.version is None or fb.version == self.pkg.changes['version']:
1768                 # Check both fpr and uid - either is enough to cause a reject
1769                 if fb.fpr is not None:
1770                     if fb.fpr.fingerprint == fpr.fingerprint:
1771                         self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1772                 if fb.uid is not None:
1773                     if fb.uid == fpr.uid:
1774                         self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1775
1776
    def check_dm_upload(self, fpr, session):
        """
        Enforce the Debian Maintainer upload restrictions for an upload
        signed by a DM key, appending to self.rejects on any violation.

        @type fpr: Fingerprint
        @param fpr: database object for the (DM) key that signed the upload

        @type session: SQLAlchemy session
        @param session: session used for the source package lookups
        """
        # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
        ## none of the uploaded packages are NEW
        rej = False
        for f in self.pkg.files.keys():
            if self.pkg.files[f].has_key("byhand"):
                self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
                rej = True
            if self.pkg.files[f].has_key("new"):
                self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
                rej = True

        if rej:
            return

        # Newest version in unstable/experimental of the source being uploaded.
        r = get_newest_source(self.pkg.changes["source"], session)

        if r is None:
            rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
            self.rejects.append(rej)
            return

        if not r.dm_upload_allowed:
            rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
            self.rejects.append(rej)
            return

        ## the Maintainer: field of the uploaded .changes file corresponds with
        ## the owner of the key used (ie, non-developer maintainers may not sponsor
        ## uploads)
        if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
            self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))

        ## the most recent version of the package uploaded to unstable or
        ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
        ## non-developer maintainers cannot NMU or hijack packages)

        # uploader includes the maintainer
        accept = False
        for uploader in r.uploaders:
            (rfc822, rfc2047, name, email) = uploader.get_split_maintainer()
            # Eww - I hope we never have two people with the same name in Debian
            if email == fpr.uid.uid or name == fpr.uid.name:
                accept = True
                break

        if not accept:
            self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
            return

        ## none of the packages are being taken over from other source packages
        for b in self.pkg.changes["binary"].keys():
            for suite in self.pkg.changes["distribution"].keys():
                for s in get_source_by_package_and_suite(b, suite, session):
                    if s.source != self.pkg.changes["source"]:
                        self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s, suite))
1833
1834
1835
    def check_transition(self, session):
        """
        Reject sourceful uploads to unstable of packages that are part of an
        ongoing testing transition, as configured in the
        Dinstall::ReleaseTransitions YAML file.

        @type session: SQLAlchemy session
        @param session: session used to look up the version in testing
        """
        cnf = Config()

        sourcepkg = self.pkg.changes["source"]

        # No sourceful upload -> no need to do anything else, direct return
        # We also work with unstable uploads, not experimental or those going to some
        # proposed-updates queue
        if "source" not in self.pkg.changes["architecture"] or \
           "unstable" not in self.pkg.changes["distribution"]:
            return

        # Also only check if there is a file defined (and existant) with
        # checks.
        transpath = cnf.get("Dinstall::ReleaseTransitions", "")
        if transpath == "" or not os.path.exists(transpath):
            return

        # Parse the yaml file
        # NOTE(review): sourcefile is never explicitly closed, and yaml.load
        # uses the full (unsafe) loader on this operator-edited file;
        # yaml.safe_load would be the cautious choice.
        sourcefile = file(transpath, 'r')
        sourcecontent = sourcefile.read()
        try:
            transitions = yaml.load(sourcecontent)
        except yaml.YAMLError, msg:
            # This shouldn't happen, there is a wrapper to edit the file which
            # checks it, but we prefer to be safe than ending up rejecting
            # everything.
            utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
            return

        # Now look through all defined transitions
        for trans in transitions:
            t = transitions[trans]
            source = t["source"]
            expected = t["new"]

            # Will be None if nothing is in testing.
            current = get_source_in_suite(source, "testing", session)
            if current is not None:
                compare = apt_pkg.VersionCompare(current.version, expected)

            # 'compare' is only bound when current is not None; the
            # short-circuit of 'or' below keeps this safe.
            if current is None or compare < 0:
                # This is still valid, the current version in testing is older than
                # the new version we wait for, or there is none in testing yet

                # Check if the source we look at is affected by this.
                if sourcepkg in t['packages']:
                    # The source is affected, lets reject it.

                    rejectmsg = "%s: part of the %s transition.\n\n" % (
                        sourcepkg, trans)

                    if current is not None:
                        currentlymsg = "at version %s" % (current.version)
                    else:
                        currentlymsg = "not present in testing"

                    rejectmsg += "Transition description: %s\n\n" % (t["reason"])

                    rejectmsg += "\n".join(textwrap.wrap("""Your package
is part of a testing transition designed to get %s migrated (it is
currently %s, we need version %s).  This transition is managed by the
Release Team, and %s is the Release-Team member responsible for it.
Please mail debian-release@lists.debian.org or contact %s directly if you
need further assistance.  You might want to upload to experimental until this
transition is done."""
                            % (source, currentlymsg, expected,t["rm"], t["rm"])))

                    self.rejects.append(rejectmsg)
                    return
1906
1907     ###########################################################################
1908     # End check_signed_by_key checks
1909     ###########################################################################
1910
1911     def build_summaries(self):
1912         """ Build a summary of changes the upload introduces. """
1913
1914         (byhand, new, summary, override_summary) = self.pkg.file_summary()
1915
1916         short_summary = summary
1917
1918         # This is for direport's benefit...
1919         f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1920
1921         if byhand or new:
1922             summary += "Changes: " + f
1923
1924         summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1925
1926         summary += self.announce(short_summary, 0)
1927
1928         return (summary, short_summary)
1929
1930     ###########################################################################
1931
1932     def close_bugs(self, summary, action):
1933         """
1934         Send mail to close bugs as instructed by the closes field in the changes file.
1935         Also add a line to summary if any work was done.
1936
1937         @type summary: string
1938         @param summary: summary text, as given by L{build_summaries}
1939
1940         @type action: bool
1941         @param action: Set to false no real action will be done.
1942
1943         @rtype: string
1944         @return: summary. If action was taken, extended by the list of closed bugs.
1945
1946         """
1947
1948         template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1949
1950         bugs = self.pkg.changes["closes"].keys()
1951
1952         if not bugs:
1953             return summary
1954
1955         bugs.sort()
1956         summary += "Closing bugs: "
1957         for bug in bugs:
1958             summary += "%s " % (bug)
1959             if action:
1960                 self.update_subst()
1961                 self.Subst["__BUG_NUMBER__"] = bug
1962                 if self.pkg.changes["distribution"].has_key("stable"):
1963                     self.Subst["__STABLE_WARNING__"] = """
1964 Note that this package is not part of the released stable Debian
1965 distribution.  It may have dependencies on other unreleased software,
1966 or other instabilities.  Please take care if you wish to install it.
1967 The update will eventually make its way into the next released Debian
1968 distribution."""
1969                 else:
1970                     self.Subst["__STABLE_WARNING__"] = ""
1971                 mail_message = utils.TemplateSubst(self.Subst, template)
1972                 utils.send_mail(mail_message)
1973
1974                 # Clear up after ourselves
1975                 del self.Subst["__BUG_NUMBER__"]
1976                 del self.Subst["__STABLE_WARNING__"]
1977
1978         if action and self.logger:
1979             self.logger.log(["closing bugs"] + bugs)
1980
1981         summary += "\n"
1982
1983         return summary
1984
1985     ###########################################################################
1986
1987     def announce(self, short_summary, action):
1988         """
1989         Send an announce mail about a new upload.
1990
1991         @type short_summary: string
1992         @param short_summary: Short summary text to include in the mail
1993
1994         @type action: bool
1995         @param action: Set to false no real action will be done.
1996
1997         @rtype: string
1998         @return: Textstring about action taken.
1999
2000         """
2001
2002         cnf = Config()
2003         announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
2004
2005         # Only do announcements for source uploads with a recent dpkg-dev installed
2006         if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
2007            self.pkg.changes["architecture"].has_key("source"):
2008             return ""
2009
2010         lists_done = {}
2011         summary = ""
2012
2013         self.Subst["__SHORT_SUMMARY__"] = short_summary
2014
2015         # Skip all of this if not sending mail to avoid confusing people
2016         if cnf.has_key("Dinstall::Options::No-Mail") and cnf["Dinstall::Options::No-Mail"]:
2017             return summary
2018
2019         for dist in self.pkg.changes["distribution"].keys():
2020             suite = get_suite(dist)
2021             if suite is None: continue
2022             announce_list = suite.announce
2023             if announce_list == "" or lists_done.has_key(announce_list):
2024                 continue
2025
2026             lists_done[announce_list] = 1
2027             summary += "Announcing to %s\n" % (announce_list)
2028
2029             if action:
2030                 self.update_subst()
2031                 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
2032                 if cnf.get("Dinstall::TrackingServer") and \
2033                    self.pkg.changes["architecture"].has_key("source"):
2034                     trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
2035                     self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
2036
2037                 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
2038                 utils.send_mail(mail_message)
2039
2040                 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
2041
2042         if cnf.FindB("Dinstall::CloseBugs") and cnf.has_key("Dinstall::BugServer"):
2043             summary = self.close_bugs(summary, action)
2044
2045         del self.Subst["__SHORT_SUMMARY__"]
2046
2047         return summary
2048
2049     ###########################################################################
2050     @session_wrapper
2051     def accept (self, summary, short_summary, session=None):
2052         """
2053         Accept an upload.
2054
2055         This moves all files referenced from the .changes into the pool,
2056         sends the accepted mail, announces to lists, closes bugs and
2057         also checks for override disparities. If enabled it will write out
2058         the version history for the BTS Version Tracking and will finally call
2059         L{queue_build}.
2060
2061         @type summary: string
2062         @param summary: Summary text
2063
2064         @type short_summary: string
2065         @param short_summary: Short summary
2066         """
2067
2068         cnf = Config()
2069         stats = SummaryStats()
2070
2071         print "Installing."
2072         self.logger.log(["installing changes", self.pkg.changes_file])
2073
2074         binaries = []
2075         poolfiles = []
2076
2077         # Add the .dsc file to the DB first
2078         for newfile, entry in self.pkg.files.items():
2079             if entry["type"] == "dsc":
2080                 source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
2081                 for j in pfs:
2082                     poolfiles.append(j)
2083
2084         # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
2085         for newfile, entry in self.pkg.files.items():
2086             if entry["type"] == "deb":
2087                 b, pf = add_deb_to_db(self, newfile, session)
2088                 binaries.append(b)
2089                 poolfiles.append(pf)
2090
2091         # If this is a sourceful diff only upload that is moving
2092         # cross-component we need to copy the .orig files into the new
2093         # component too for the same reasons as above.
2094         # XXX: mhy: I think this should be in add_dsc_to_db
2095         if self.pkg.changes["architecture"].has_key("source"):
2096             for orig_file in self.pkg.orig_files.keys():
2097                 if not self.pkg.orig_files[orig_file].has_key("id"):
2098                     continue # Skip if it's not in the pool
2099                 orig_file_id = self.pkg.orig_files[orig_file]["id"]
2100                 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
2101                     continue # Skip if the location didn't change
2102
2103                 # Do the move
2104                 oldf = get_poolfile_by_id(orig_file_id, session)
2105                 old_filename = os.path.join(oldf.location.path, oldf.filename)
2106                 old_dat = {'size': oldf.filesize,   'md5sum': oldf.md5sum,
2107                            'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
2108
2109                 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
2110
2111                 # TODO: Care about size/md5sum collisions etc
2112                 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
2113
2114                 # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
2115                 if newf is None:
2116                     utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
2117                     newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
2118
2119                     session.flush()
2120
2121                     # Don't reference the old file from this changes
2122                     for p in poolfiles:
2123                         if p.file_id == oldf.file_id:
2124                             poolfiles.remove(p)
2125
2126                     poolfiles.append(newf)
2127
2128                     # Fix up the DSC references
2129                     toremove = []
2130
2131                     for df in source.srcfiles:
2132                         if df.poolfile.file_id == oldf.file_id:
2133                             # Add a new DSC entry and mark the old one for deletion
2134                             # Don't do it in the loop so we don't change the thing we're iterating over
2135                             newdscf = DSCFile()
2136                             newdscf.source_id = source.source_id
2137                             newdscf.poolfile_id = newf.file_id
2138                             session.add(newdscf)
2139
2140                             toremove.append(df)
2141
2142                     for df in toremove:
2143                         session.delete(df)
2144
2145                     # Flush our changes
2146                     session.flush()
2147
2148                     # Make sure that our source object is up-to-date
2149                     session.expire(source)
2150
2151         # Add changelog information to the database
2152         self.store_changelog()
2153
2154         # Install the files into the pool
2155         for newfile, entry in self.pkg.files.items():
2156             destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
2157             utils.move(newfile, destination)
2158             self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
2159             stats.accept_bytes += float(entry["size"])
2160
2161         # Copy the .changes file across for suite which need it.
2162         copy_changes = dict([(x.copychanges, '')
2163                              for x in session.query(Suite).filter(Suite.suite_name.in_(self.pkg.changes["distribution"].keys())).all()
2164                              if x.copychanges is not None])
2165
2166         for dest in copy_changes.keys():
2167             utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
2168
2169         # We're done - commit the database changes
2170         session.commit()
2171         # Our SQL session will automatically start a new transaction after
2172         # the last commit
2173
2174         # Now ensure that the metadata has been added
2175         # This has to be done after we copy the files into the pool
2176         # For source if we have it:
2177         if self.pkg.changes["architecture"].has_key("source"):
2178             import_metadata_into_db(source, session)
2179
2180         # Now for any of our binaries
2181         for b in binaries:
2182             import_metadata_into_db(b, session)
2183
2184         session.commit()
2185
2186         # Move the .changes into the 'done' directory
2187         ye, mo, da = time.gmtime()[0:3]
2188         donedir = os.path.join(cnf["Dir::Done"], str(ye), "%0.2d" % mo, "%0.2d" % da)
2189         if not os.path.isdir(donedir):
2190             os.makedirs(donedir)
2191
2192         utils.move(self.pkg.changes_file,
2193                    os.path.join(donedir, os.path.basename(self.pkg.changes_file)))
2194
2195         if self.pkg.changes["architecture"].has_key("source"):
2196             UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
2197
2198         self.update_subst()
2199         self.Subst["__SUMMARY__"] = summary
2200         mail_message = utils.TemplateSubst(self.Subst,
2201                                            os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
2202         utils.send_mail(mail_message)
2203         self.announce(short_summary, 1)
2204
2205         ## Helper stuff for DebBugs Version Tracking
2206         if cnf.Find("Dir::BTSVersionTrack"):
2207             if self.pkg.changes["architecture"].has_key("source"):
2208                 (fd, temp_filename) = utils.temp_filename(cnf["Dir::BTSVersionTrack"], prefix=".")
2209                 version_history = os.fdopen(fd, 'w')
2210                 version_history.write(self.pkg.dsc["bts changelog"])
2211                 version_history.close()
2212                 filename = "%s/%s" % (cnf["Dir::BTSVersionTrack"],
2213                                       self.pkg.changes_file[:-8]+".versions")
2214                 os.rename(temp_filename, filename)
2215                 os.chmod(filename, 0644)
2216
2217             # Write out the binary -> source mapping.
2218             (fd, temp_filename) = utils.temp_filename(cnf["Dir::BTSVersionTrack"], prefix=".")
2219             debinfo = os.fdopen(fd, 'w')
2220             for name, entry in sorted(self.pkg.files.items()):
2221                 if entry["type"] == "deb":
2222                     line = " ".join([entry["package"], entry["version"],
2223                                      entry["architecture"], entry["source package"],
2224                                      entry["source version"]])
2225                     debinfo.write(line+"\n")
2226             debinfo.close()
2227             filename = "%s/%s" % (cnf["Dir::BTSVersionTrack"],
2228                                   self.pkg.changes_file[:-8]+".debinfo")
2229             os.rename(temp_filename, filename)
2230             os.chmod(filename, 0644)
2231
2232         session.commit()
2233
2234         # Set up our copy queues (e.g. buildd queues)
2235         for suite_name in self.pkg.changes["distribution"].keys():
2236             suite = get_suite(suite_name, session)
2237             for q in suite.copy_queues:
2238                 for f in poolfiles:
2239                     q.add_file_from_pool(f)
2240
2241         session.commit()
2242
2243         # Finally...
2244         stats.accept_count += 1
2245
    def check_override(self):
        """
        Check override entries for validity and mail an "Override disparity"
        warning if the upload disagrees with the override database.

        Returns without doing anything if
          - override disparity checks are disabled via
            Dinstall::OverrideDisparityCheck, or
          - the package's override check produces no disparity summary.
        """

        cnf = Config()

        # Abandon the check if override disparity checks have been disabled
        if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
            return

        summary = self.pkg.check_override()

        # An empty summary means the upload matches the overrides; nothing to report.
        if summary == "":
            return

        overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')

        # Fill in the template and send the disparity warning mail.
        self.update_subst()
        self.Subst["__SUMMARY__"] = summary
        mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
        utils.send_mail(mail_message)
        del self.Subst["__SUMMARY__"]
2274
2275     ###########################################################################
2276
2277     def remove(self, from_dir=None):
2278         """
2279         Used (for instance) in p-u to remove the package from unchecked
2280
2281         Also removes the package from holding area.
2282         """
2283         if from_dir is None:
2284             from_dir = self.pkg.directory
2285         h = Holding()
2286
2287         for f in self.pkg.files.keys():
2288             os.unlink(os.path.join(from_dir, f))
2289             if os.path.exists(os.path.join(h.holding_dir, f)):
2290                 os.unlink(os.path.join(h.holding_dir, f))
2291
2292         os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2293         if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2294             os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2295
2296     ###########################################################################
2297
2298     def move_to_queue (self, queue):
2299         """
2300         Move files to a destination queue using the permissions in the table
2301         """
2302         h = Holding()
2303         utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2304                    queue.path, perms=int(queue.change_perms, 8))
2305         for f in self.pkg.files.keys():
2306             utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
2307
2308     ###########################################################################
2309
    def force_reject(self, reject_files):
        """
        Forcefully move files from the current directory to the
        reject directory.  If a file of the same name already exists in
        the reject directory, a numbered variant of the name is used
        instead (via utils.find_next_free).

        @type reject_files: list
        @param reject_files: names of the files to move

        """

        cnf = Config()

        for file_entry in reject_files:
            # Skip any files which don't exist or which we don't have permission to copy.
            if os.access(file_entry, os.R_OK) == 0:
                continue

            dest_file = os.path.join(cnf["Dir::Reject"], file_entry)

            # O_EXCL means the open only succeeds if we create the file,
            # i.e. we exclusively own the destination and can't be raced.
            try:
                dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
            except OSError, e:
                # File exists?  Let's find a new name by adding a number
                if e.errno == errno.EEXIST:
                    try:
                        dest_file = utils.find_next_free(dest_file, 255)
                    except NoFreeFilenameError:
                        # Something's either gone badly Pete Tong, or
                        # someone is trying to exploit us.
                        utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Reject"]))
                        # Give up on the whole batch, not just this file.
                        return

                    # Make sure we really got it
                    try:
                        dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
                    except OSError, e:
                        # Likewise
                        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
                        return
                else:
                    raise
            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file_entry, dest_file, 1, perms=0660)
            os.close(dest_fd)
2357
2358     ###########################################################################
    def do_reject (self, manual=0, reject_message="", notes=""):
        """
        Reject an upload: move its files to the reject directory, write a
        <changes>.reason file and send a rejection mail.  If C{manual} is
        true and no reject message was supplied, spawn an editor so the
        user can write one.

        @type manual: bool
        @param manual: manual or automated rejection

        @type reject_message: string
        @param reject_message: A reject message

        @type notes: list
        @param notes: note objects (with author/version/notedate/comment
            attributes) used to pre-fill the editor buffer; may be empty

        @return: 0 on rejection; 1 if a manual rejection was abandoned

        """
        # If we weren't given a manual rejection message, spawn an
        # editor so the user can add one in...
        if manual and not reject_message:
            (fd, temp_filename) = utils.temp_filename()
            temp_file = os.fdopen(fd, 'w')
            # Seed the editor buffer with any existing notes.
            if len(notes) > 0:
                for note in notes:
                    temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
                                    % (note.author, note.version, note.notedate, note.comment))
            temp_file.close()
            editor = os.environ.get("EDITOR","vi")
            answer = 'E'
            # Keep re-opening the editor until the user settles on
            # Reject, Abandon or Quit.
            while answer == 'E':
                os.system("%s %s" % (editor, temp_filename))
                temp_fh = utils.open_file(temp_filename)
                reject_message = "".join(temp_fh.readlines())
                temp_fh.close()
                print "Reject message:"
                print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
                prompt = "[R]eject, Edit, Abandon, Quit ?"
                answer = "XXX"
                # An empty answer selects the prompt's bracketed default.
                while prompt.find(answer) == -1:
                    answer = utils.our_raw_input(prompt)
                    m = re_default_answer.search(prompt)
                    if answer == "":
                        answer = m.group(1)
                    answer = answer[:1].upper()
            os.unlink(temp_filename)
            if answer == 'A':
                return 1
            elif answer == 'Q':
                sys.exit(0)

        print "Rejecting.\n"

        cnf = Config()

        # The <changes>.reason file records why the upload was rejected.
        reason_filename = self.pkg.changes_file[:-8] + ".reason"
        reason_filename = os.path.join(cnf["Dir::Reject"], reason_filename)

        # Move all the files into the reject directory
        reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
        self.force_reject(reject_files)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reason_filename):
            os.unlink(reason_filename)
        reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)

        rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")

        self.update_subst()
        if not manual:
            # Automated rejection: the mail comes from dak itself and the
            # raw reject message becomes the .reason file.
            self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
            self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
            self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
            os.write(reason_fd, reject_message)
            reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
        else:
            # Build up the rejection email
            user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
            self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
            self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
            self.Subst["__REJECT_MESSAGE__"] = ""
            self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
            reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
            # Write the rejection email out as the <foo>.reason file
            os.write(reason_fd, reject_mail_message)

        del self.Subst["__REJECTOR_ADDRESS__"]
        del self.Subst["__MANUAL_REJECT_MESSAGE__"]
        del self.Subst["__CC__"]

        os.close(reason_fd)

        # Send the rejection mail
        utils.send_mail(reject_mail_message)

        if self.logger:
            self.logger.log(["rejected", self.pkg.changes_file])

        stats = SummaryStats()
        stats.reject_count += 1
        return 0
2458
2459     ################################################################################
2460     def in_override_p(self, package, component, suite, binary_type, filename, session):
2461         """
2462         Check if a package already has override entries in the DB
2463
2464         @type package: string
2465         @param package: package name
2466
2467         @type component: string
2468         @param component: database id of the component
2469
2470         @type suite: int
2471         @param suite: database id of the suite
2472
2473         @type binary_type: string
2474         @param binary_type: type of the package
2475
2476         @type filename: string
2477         @param filename: filename we check
2478
2479         @return: the database result. But noone cares anyway.
2480
2481         """
2482
2483         cnf = Config()
2484
2485         if binary_type == "": # must be source
2486             file_type = "dsc"
2487         else:
2488             file_type = binary_type
2489
2490         # Override suite name; used for example with proposed-updates
2491         oldsuite = get_suite(suite, session)
2492         if (not oldsuite is None) and oldsuite.overridesuite:
2493             suite = oldsuite.overridesuite
2494
2495         result = get_override(package, suite, component, file_type, session)
2496
2497         # If checking for a source package fall back on the binary override type
2498         if file_type == "dsc" and len(result) < 1:
2499             result = get_override(package, suite, component, ['deb', 'udeb'], session)
2500
2501         # Remember the section and priority so we can check them later if appropriate
2502         if len(result) > 0:
2503             result = result[0]
2504             self.pkg.files[filename]["override section"] = result.section.section
2505             self.pkg.files[filename]["override priority"] = result.priority.priority
2506             return result
2507
2508         return None
2509
2510     ################################################################################
2511     def get_anyversion(self, sv_list, suite):
2512         """
2513         @type sv_list: list
2514         @param sv_list: list of (suite, version) tuples to check
2515
2516         @type suite: string
2517         @param suite: suite name
2518
2519         Description: TODO
2520         """
2521         Cnf = Config()
2522         anyversion = None
2523         anysuite = [suite] + [ vc.reference.suite_name for vc in get_version_checks(suite, "Enhances") ]
2524         for (s, v) in sv_list:
2525             if s in [ x.lower() for x in anysuite ]:
2526                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2527                     anyversion = v
2528
2529         return anyversion
2530
2531     ################################################################################
2532
    def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
        """
        Ensure versions are newer than existing packages in target
        suites and that cross-suite version checking rules as
        set out in the conf file are satisfied.

        Appends problems to self.rejects and non-fatal issues to
        self.warnings; may also record suites in
        self.pkg.changes["propdistribution"] when a conflict can be
        resolved by propagating the upload to another suite.

        @type sv_list: list
        @param sv_list: list of (suite, version) tuples to check

        @type filename: string
        @param filename: name of the file being checked; used in
            reject/warning messages

        @type new_version: string
        @param new_version: version of the upload being checked

        @type sourceful: bool
        @param sourceful: whether this is a sourceful upload; the
            "must be newer than" rule is only enforced when True
        """

        # NOTE(review): cnf is not used anywhere in this method.
        cnf = Config()

        # Check versions for each target suite
        for target_suite in self.pkg.changes["distribution"].keys():
            # Check we can find the target suite
            # NOTE(review): unlike other call sites, no session is passed
            # here -- presumably get_suite opens its own; confirm.
            ts = get_suite(target_suite)
            if ts is None:
                self.rejects.append("Cannot find target suite %s to perform version checks" % target_suite)
                continue

            must_be_newer_than = [ vc.reference.suite_name for vc in get_version_checks(target_suite, "MustBeNewerThan") ]
            must_be_older_than = [ vc.reference.suite_name for vc in get_version_checks(target_suite, "MustBeOlderThan") ]

            # Enforce "must be newer than target suite" even if conffile omits it
            if target_suite not in must_be_newer_than:
                must_be_newer_than.append(target_suite)

            for (suite, existent_version) in sv_list:
                vercmp = apt_pkg.VersionCompare(new_version, existent_version)

                # vercmp < 1 means the new version is <= the existing one.
                if suite in must_be_newer_than and sourceful and vercmp < 1:
                    self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))

                # vercmp > -1 means the new version is >= the existing one.
                if suite in must_be_older_than and vercmp > -1:
                    cansave = 0

                    if self.pkg.changes.get('distribution-version', {}).has_key(suite):
                        # we really use the other suite, ignoring the conflicting one ...
                        addsuite = self.pkg.changes["distribution-version"][suite]

                        add_version = self.get_anyversion(sv_list, addsuite)
                        target_version = self.get_anyversion(sv_list, target_suite)

                        if not add_version:
                            # not add_version can only happen if we map to a suite
                            # that doesn't enhance the suite we're propup'ing from.
                            # so "propup-ver x a b c; map a d" is a problem only if
                            # d doesn't enhance a.
                            #
                            # i think we could always propagate in this case, rather
                            # than complaining. either way, this isn't a REJECT issue
                            #
                            # And - we really should complain to the dorks who configured dak
                            self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite))
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            cansave = 1
                        elif not target_version:
                            # not target_version is true when the package is NEW
                            # we could just stick with the "...old version..." REJECT
                            # for this, I think.
                            self.rejects.append("Won't propogate NEW packages.")
                        elif apt_pkg.VersionCompare(new_version, add_version) < 0:
                            # propagation would be redundant. no need to reject though.
                            self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
                            cansave = 1
                        elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
                             apt_pkg.VersionCompare(add_version, target_version) >= 0:
                            # propagate!!
                            self.warnings.append("Propogating upload to %s" % (addsuite))
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            cansave = 1

                    if not cansave:
                        self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2615
2616     ################################################################################
2617     def check_binary_against_db(self, filename, session):
2618         # Ensure version is sane
2619         self.cross_suite_version_check( \
2620             get_suite_version_by_package(self.pkg.files[filename]["package"], \
2621                 self.pkg.files[filename]["architecture"], session),
2622             filename, self.pkg.files[filename]["version"], sourceful=False)
2623
2624         # Check for any existing copies of the file
2625         q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2626         q = q.filter_by(version=self.pkg.files[filename]["version"])
2627         q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2628
2629         if q.count() > 0:
2630             self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
2631
2632     ################################################################################
2633
2634     def check_source_against_db(self, filename, session):
2635         source = self.pkg.dsc.get("source")
2636         version = self.pkg.dsc.get("version")
2637
2638         # Ensure version is sane
2639         self.cross_suite_version_check( \
2640             get_suite_version_by_source(source, session), filename, version,
2641             sourceful=True)
2642
2643     ################################################################################
2644     def check_dsc_against_db(self, filename, session):
2645         """
2646
2647         @warning: NB: this function can remove entries from the 'files' index [if
2648          the orig tarball is a duplicate of the one in the archive]; if
2649          you're iterating over 'files' and call this function as part of
2650          the loop, be sure to add a check to the top of the loop to
2651          ensure you haven't just tried to dereference the deleted entry.
2652
2653         """
2654
2655         Cnf = Config()
2656         self.pkg.orig_files = {} # XXX: do we need to clear it?
2657         orig_files = self.pkg.orig_files
2658
2659         # Try and find all files mentioned in the .dsc.  This has
2660         # to work harder to cope with the multiple possible
2661         # locations of an .orig.tar.gz.
2662         # The ordering on the select is needed to pick the newest orig
2663         # when it exists in multiple places.
2664         for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2665             found = None
2666             if self.pkg.files.has_key(dsc_name):
2667                 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2668                 actual_size = int(self.pkg.files[dsc_name]["size"])
2669                 found = "%s in incoming" % (dsc_name)
2670
2671                 # Check the file does not already exist in the archive
2672                 ql = get_poolfile_like_name(dsc_name, session)
2673
2674                 # Strip out anything that isn't '%s' or '/%s$'
2675                 for i in ql:
2676                     if not i.filename.endswith(dsc_name):
2677                         ql.remove(i)
2678
2679                 # "[dak] has not broken them.  [dak] has fixed a
2680                 # brokenness.  Your crappy hack exploited a bug in
2681                 # the old dinstall.
2682                 #
2683                 # "(Come on!  I thought it was always obvious that
2684                 # one just doesn't release different files with
2685                 # the same name and version.)"
2686                 #                        -- ajk@ on d-devel@l.d.o
2687
2688                 if len(ql) > 0:
2689                     # Ignore exact matches for .orig.tar.gz
2690                     match = 0
2691                     if re_is_orig_source.match(dsc_name):
2692                         for i in ql:
2693                             if self.pkg.files.has_key(dsc_name) and \
2694                                int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2695                                self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2696                                 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2697                                 # TODO: Don't delete the entry, just mark it as not needed
2698                                 # This would fix the stupidity of changing something we often iterate over
2699                                 # whilst we're doing it
2700                                 del self.pkg.files[dsc_name]
2701                                 dsc_entry["files id"] = i.file_id
2702                                 if not orig_files.has_key(dsc_name):
2703                                     orig_files[dsc_name] = {}
2704                                 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2705                                 match = 1
2706
2707                                 # Don't bitch that we couldn't find this file later
2708                                 try:
2709                                     self.later_check_files.remove(dsc_name)
2710                                 except ValueError:
2711                                     pass
2712
2713
2714                     if not match:
2715                         self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2716
2717             elif re_is_orig_source.match(dsc_name):
2718                 # Check in the pool
2719                 ql = get_poolfile_like_name(dsc_name, session)
2720
2721                 # Strip out anything that isn't '%s' or '/%s$'
2722                 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2723                 for i in ql:
2724                     if not i.filename.endswith(dsc_name):
2725                         ql.remove(i)
2726
2727                 if len(ql) > 0:
2728                     # Unfortunately, we may get more than one match here if,
2729                     # for example, the package was in potato but had an -sa
2730                     # upload in woody.  So we need to choose the right one.
2731
2732                     # default to something sane in case we don't match any or have only one
2733                     x = ql[0]
2734
2735                     if len(ql) > 1:
2736                         for i in ql:
2737                             old_file = os.path.join(i.location.path, i.filename)
2738                             old_file_fh = utils.open_file(old_file)
2739                             actual_md5 = apt_pkg.md5sum(old_file_fh)
2740                             old_file_fh.close()
2741                             actual_size = os.stat(old_file)[stat.ST_SIZE]
2742                             if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2743                                 x = i
2744
2745                     old_file = os.path.join(i.location.path, i.filename)
2746                     old_file_fh = utils.open_file(old_file)
2747                     actual_md5 = apt_pkg.md5sum(old_file_fh)
2748                     old_file_fh.close()
2749                     actual_size = os.stat(old_file)[stat.ST_SIZE]
2750                     found = old_file
2751                     suite_type = x.location.archive_type
2752                     # need this for updating dsc_files in install()
2753                     dsc_entry["files id"] = x.file_id
2754                     # See install() in process-accepted...
2755                     if not orig_files.has_key(dsc_name):
2756                         orig_files[dsc_name] = {}
2757                     orig_files[dsc_name]["id"] = x.file_id
2758                     orig_files[dsc_name]["path"] = old_file
2759                     orig_files[dsc_name]["location"] = x.location.location_id
2760                 else:
2761                     # TODO: Determine queue list dynamically
2762                     # Not there? Check the queue directories...
2763                     for queue_name in [ "byhand", "new", "proposedupdates", "oldproposedupdates", "embargoed", "unembargoed" ]:
2764                         queue = get_policy_queue(queue_name, session)
2765                         if not queue:
2766                             continue
2767
2768                         in_otherdir = os.path.join(queue.path, dsc_name)
2769
2770                         if os.path.exists(in_otherdir):
2771                             in_otherdir_fh = utils.open_file(in_otherdir)
2772                             actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2773                             in_otherdir_fh.close()
2774                             actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2775                             found = in_otherdir
2776                             if not orig_files.has_key(dsc_name):
2777                                 orig_files[dsc_name] = {}
2778                             orig_files[dsc_name]["path"] = in_otherdir
2779
2780                     if not found:
2781                         self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2782                         continue
2783             else:
2784                 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2785                 continue
2786             if actual_md5 != dsc_entry["md5sum"]:
2787                 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2788             if actual_size != int(dsc_entry["size"]):
2789                 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2790
2791     ################################################################################
2792     # This is used by process-new and process-holding to recheck a changes file
2793     # at the time we're running.  It mainly wraps various other internal functions
2794     # and is similar to accepted_checks - these should probably be tidied up
2795     # and combined
2796     def recheck(self, session):
2797         cnf = Config()
2798         for f in self.pkg.files.keys():
2799             # The .orig.tar.gz can disappear out from under us is it's a
2800             # duplicate of one in the archive.
2801             if not self.pkg.files.has_key(f):
2802                 continue
2803
2804             entry = self.pkg.files[f]
2805
2806             # Check that the source still exists
2807             if entry["type"] == "deb":
2808                 source_version = entry["source version"]
2809                 source_package = entry["source package"]
2810                 if not self.pkg.changes["architecture"].has_key("source") \
2811                    and not source_exists(source_package, source_version, \
2812                     suites = self.pkg.changes["distribution"].keys(), session = session):
2813                     source_epochless_version = re_no_epoch.sub('', source_version)
2814                     dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2815                     found = False
2816                     for queue_name in ["embargoed", "unembargoed", "newstage"]:
2817                         queue = get_policy_queue(queue_name, session)
2818                         if queue and os.path.exists(os.path.join(queue.path, dsc_filename)):
2819                             found = True
2820                     if not found:
2821                         self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2822
2823             # Version and file overwrite checks
2824             if entry["type"] == "deb":
2825                 self.check_binary_against_db(f, session)
2826             elif entry["type"] == "dsc":
2827                 self.check_source_against_db(f, session)
2828                 self.check_dsc_against_db(f, session)
2829
2830     ################################################################################
2831     def accepted_checks(self, overwrite_checks, session):
2832         # Recheck anything that relies on the database; since that's not
2833         # frozen between accept and our run time when called from p-a.
2834
2835         # overwrite_checks is set to False when installing to stable/oldstable
2836
2837         propogate={}
2838         nopropogate={}
2839
2840         # Find the .dsc (again)
2841         dsc_filename = None
2842         for f in self.pkg.files.keys():
2843             if self.pkg.files[f]["type"] == "dsc":
2844                 dsc_filename = f
2845
2846         for checkfile in self.pkg.files.keys():
2847             # The .orig.tar.gz can disappear out from under us is it's a
2848             # duplicate of one in the archive.
2849             if not self.pkg.files.has_key(checkfile):
2850                 continue
2851
2852             entry = self.pkg.files[checkfile]
2853
2854             # Check that the source still exists
2855             if entry["type"] == "deb":
2856                 source_version = entry["source version"]
2857                 source_package = entry["source package"]
2858                 if not self.pkg.changes["architecture"].has_key("source") \
2859                    and not source_exists(source_package, source_version, \
2860                     suites = self.pkg.changes["distribution"].keys(), \
2861                     session = session):
2862                     self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2863
2864             # Version and file overwrite checks
2865             if overwrite_checks:
2866                 if entry["type"] == "deb":
2867                     self.check_binary_against_db(checkfile, session)
2868                 elif entry["type"] == "dsc":
2869                     self.check_source_against_db(checkfile, session)
2870                     self.check_dsc_against_db(dsc_filename, session)
2871
2872             # propogate in the case it is in the override tables:
2873             for suite in self.pkg.changes.get("propdistribution", {}).keys():
2874                 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2875                     propogate[suite] = 1
2876                 else:
2877                     nopropogate[suite] = 1
2878
2879         for suite in propogate.keys():
2880             if suite in nopropogate:
2881                 continue
2882             self.pkg.changes["distribution"][suite] = 1
2883
2884         for checkfile in self.pkg.files.keys():
2885             # Check the package is still in the override tables
2886             for suite in self.pkg.changes["distribution"].keys():
2887                 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2888                     self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2889
2890     ################################################################################
2891     # If any file of an upload has a recent mtime then chances are good
2892     # the file is still being uploaded.
2893
2894     def upload_too_new(self):
2895         cnf = Config()
2896         too_new = False
2897         # Move back to the original directory to get accurate time stamps
2898         cwd = os.getcwd()
2899         os.chdir(self.pkg.directory)
2900         file_list = self.pkg.files.keys()
2901         file_list.extend(self.pkg.dsc_files.keys())
2902         file_list.append(self.pkg.changes_file)
2903         for f in file_list:
2904             try:
2905                 last_modified = time.time()-os.path.getmtime(f)
2906                 if last_modified < int(cnf["Dinstall::SkipTime"]):
2907                     too_new = True
2908                     break
2909             except:
2910                 pass
2911
2912         os.chdir(cwd)
2913         return too_new
2914
2915     def store_changelog(self):
2916
2917         # Skip binary-only upload if it is not a bin-NMU
2918         if not self.pkg.changes['architecture'].has_key('source'):
2919             from daklib.regexes import re_bin_only_nmu
2920             if not re_bin_only_nmu.search(self.pkg.changes['version']):
2921                 return
2922
2923         session = DBConn().session()
2924
2925         # Check if upload already has a changelog entry
2926         query = """SELECT changelog_id FROM changes WHERE source = :source
2927                    AND version = :version AND architecture = :architecture AND changelog_id != 0"""
2928         if session.execute(query, {'source': self.pkg.changes['source'], \
2929                                    'version': self.pkg.changes['version'], \
2930                                    'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
2931             session.commit()
2932             return
2933
2934         # Add current changelog text into changelogs_text table, return created ID
2935         query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
2936         ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]
2937
2938         # Link ID to the upload available in changes table
2939         query = """UPDATE changes SET changelog_id = :id WHERE source = :source
2940                    AND version = :version AND architecture = :architecture"""
2941         session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
2942                                 'version': self.pkg.changes['version'], \
2943                                 'architecture': " ".join(self.pkg.changes['architecture'].keys())})
2944
2945         session.commit()