#!/usr/bin/env python
# vim:set et sw=4:

"""
Queue utility functions for dak

@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2001 - 2006 James Troup <james@nocrew.org>
@copyright: 2009, 2010  Joerg Jaspert <joerg@debian.org>
@license: GNU General Public License version 2 or later
"""

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

###############################################################################

import errno
import os
import stat
import sys
import time
import apt_inst
import apt_pkg
import utils
import commands
import shutil
import textwrap
from types import *
from sqlalchemy.sql.expression import desc
from sqlalchemy.orm.exc import NoResultFound

import yaml

from dak_exceptions import *
from changes import *
from regexes import *
from config import Config
from holding import Holding
from urgencylog import UrgencyLog
from dbconn import *
from summarystats import SummaryStats
from utils import parse_changes, check_dsc_files
from textutils import fix_maintainer
from lintian import parse_lintian_output, generate_reject_messages
from contents import UnpackedSource

# suppress some deprecation warnings in squeeze related to apt_pkg
# module
import warnings
warnings.filterwarnings('ignore',
    r"apt_pkg.ParseSection\(\) is deprecated. Please see apt_pkg\.TagSection\(\) for the replacement\.",
    DeprecationWarning)
warnings.filterwarnings('ignore',
    r"Attribute 'Find' of the 'apt_pkg\.TagSection' object is deprecated, use 'find' instead\.",
    DeprecationWarning)

###############################################################################

def get_type(f, session):
    """
    Get the file type of C{f}

    @type f: dict
    @param f: file entry from Changes object

    @type session: SQLA Session
    @param session: SQL Alchemy session object

    @rtype: string
    @return: filetype

    """
    # Determine the type
    if f.has_key("dbtype"):
        file_type = f["dbtype"]
    elif re_source_ext.match(f["type"]):
        file_type = "dsc"
    else:
        file_type = f["type"]
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (file_type))

    # Validate the override type
    type_id = get_override_type(file_type, session)
    if type_id is None:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))

    return file_type

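# Illustrative call (not part of dak; the file entry values are hypothetical).
# A "dbtype" set earlier by the binary checks takes precedence:
#
#     entry = {"dbtype": "deb", "type": "deb"}
#     file_type = get_type(entry, session)   # -> "deb"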
################################################################################

# Determine what parts in a .changes are NEW

def determine_new(filename, changes, files, warn=1, session = None):
    """
    Determine what parts in a C{changes} file are NEW.

    @type filename: str
    @param filename: changes filename

    @type changes: Upload.Pkg.changes dict
    @param changes: Changes dictionary

    @type files: Upload.Pkg.files dict
    @param files: Files dictionary

    @type warn: bool
    @param warn: Warn if overrides are added for (old)stable

    @rtype: tuple
    @return: (new, byhand): a dictionary of NEW components and a dictionary
             of byhand files.

    """
    # TODO: This should all use the database instead of parsing the changes
    # file again
    new = {}
    byhand = {}

    dbchg = get_dbchange(filename, session)
    if dbchg is None:
        print "Warning: cannot find changes file in database; won't check byhand"

    # Build up a list of potentially new things
    for name, f in files.items():
        # Keep a record of byhand elements
        if f["section"] == "byhand":
            byhand[name] = 1
            continue

        pkg = f["package"]
        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f, session)
        component = f["component"]

        if file_type == "dsc":
            priority = "source"

        if not new.has_key(pkg):
            new[pkg] = {}
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        else:
            old_type = new[pkg]["type"]
            if old_type != file_type:
                # source gets trumped by deb or udeb
                if old_type == "dsc":
                    new[pkg]["priority"] = priority
                    new[pkg]["section"] = section
                    new[pkg]["type"] = file_type
                    new[pkg]["component"] = component

        new[pkg]["files"].append(name)

        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]

    # Fix up the list of target suites
    cnf = Config()
    for suite in changes["suite"].keys():
        oldsuite = get_suite(suite, session)
        if not oldsuite:
            print "WARNING: Invalid suite %s found" % suite
            continue

        if oldsuite.overridesuite:
            newsuite = get_suite(oldsuite.overridesuite, session)

            if newsuite:
                print "INFORMATION: Using overrides from suite %s instead of suite %s" % (
                    oldsuite.overridesuite, suite)
                del changes["suite"][suite]
                changes["suite"][oldsuite.overridesuite] = 1
            else:
                print "WARNING: Told to use overridesuite %s for %s but it doesn't exist.  Bugger" % (
                    oldsuite.overridesuite, suite)

    # Check for unprocessed byhand files
    if dbchg is not None:
        for b in byhand.keys():
            # Find the file entry in the database
            found = False
            for f in dbchg.files:
                if f.filename == b:
                    found = True
                    # If it's processed, we can ignore it
                    if f.processed:
                        del byhand[b]
                    break

            if not found:
                print "Warning: Couldn't find BYHAND item %s in the database; assuming unprocessed" % (b)

    # Check for new stuff
    for suite in changes["suite"].keys():
        for pkg in new.keys():
            ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
            if len(ql) > 0:
                for file_entry in new[pkg]["files"]:
                    if files[file_entry].has_key("new"):
                        del files[file_entry]["new"]
                del new[pkg]

    if warn:
        for s in ['stable', 'oldstable']:
            if changes["suite"].has_key(s):
                print "WARNING: overrides will be added for %s!" % s
        for pkg in new.keys():
            if new[pkg].has_key("othercomponents"):
                print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])

    return new, byhand

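# Illustrative sketch (hypothetical names, not part of dak): callers hand in
# the parsed .changes and files dicts and get back what still lacks overrides
# plus any byhand items:
#
#     new, byhand = determine_new("foo_1.0_amd64.changes", u.pkg.changes,
#                                 u.pkg.files, session=session)
#     for pkg in new.keys():
#         print "NEW: %s (%s/%s)" % (pkg, new[pkg]["component"],
#                                    new[pkg]["section"])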
################################################################################

def check_valid(new, session = None):
    """
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages can not be udeb
      - source priority can only be assigned to dsc file types

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.

    """
    for pkg in new.keys():
        section_name = new[pkg]["section"]
        priority_name = new[pkg]["priority"]
        file_type = new[pkg]["type"]

        section = get_section(section_name, session)
        if section is None:
            new[pkg]["section id"] = -1
        else:
            new[pkg]["section id"] = section.section_id

        priority = get_priority(priority_name, session)
        if priority is None:
            new[pkg]["priority id"] = -1
        else:
            new[pkg]["priority id"] = priority.priority_id

        # Sanity checks
        di = section_name.find("debian-installer") != -1

        # If d-i, we must be udeb and vice-versa
        if (di and file_type not in ("udeb", "dsc")) or \
           (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1

        # If dsc we need to be source and vice-versa
        # (compare the name, not the Priority object fetched above)
        if (priority_name == "source" and file_type != "dsc") or \
           (priority_name != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1

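# Example of the sanity rules above (hypothetical input): a udeb filed under
# a non-debian-installer section has its section id invalidated, so a
# reviewer has to fix it up before accepting:
#
#     new = {"foo-udeb": {"section": "libs", "priority": "optional",
#                         "type": "udeb"}}
#     check_valid(new, session)
#     # now new["foo-udeb"]["section id"] == -1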
###############################################################################

# Used by Upload.check_timestamps
class TarTime(object):
    def __init__(self, future_cutoff, past_cutoff):
        self.reset()
        self.future_cutoff = future_cutoff
        self.past_cutoff = past_cutoff

    def reset(self):
        self.future_files = {}
        self.ancient_files = {}

    def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
        if MTime > self.future_cutoff:
            self.future_files[Name] = MTime
        if MTime < self.past_cutoff:
            self.ancient_files[Name] = MTime

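# Illustrative sketch (hypothetical, not part of dak): callback() matches the
# per-member callback signature of the old, now-deprecated apt_inst.debExtract()
# API, so an instance can collect archive members whose mtimes fall outside
# the allowed window:
#
#     tar = TarTime(future_cutoff, past_cutoff)
#     apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
#     if tar.future_files or tar.ancient_files:
#         # reject the upload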
###############################################################################

def prod_maintainer(notes, upload):
    cnf = Config()

    # Here we prepare an editor and get them ready to prod...
    (fd, temp_filename) = utils.temp_filename()
    temp_file = os.fdopen(fd, 'w')
    for note in notes:
        temp_file.write(note.comment)
    temp_file.close()
    editor = os.environ.get("EDITOR", "vi")
    answer = 'E'
    while answer == 'E':
        os.system("%s %s" % (editor, temp_filename))
        temp_fh = utils.open_file(temp_filename)
        prod_message = "".join(temp_fh.readlines())
        temp_fh.close()
        print "Prod message:"
        print utils.prefix_multi_line_string(prod_message, "  ", include_blank_lines=1)
        prompt = "[P]rod, Edit, Abandon, Quit ?"
        answer = "XXX"
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            m = re_default_answer.search(prompt)
            if answer == "":
                answer = m.group(1)
            answer = answer[:1].upper()
    os.unlink(temp_filename)
    if answer == 'A':
        return
    elif answer == 'Q':
        end()
        sys.exit(0)
    # Otherwise, do the prodding...
    user_email_address = utils.whoami() + " <%s>" % (
        cnf["Dinstall::MyAdminAddress"])

    Subst = upload.Subst

    Subst["__FROM_ADDRESS__"] = user_email_address
    Subst["__PROD_MESSAGE__"] = prod_message
    Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]

    prod_mail_message = utils.TemplateSubst(
        Subst, cnf["Dir::Templates"] + "/process-new.prod")

    # Send the prod mail
    utils.send_mail(prod_mail_message)

    print "Sent prodding message"

################################################################################

def edit_note(note, upload, session, trainee=False):
    # Write the current data to a temporary file
    (fd, temp_filename) = utils.temp_filename()
    editor = os.environ.get("EDITOR", "vi")
    answer = 'E'
    while answer == 'E':
        os.system("%s %s" % (editor, temp_filename))
        temp_file = utils.open_file(temp_filename)
        newnote = temp_file.read().rstrip()
        temp_file.close()
        print "New Note:"
        print utils.prefix_multi_line_string(newnote, "  ")
        prompt = "[D]one, Edit, Abandon, Quit ?"
        answer = "XXX"
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            m = re_default_answer.search(prompt)
            if answer == "":
                answer = m.group(1)
            answer = answer[:1].upper()
    os.unlink(temp_filename)
    if answer == 'A':
        return
    elif answer == 'Q':
        end()
        sys.exit(0)

    comment = NewComment()
    comment.package = upload.pkg.changes["source"]
    comment.version = upload.pkg.changes["version"]
    comment.comment = newnote
    comment.author  = utils.whoami()
    comment.trainee = trainee
    session.add(comment)
    session.commit()

###############################################################################

# suite names DMs can upload to
dm_suites = ['unstable', 'experimental']

def get_newest_source(source, session):
    'returns the newest DBSource object in dm_suites'
    ## the most recent version of the package uploaded to unstable or
    ## experimental includes the field "DM-Upload-Allowed: yes" in the source
    ## section of its control file
    q = session.query(DBSource).filter_by(source = source). \
        filter(DBSource.suites.any(Suite.suite_name.in_(dm_suites))). \
        order_by(desc('source.version'))
    return q.first()

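# Illustrative call (hypothetical package name):
#
#     src = get_newest_source("foo", session)
#     if src is not None:
#         print src.source, src.version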
def get_suite_version_by_source(source, session):
    'returns a list of tuples (suite_name, version) for source package'
    q = session.query(Suite.suite_name, DBSource.version). \
        join(Suite.sources).filter_by(source = source)
    return q.all()

def get_source_by_package_and_suite(package, suite_name, session):
    '''
    returns a DBSource query filtered by DBBinary.package and this package's
    suite_name
    '''
    return session.query(DBSource). \
        join(DBSource.binaries).filter_by(package = package). \
        join(DBBinary.suites).filter_by(suite_name = suite_name)

def get_suite_version_by_package(package, arch_string, session):
    '''
    returns a list of tuples (suite_name, version) for binary package and
    arch_string
    '''
    return session.query(Suite.suite_name, DBBinary.version). \
        join(Suite.binaries).filter_by(package = package). \
        join(DBBinary.architecture). \
        filter(Architecture.arch_string.in_([arch_string, 'all'])).all()

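# Illustrative sketches for the three helpers above (hypothetical names and
# results):
#
#     get_suite_version_by_source("foo", session)
#     # -> [(u'unstable', u'1.2-1'), (u'testing', u'1.1-2')]
#
#     get_source_by_package_and_suite("foo-bin", "unstable", session).first()
#     # -> DBSource for the source that builds foo-bin in unstable
#
#     get_suite_version_by_package("foo-bin", "amd64", session)
#     # -> [(suite_name, version), ...] for arch amd64 or arch all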
class Upload(object):
    """
    Everything that has to do with processing an upload.

    """
    def __init__(self):
        self.logger = None
        self.pkg = Changes()
        self.reset()

    ###########################################################################

    def reset(self):
        """ Reset a number of internal variables."""

        # Initialize the substitution template map
        cnf = Config()
        self.Subst = {}
        self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
        self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
        self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
        self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]

        self.rejects = []
        self.warnings = []
        self.notes = []

        self.later_check_files = []

        self.pkg.reset()

    def package_info(self):
        """
        Format various messages from this Upload to send to the maintainer.
        """

        msgs = (
            ('Reject Reasons', self.rejects),
            ('Warnings', self.warnings),
            ('Notes', self.notes),
        )

        msg = ''
        for title, messages in msgs:
            if messages:
                msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
        msg += '\n\n'

        return msg

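    # Sketch of the output shape (hypothetical content): with one reject
    # reason and one warning queued, package_info() returns roughly
    #
    #     "\n\nReject Reasons:\n<reason>\n\nWarnings:\n<warning>\n\n"
    #
    # which update_subst() below feeds into __REJECT_MESSAGE__.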
    ###########################################################################
    def update_subst(self):
        """ Set up the per-package template substitution mappings """

        cnf = Config()

        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not self.pkg.changes.has_key("architecture") or not \
           isinstance(self.pkg.changes["architecture"], dict):
            self.pkg.changes["architecture"] = { "Unknown" : "" }

        # and maintainer2047 may not exist.
        if not self.pkg.changes.has_key("maintainer2047"):
            self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]

        self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
        self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if self.pkg.changes["architecture"].has_key("source") and \
           self.pkg.changes["changedby822"] != "" and \
           (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):

            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
        else:
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")

        # Process policy doesn't set the fingerprint field and I don't want to make it
        # do it for now as I don't want to have to deal with the case where we accepted
        # the package into PU-NEW, but the fingerprint has gone away from the keyring in
        # the meantime so the package will be remarked as rejectable.  Urgh.
        # TODO: Fix this properly
        if self.pkg.changes.has_key('fingerprint'):
            session = DBConn().session()
            fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
            if fpr and self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
                if self.pkg.changes.has_key("sponsoremail"):
                    self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
            session.close()

        if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
            self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if cnf.get("Dinstall::OverrideMaintainer"):
            self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
            self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]

        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
        self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
        self.Subst["__SUITE__"] = ", ".join(self.pkg.changes["distribution"])

    ###########################################################################
    def load_changes(self, filename):
        """
        Load a changes file and set up a dictionary around it. Also checks for
        mandatory fields within.

        @type filename: string
        @param filename: Changes filename, full path.

        @rtype: boolean
        @return: whether the changes file was valid or not.  We may want to
                 reject even if this is True (see what gets put in self.rejects).
                 This is simply to prevent us even trying things later which will
                 fail because we couldn't properly parse the file.
        """
        Cnf = Config()
        self.pkg.changes_file = filename

        # Parse the .changes field into a dictionary
        try:
            self.pkg.changes.update(parse_changes(filename))
        except CantOpenError:
            self.rejects.append("%s: can't read file." % (filename))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except ChangesUnicodeError:
            self.rejects.append("%s: changes file not proper utf-8" % (filename))
            return False

        # Parse the Files field from the .changes into another dictionary
        try:
            self.pkg.files.update(utils.build_file_list(self.pkg.changes))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (filename, format))
            return False

        # Check for mandatory fields
        for i in ("distribution", "source", "binary", "architecture",
                  "version", "maintainer", "files", "changes", "description"):
            if not self.pkg.changes.has_key(i):
                # Avoid undefined errors later
                self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
                return False

        # Strip a source version in brackets from the source field
        if re_strip_srcver.search(self.pkg.changes["source"]):
            self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])

        # Ensure the source field is a valid package name.
        if not re_valid_pkg_name.match(self.pkg.changes["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))

        # Split multi-value fields into a lower-level dictionary
        for i in ("architecture", "distribution", "binary", "closes"):
            o = self.pkg.changes.get(i, "")
            if o != "":
                del self.pkg.changes[i]

            self.pkg.changes[i] = {}

            for j in o.split():
                self.pkg.changes[i][j] = 1

        # Fix the Maintainer: field to be RFC822/2047 compatible
        try:
            (self.pkg.changes["maintainer822"],
             self.pkg.changes["maintainer2047"],
             self.pkg.changes["maintainername"],
             self.pkg.changes["maintaineremail"]) = \
                   fix_maintainer(self.pkg.changes["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["maintainer"], msg))

        # ...likewise for the Changed-By: field if it exists.
        try:
            (self.pkg.changes["changedby822"],
             self.pkg.changes["changedby2047"],
             self.pkg.changes["changedbyname"],
             self.pkg.changes["changedbyemail"]) = \
                   fix_maintainer(self.pkg.changes.get("changed-by", ""))
        except ParseMaintError, msg:
            self.pkg.changes["changedby822"] = ""
            self.pkg.changes["changedby2047"] = ""
            self.pkg.changes["changedbyname"] = ""
            self.pkg.changes["changedbyemail"] = ""

            self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["changed-by"], msg))

        # Ensure all the values in Closes: are numbers
        if self.pkg.changes.has_key("closes"):
            for i in self.pkg.changes["closes"].keys():
                if re_isanum.match(i) is None:
                    self.rejects.append("%s: `%s' from Closes field isn't a number." % (filename, i))

        # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
        self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
        self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])

        # Check the .changes is non-empty
        if not self.pkg.files:
            self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
            return False

        # Changes was syntactically valid even if we'll reject
        return True

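    # Illustrative call sequence (hypothetical path, not part of dak):
    # load_changes() only reports whether the file parsed; rejects may still
    # have accumulated and must be checked separately:
    #
    #     u = Upload()
    #     if u.load_changes("/srv/queue/foo_1.0_amd64.changes"):
    #         u.check_files(action=False)
    #     print u.package_info()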
    ###########################################################################

    def check_distributions(self):
        "Check and map the Distribution field"

        Cnf = Config()

        # Handle suite mappings
        for m in Cnf.ValueList("SuiteMappings"):
            args = m.split()
            mtype = args[0]
            if mtype == "map" or mtype == "silent-map":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    del self.pkg.changes["distribution"][source]
                    self.pkg.changes["distribution"][dest] = 1
                    if mtype != "silent-map":
                        self.notes.append("Mapping %s to %s." % (source, dest))
                if self.pkg.changes.has_key("distribution-version"):
                    if self.pkg.changes["distribution-version"].has_key(source):
                        self.pkg.changes["distribution-version"][source] = dest
            elif mtype == "map-unreleased":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    for arch in self.pkg.changes["architecture"].keys():
                        if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
                            self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
                            del self.pkg.changes["distribution"][source]
                            self.pkg.changes["distribution"][dest] = 1
                            break
            elif mtype == "ignore":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    del self.pkg.changes["distribution"][suite]
                    self.warnings.append("Ignoring %s as a target suite." % (suite))
            elif mtype == "reject":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    self.rejects.append("Uploads to %s are not accepted." % (suite))
            elif mtype == "propup-version":
                # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
                #
                # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
                if self.pkg.changes["distribution"].has_key(args[1]):
                    self.pkg.changes.setdefault("distribution-version", {})
                    for suite in args[2:]:
                        self.pkg.changes["distribution-version"][suite] = suite

        # Ensure there is (still) a target distribution
        if len(self.pkg.changes["distribution"].keys()) < 1:
            self.rejects.append("No valid distribution remaining.")

        # Ensure target distributions exist
        for suite in self.pkg.changes["distribution"].keys():
            if not Cnf.has_key("Suite::%s" % (suite)):
                self.rejects.append("Unknown distribution `%s'." % (suite))

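    # The SuiteMappings entries consumed above look like the following
    # (hypothetical examples in dak.conf list syntax):
    #
    #     "map stable proposed-updates";
    #     "silent-map squeeze stable";
    #     "map-unreleased sid unstable";
    #     "ignore testing";
    #     "reject experimental-security";
    #     "propup-version testing-proposed-updates stable testing";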
    ###########################################################################

    def binary_file_checks(self, f, session):
        cnf = Config()
        entry = self.pkg.files[f]

        # Extract package control information
        deb_file = utils.open_file(f)
        try:
            control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
        except:
            self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
            deb_file.close()
            # Can't continue, none of the checks on control would work.
            return

        # Check for mandatory "Description:" (the subscript raises KeyError
        # if the field is missing)
        deb_file.seek(0)
        try:
            apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
        except:
            self.rejects.append("%s: Missing Description in binary package" % (f))
            return

        deb_file.close()

        # Check for mandatory fields
        for field in [ "Package", "Architecture", "Version" ]:
            if control.Find(field) is None:
                # Can't continue
                self.rejects.append("%s: No %s field in control." % (f, field))
                return

        # Ensure the package name matches the one given in the .changes
        if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
            self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))

        # Validate the package field
        package = control.Find("Package")
        if not re_valid_pkg_name.match(package):
            self.rejects.append("%s: invalid package name '%s'." % (f, package))

        # Validate the version field
        version = control.Find("Version")
        if not re_valid_version.match(version):
            self.rejects.append("%s: invalid version number '%s'." % (f, version))

        # Ensure the architecture of the .deb is one we know about.
        default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
        architecture = control.Find("Architecture")
        upload_suite = self.pkg.changes["distribution"].keys()[0]

        if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session = session)] \
           and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session = session)]:
            self.rejects.append("Unknown architecture '%s'." % (architecture))

        # Ensure the architecture of the .deb is one of the ones
        # listed in the .changes.
        if not self.pkg.changes["architecture"].has_key(architecture):
            self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))

        # Sanity-check the Depends field
        depends = control.Find("Depends")
        if depends == '':
            self.rejects.append("%s: Depends field is empty." % (f))

        # Sanity-check the Provides field
        provides = control.Find("Provides")
        if provides:
            provide = re_spacestrip.sub('', provides)
            if provide == '':
                self.rejects.append("%s: Provides field is empty." % (f))
            prov_list = provide.split(",")
            for prov in prov_list:
                if not re_valid_pkg_name.match(prov):
                    self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))

        # If there is a Built-Using field, we need to check we can find the
        # exact source version
        built_using = control.Find("Built-Using")
        if built_using:
            try:
                entry["built-using"] = []
                for dep in apt_pkg.parse_depends(built_using):
                    bu_s, bu_v, bu_e = dep[0]
                    # Check that it's an exact match dependency and we have
                    # some form of version
                    if bu_e != "=" or len(bu_v) < 1:
                        self.rejects.append("%s: Built-Using contains non strict dependency (%s %s %s)" % (f, bu_s, bu_e, bu_v))
                    else:
                        # Find the source id for this version
                        bu_so = get_sources_from_name(bu_s, version=bu_v, session = session)
                        if len(bu_so) != 1:
                            self.rejects.append("%s: Built-Using (%s = %s): Cannot find source package" % (f, bu_s, bu_v))
                        else:
                            entry["built-using"].append( (bu_so[0].source, bu_so[0].version, ) )

            except ValueError, e:
                self.rejects.append("%s: Cannot parse Built-Using field: %s" % (f, str(e)))


        # Check the section & priority match those given in the .changes (non-fatal)
        if control.Find("Section") and entry["section"] != "" \
           and entry["section"] != control.Find("Section"):
            self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
                                (f, control.Find("Section", ""), entry["section"]))
        if control.Find("Priority") and entry["priority"] != "" \
           and entry["priority"] != control.Find("Priority"):
            self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
                                (f, control.Find("Priority", ""), entry["priority"]))

        entry["package"] = package
        entry["architecture"] = architecture
        entry["version"] = version
        entry["maintainer"] = control.Find("Maintainer", "")

        if f.endswith(".udeb"):
            self.pkg.files[f]["dbtype"] = "udeb"
        elif f.endswith(".deb"):
            self.pkg.files[f]["dbtype"] = "deb"
        else:
            self.rejects.append("%s is neither a .deb nor a .udeb." % (f))

        entry["source"] = control.Find("Source", entry["package"])

        # Get the source version
        source = entry["source"]
        source_version = ""

        if source.find("(") != -1:
            m = re_extract_src_version.match(source)
            source = m.group(1)
            source_version = m.group(2)

        if not source_version:
            source_version = self.pkg.files[f]["version"]

        entry["source package"] = source
        entry["source version"] = source_version

        # Ensure the filename matches the contents of the .deb
        m = re_isadeb.match(f)

        #  package name
        file_package = m.group(1)
        if entry["package"] != file_package:
            self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
                                (f, file_package, entry["dbtype"], entry["package"]))
        epochless_version = re_no_epoch.sub('', control.Find("Version"))

        #  version
        file_version = m.group(2)
        if epochless_version != file_version:
            self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
                                (f, file_version, entry["dbtype"], epochless_version))

        #  architecture
        file_architecture = m.group(3)
        if entry["architecture"] != file_architecture:
            self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
                                (f, file_architecture, entry["dbtype"], entry["architecture"]))

        # Check that a corresponding source exists
        source_version = entry["source version"]
        source_package = entry["source package"]
        if self.pkg.changes["architecture"].has_key("source"):
            if source_version != self.pkg.changes["version"]:
                self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
                                    (source_version, f, self.pkg.changes["version"]))
        else:
            # Check in the SQL database
            if not source_exists(source_package, source_version, suites = \
                self.pkg.changes["distribution"].keys(), session = session):
                # Check in one of the other directories
                source_epochless_version = re_no_epoch.sub('', source_version)
                dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
                if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
                    entry["byhand"] = 1
                elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
                    entry["new"] = 1
                else:
                    dsc_file_exists = False
                    for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
                        if cnf.has_key("Dir::Queue::%s" % (myq)):
                            if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
                                dsc_file_exists = True
                                break

                    if not dsc_file_exists:
                        self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))

        # Check the version and for file overwrites
        self.check_binary_against_db(f, session)

    def source_file_checks(self, f, session):
        entry = self.pkg.files[f]

        m = re_issource.match(f)
        if not m:
            return

        entry["package"] = m.group(1)
        entry["version"] = m.group(2)
        entry["type"] = m.group(3)

        # Ensure the source package name matches the Source field in the .changes
        if self.pkg.changes["source"] != entry["package"]:
            self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))

        # Ensure the source version matches the version in the .changes file
        if re_is_orig_source.match(f):
            changes_version = self.pkg.changes["chopversion2"]
        else:
            changes_version = self.pkg.changes["chopversion"]

        if changes_version != entry["version"]:
            self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))

        # Ensure the .changes lists source in the Architecture field
        if not self.pkg.changes["architecture"].has_key("source"):
            self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))

        # Check the signature of a .dsc file
        if entry["type"] == "dsc":
            # check_signature returns either:
            #  (None, [list, of, rejects]) or (signature, [])
            (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
            for j in rejects:
                self.rejects.append(j)

        entry["architecture"] = "source"

    def per_suite_file_checks(self, f, suite, session):
        cnf = Config()
        entry = self.pkg.files[f]

        # Skip byhand
        if entry.has_key("byhand"):
            return

        # Check we have fields we need to do these checks
        oktogo = True
        for m in ['component', 'package', 'priority', 'size', 'md5sum']:
            if not entry.has_key(m):
                self.rejects.append("file '%s' does not have field %s set" % (f, m))
                oktogo = False

        if not oktogo:
            return

        # Handle component mappings
        for m in cnf.ValueList("ComponentMappings"):
            (source, dest) = m.split()
            if entry["component"] == source:
                entry["original component"] = source
                entry["component"] = dest

        # Ensure the component is valid for the target suite
        if cnf.has_key("Suite::%s::Components" % (suite)) and \
           entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
            self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
            return

        # Validate the component
        if not get_component(entry["component"], session):
            self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
            return

        # See if the package is NEW
        if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
            entry["new"] = 1

        # Validate the priority
        if entry["priority"].find('/') != -1:
            self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))

        # Determine the location
        location = cnf["Dir::Pool"]
        l = get_location(location, entry["component"], session=session)
        if l is None:
            self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
            entry["location id"] = -1
        else:
            entry["location id"] = l.location_id

        # Check the md5sum & size against existing files (if any)
        entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])

        found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
                                         entry["size"], entry["md5sum"], entry["location id"])

        if found is None:
            self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
        elif found is False and poolfile is not None:
            self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
        else:
            if poolfile is None:
                entry["files id"] = None
            else:
                entry["files id"] = poolfile.file_id

        # Check for packages that have moved from one component to another
        entry['suite'] = suite
        arch_list = [entry["architecture"], 'all']
        component = get_component_by_package_suite(self.pkg.files[f]['package'], \
            [suite], arch_list = arch_list, session = session)
        if component is not None:
            entry["othercomponents"] = component

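    # ComponentMappings entries handled above take the form "<from> <to>",
    # e.g. (hypothetical dak.conf value):
    #
    #     "non-US/main main";
    #
    # The original component is remembered in entry["original component"]
    # before the upload is moved into the mapped one.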
    def check_files(self, action=True):
        file_keys = self.pkg.files.keys()
        holding = Holding()
        cnf = Config()

        if action:
            cwd = os.getcwd()
            os.chdir(self.pkg.directory)
            for f in file_keys:
                ret = holding.copy_to_holding(f)
                if ret is not None:
                    self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)

            os.chdir(cwd)

        # check we already know the changes file
        # [NB: this check must be done post-suite mapping]
        base_filename = os.path.basename(self.pkg.changes_file)

        session = DBConn().session()

        try:
            dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
            # if in the pool or in a queue other than unchecked, reject
            if (dbc.in_queue is None) \
                   or (dbc.in_queue is not None
                       and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
                self.rejects.append("%s file already known to dak" % base_filename)
        except NoResultFound:
            # not known, good
            pass

        has_binaries = False
        has_source = False

        for f, entry in self.pkg.files.items():
            # Ensure the file does not already exist in one of the accepted directories
            for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                if not cnf.has_key("Dir::Queue::%s" % (d)): continue
                if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d)], f)):
                    self.rejects.append("%s file already exists in the %s directory." % (f, d))

            if not re_taint_free.match(f):
                self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))

            # Check the file is readable
            if os.access(f, os.R_OK) == 0:
                # When running in -n, copy_to_holding() won't have
                # generated the reject_message, so we need to.
                if action:
                    if os.path.exists(f):
                        self.rejects.append("Can't read `%s'. [permission denied]" % (f))
                    else:
                        # Don't directly reject, mark to check later to deal with orig's
                        # we can find in the pool
                        self.later_check_files.append(f)
                entry["type"] = "unreadable"
                continue

            # If it's byhand skip remaining checks
            if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Checks for a binary package...
            elif re_isadeb.match(f):
                has_binaries = True
                entry["type"] = "deb"

                # This routine appends to self.rejects/warnings as appropriate
                self.binary_file_checks(f, session)

            # Checks for a source package...
            elif re_issource.match(f):
                has_source = True

                # This routine appends to self.rejects/warnings as appropriate
                self.source_file_checks(f, session)

            # Not a binary or source package?  Assume byhand...
            else:
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Per-suite file checks
            entry["oldfiles"] = {}
            for suite in self.pkg.changes["distribution"].keys():
                self.per_suite_file_checks(f, suite, session)

        session.close()

        # If the .changes file says it has source, it must have source.
        if self.pkg.changes["architecture"].has_key("source"):
            if not has_source:
                self.rejects.append("no source found although the Architecture line in the changes mentions source.")

            if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
                self.rejects.append("source only uploads are not supported.")

    ###########################################################################
    def check_dsc(self, action=True, session=None):
        """Returns bool indicating whether or not the source changes are valid"""
        # Ensure there is source to check
        if not self.pkg.changes["architecture"].has_key("source"):
            return True

        # Find the .dsc
        dsc_filename = None
        for f, entry in self.pkg.files.items():
            if entry["type"] == "dsc":
                if dsc_filename:
                    self.rejects.append("cannot process a .changes file with multiple .dsc files.")
                    return False
                else:
                    dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            self.rejects.append("source uploads must contain a dsc file")
            return False

        # Parse the .dsc file
        try:
            self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1, dsc_file=1))
        except CantOpenError:
            # if not -n copy_to_holding() will have done this for us...
            if not action:
                self.rejects.append("%s: can't read file." % (dsc_filename))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
        except InvalidDscError, line:
            self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
        except ChangesUnicodeError:
            self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))

        # Build up the file list of files mentioned by the .dsc
        try:
            self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
        except NoFilesFieldError:
            self.rejects.append("%s: no Files: field." % (dsc_filename))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
            return False

        # Enforce mandatory fields
        for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
            if not self.pkg.dsc.has_key(i):
                self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
                return False

        # Validate the source and version fields
        if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
        if not re_valid_version.match(self.pkg.dsc["version"]):
            self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))

        # Only a limited list of source formats are allowed in each suite
        for dist in self.pkg.changes["distribution"].keys():
            allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
            if self.pkg.dsc["format"] not in allowed:
                self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))

        # Validate the Maintainer field
        try:
            # We ignore the return value
            fix_maintainer(self.pkg.dsc["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                                 % (dsc_filename, self.pkg.dsc["maintainer"], msg))

        # Validate the build-depends field(s)
        for field_name in [ "build-depends", "build-depends-indep" ]:
            field = self.pkg.dsc.get(field_name)
            if field:
                # Have apt try to parse them...
                try:
                    apt_pkg.ParseSrcDepends(field)
                except:
                    self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))

        # Ensure the version number in the .dsc matches the version number in the .changes
        epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        changes_version = self.pkg.files[dsc_filename]["version"]

        if epochless_dsc_version != changes_version:
            self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))

        # Ensure the Files field contains only what's expected
        self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))

        # Ensure source is newer than existing source in target suites
        session = DBConn().session()
        self.check_source_against_db(dsc_filename, session)
        self.check_dsc_against_db(dsc_filename, session)

        dbchg = get_dbchange(self.pkg.changes_file, session)

        # Finally, check if we're missing any files
        for f in self.later_check_files:
            print 'XXX: %s' % f
            # Check if we've already processed this file if we have a dbchg object
            ok = False
            if dbchg:
                for pf in dbchg.files:
                    if pf.filename == f and pf.processed:
                        self.notes.append('%s was already processed so we can go ahead' % f)
                        ok = True
                        del self.pkg.files[f]
            if not ok:
                self.rejects.append("Could not find file %s referenced in the changes file" % f)

        session.close()

        return True

1227     ###########################################################################
1228
1229     def get_changelog_versions(self, source_dir):
1230         """Extracts a the source package and (optionally) grabs the
1231         version history out of debian/changelog for the BTS."""
1232
1233         cnf = Config()
1234
1235         # Find the .dsc (again)
1236         dsc_filename = None
1237         for f in self.pkg.files.keys():
1238             if self.pkg.files[f]["type"] == "dsc":
1239                 dsc_filename = f
1240
1241         # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1242         if not dsc_filename:
1243             return
1244
1245         # Create a symlink mirror of the source files in our temporary directory
1246         for f in self.pkg.files.keys():
1247             m = re_issource.match(f)
1248             if m:
1249                 src = os.path.join(source_dir, f)
1250                 # If a file is missing for whatever reason, give up.
1251                 if not os.path.exists(src):
1252                     return
1253                 ftype = m.group(3)
1254                 if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
1255                    self.pkg.orig_files[f].has_key("path"):
1256                     continue
1257                 dest = os.path.join(os.getcwd(), f)
1258                 os.symlink(src, dest)
1259
1260         # If the orig files are not a part of the upload, create symlinks to the
1261         # existing copies.
1262         for orig_file in self.pkg.orig_files.keys():
1263             if not self.pkg.orig_files[orig_file].has_key("path"):
1264                 continue
1265             dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
1266             os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
1267
1268         # Extract the source
1269         try:
1270             unpacked = UnpackedSource(dsc_filename)
1271         except:
1272             self.rejects.append("'dpkg-source -x' failed for %s." % dsc_filename)
1273             return
1274
1275         if not cnf.Find("Dir::Queue::BTSVersionTrack"):
1276             return
1277
1278         # Get the upstream version
1279         upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1280         if re_strip_revision.search(upstr_version):
1281             upstr_version = re_strip_revision.sub('', upstr_version)
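        # Illustration (hypothetical version string): "1:1.4.2-3" loses its
        # epoch via re_no_epoch ("1.4.2-3") and then its Debian revision via
        # re_strip_revision, leaving the upstream version "1.4.2".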
1282
1283         # Ensure the changelog file exists
1284         changelog_file = unpacked.get_changelog_file()
1285         if changelog_file is None:
1286             self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
1287             return
1288
1289         # Parse the changelog
1290         self.pkg.dsc["bts changelog"] = ""
1291         for line in changelog_file.readlines():
1292             m = re_changelog_versions.match(line)
1293             if m:
1294                 self.pkg.dsc["bts changelog"] += line
1295         changelog_file.close()
1296         unpacked.cleanup()
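        # re_changelog_versions matches the heading line of each changelog
        # stanza, so "bts changelog" ends up holding lines of the form
        # (illustrative): "dak (1.0-2) unstable; urgency=low".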
1297
1298         # Check we found at least one revision in the changelog
1299         if not self.pkg.dsc["bts changelog"]:
1300             self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1301
1302     def check_source(self):
1303         # Bail out if there's no source
1305         if not self.pkg.changes["architecture"].has_key("source"):
1306             return
1307
1308         tmpdir = utils.temp_dirname()
1309
1310         # Move into the temporary directory
1311         cwd = os.getcwd()
1312         os.chdir(tmpdir)
1313
1314         # Get the changelog version history
1315         self.get_changelog_versions(cwd)
1316
1317         # Move back and cleanup the temporary tree
1318         os.chdir(cwd)
1319
1320         try:
1321             shutil.rmtree(tmpdir)
1322         except OSError, e:
1323             if e.errno != errno.EACCES:
1325                 utils.fubar("%s: couldn't remove tmp dir for source tree: %s" % (self.pkg.dsc["source"], e))
1326
1327             self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1328             # We probably have u-r or u-w directories so chmod everything
1329             # and try again.
1330             cmd = "chmod -R u+rwx %s" % (tmpdir)
1331             result = os.system(cmd)
1332             if result != 0:
1333                 utils.fubar("'%s' failed with result %s." % (cmd, result))
1334             shutil.rmtree(tmpdir)
1335         except Exception, e:
1337             utils.fubar("%s: couldn't remove tmp dir for source tree: %s" % (self.pkg.dsc["source"], e))
1338
1339     ###########################################################################
1340     def ensure_hashes(self):
1341         # Make sure we recognise the format of the Files: field in the .changes
1342         format = self.pkg.changes.get("format", "0.0").split(".", 1)
1343         if len(format) == 2:
1344             format = int(format[0]), int(format[1])
1345         else:
1346             format = int(float(format[0])), 0
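        # e.g. a "Format: 1.8" header parses to (1, 8); a bare "Format: 2"
        # (hypothetical) would take the else branch and become (2, 0).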
1347
1348         # We need to deal with the original changes blob, as the fields we need
1349         # might not be in the changes dict serialised into the .dak anymore.
1350         orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1351
1352         # Copy the checksums over to the current changes dict.  This will keep
1353         # the existing modifications to it intact.
1354         for field in orig_changes:
1355             if field.startswith('checksums-'):
1356                 self.pkg.changes[field] = orig_changes[field]
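        # The fields copied here are the multi-line "Checksums-Sha1:" /
        # "Checksums-Sha256:" blocks, each entry being "<hash> <size> <filename>".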
1357
1358         # Check for unsupported hashes
1359         for j in utils.check_hash_fields(".changes", self.pkg.changes):
1360             self.rejects.append(j)
1361
1362         for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1363             self.rejects.append(j)
1364
1365         # We have to calculate the hash ourselves if the .changes format predates
1366         # the version the hash first appeared in, rather than requiring it to be in the changes file
1367         for hashname, hashfunc, version in utils.known_hashes:
1368             # TODO: Move _ensure_changes_hash into this class
1369             for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1370                 self.rejects.append(j)
1371             if "source" in self.pkg.changes["architecture"]:
1372                 # TODO: Move _ensure_dsc_hash into this class
1373                 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1374                     self.rejects.append(j)
1375
1376     def check_hashes(self):
1377         for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1378             self.rejects.append(m)
1379
1380         for m in utils.check_size(".changes", self.pkg.files):
1381             self.rejects.append(m)
1382
1383         for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1384             self.rejects.append(m)
1385
1386         for m in utils.check_size(".dsc", self.pkg.dsc_files):
1387             self.rejects.append(m)
1388
1389         self.ensure_hashes()
1390
1391     ###########################################################################
1392
1393     def ensure_orig(self, target_dir='.', session=None):
1394         """
1395         Ensures that all orig files mentioned in the changes file are present
1396         in target_dir. If they do not exist, they are symlinked into place.
1397
1398         A list containing the symlinks that were created is returned (so they
1399         can be removed later).
1400         """
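        # Usage sketch (hypothetical caller) showing why the symlinks are
        # returned: the caller is expected to clean them up afterwards,
        # exactly as check_lintian() below does.
        #
        #   symlinked = self.ensure_orig(target_dir='.')
        #   try:
        #       run_checks()              # hypothetical work needing the origs
        #   finally:
        #       for link in symlinked:
        #           os.unlink(link)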
1401
1402         symlinked = []
1403         cnf = Config()
1404
1405         for filename, entry in self.pkg.dsc_files.iteritems():
1406             if not re_is_orig_source.match(filename):
1407                 # File is not an orig; ignore
1408                 continue
1409
1410             if os.path.exists(filename):
1411                 # File exists, no need to continue
1412                 continue
1413
1414             def symlink_if_valid(path):
1415                 f = utils.open_file(path)
1416                 md5sum = apt_pkg.md5sum(f)
1417                 f.close()
1418
1419                 fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
1420                 expected = (int(entry['size']), entry['md5sum'])
1421
1422                 if fingerprint != expected:
1423                     return False
1424
1425                 dest = os.path.join(target_dir, filename)
1426
1427                 os.symlink(path, dest)
1428                 symlinked.append(dest)
1429
1430                 return True
1431
1432             session_ = session
1433             if session is None:
1434                 session_ = DBConn().session()
1435
1436             found = False
1437
1438             # Look in the pool
1439             for poolfile in get_poolfile_like_name(filename, session_):
1440                 poolfile_path = os.path.join(
1441                     poolfile.location.path, poolfile.filename
1442                 )
1443
1444                 if symlink_if_valid(poolfile_path):
1445                     found = True
1446                     break
1447
1448             if session is None:
1449                 session_.close()
1450
1451             if found:
1452                 continue
1453
1454             # Look in some other queues for the file
1455             queues = ('New', 'Byhand', 'ProposedUpdates',
1456                 'OldProposedUpdates', 'Embargoed', 'Unembargoed')
1457
1458             for queue in queues:
1459                 if not cnf.get('Dir::Queue::%s' % queue):
1460                     continue
1461
1462                 queuefile_path = os.path.join(
1463                     cnf['Dir::Queue::%s' % queue], filename
1464                 )
1465
1466                 if not os.path.exists(queuefile_path):
1467                     # Does not exist in this queue
1468                     continue
1469
1470                 if symlink_if_valid(queuefile_path):
1471                     break
1472
1473         return symlinked
1474
1475     ###########################################################################
1476
1477     def check_lintian(self):
1478         """
1479         Extends self.rejects by checking the output of lintian against tags
1480         specified in Dinstall::LintianTags.
1481         """
1482
1483         cnf = Config()
1484
1485         # Don't reject binary uploads
1486         if not self.pkg.changes['architecture'].has_key('source'):
1487             return
1488
1489         # Only check some distributions
1490         for dist in ('unstable', 'experimental'):
1491             if dist in self.pkg.changes['distribution']:
1492                 break
1493         else:
1494             return
1495
1496         # If we do not have a tagfile, don't do anything
1497         tagfile = cnf.get("Dinstall::LintianTags")
1498         if tagfile is None:
1499             return
1500
1501         # Parse the yaml file
1502         sourcefile = file(tagfile, 'r')
1503         sourcecontent = sourcefile.read()
1504         sourcefile.close()
1505
1506         try:
1507             lintiantags = yaml.load(sourcecontent)['lintian']
1508         except yaml.YAMLError, msg:
1509             utils.fubar("Cannot read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
1510             return
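        # The tag file maps category names to tag lists under a top-level
        # "lintian" key; a minimal sketch (category names are illustrative):
        #
        #   lintian:
        #     fatal:
        #       - binary-with-bad-dynamic-table
        #     nonfatal:
        #       - debian-changelog-file-missing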
1511
1512         # Try and find all orig mentioned in the .dsc
1513         symlinked = self.ensure_orig()
1514
1515         # Set up the input file for lintian
1516         fd, temp_filename = utils.temp_filename()
1517         temptagfile = os.fdopen(fd, 'w')
1518         for tags in lintiantags.values():
1519             temptagfile.writelines(['%s\n' % x for x in tags])
1520         temptagfile.close()
1521
1522         try:
1523             cmd = "lintian --show-overrides --tags-from-file %s %s" % \
1524                 (temp_filename, self.pkg.changes_file)
1525
1526             result, output = commands.getstatusoutput(cmd)
1527         finally:
1528             # Remove our tempfile and any symlinks we created
1529             os.unlink(temp_filename)
1530
1531             for symlink in symlinked:
1532                 os.unlink(symlink)
1533
1534         if os.WEXITSTATUS(result) == 2:
1535             utils.warn("lintian failed for %s [exit code: %s]." % \
1536                 (self.pkg.changes_file, os.WEXITSTATUS(result)))
1537             utils.warn(utils.prefix_multi_line_string(output, \
1538                 " [possible output:] "))
1539
1540         def log(*txt):
1541             if self.logger:
1542                 self.logger.log(
1543                     [self.pkg.changes_file, "check_lintian"] + list(txt)
1544                 )
1545
1546         # Generate messages
1547         parsed_tags = parse_lintian_output(output)
1548         self.rejects.extend(
1549             generate_reject_messages(parsed_tags, lintiantags, log=log)
1550         )
1551
1552     ###########################################################################
1553     def check_urgency(self):
1554         cnf = Config()
1555         if self.pkg.changes["architecture"].has_key("source"):
1556             if not self.pkg.changes.has_key("urgency"):
1557                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1558             self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1559             if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1560                 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1561                                      (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1562                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1563
1564     ###########################################################################
1565
1566     # Sanity check the time stamps of files inside debs.
1567     # [Files in the near future cause ugly warnings and extreme time
1568     #  travel can cause errors on extraction]
1569
1570     def check_timestamps(self):
1571         Cnf = Config()
1572
1573         future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1574         past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1575         tar = TarTime(future_cutoff, past_cutoff)
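        # For example (hypothetical config): FutureTimeTravelGrace "86400" and
        # PastCutoffYear "1975" flag any member timestamped more than a day
        # into the future or before 1975-01-01.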
1576
1577         for filename, entry in self.pkg.files.items():
1578             if entry["type"] == "deb":
1579                 tar.reset()
1580                 try:
1581                     deb_file = utils.open_file(filename)
1582                     apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
1583                     deb_file.seek(0)
1584                     try:
1585                         apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
1586                     except SystemError, e:
1587                         # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
1588                         if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
1589                             raise
1590                         deb_file.seek(0)
1591                         apt_inst.debExtract(deb_file, tar.callback, "data.tar.bz2")
1592
1593                     deb_file.close()
1594
1595                     future_files = tar.future_files.keys()
1596                     if future_files:
1597                         num_future_files = len(future_files)
1598                         future_file = future_files[0]
1599                         future_date = tar.future_files[future_file]
1600                         self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1601                                % (filename, num_future_files, future_file, time.ctime(future_date)))
1602
1603                     ancient_files = tar.ancient_files.keys()
1604                     if ancient_files:
1605                         num_ancient_files = len(ancient_files)
1606                         ancient_file = ancient_files[0]
1607                         ancient_date = tar.ancient_files[ancient_file]
1608                         self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1609                                % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1610                 except:
1611                     self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_info()[0], sys.exc_info()[1]))
1612
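    # An upload counts as sponsored when the signing key matches neither the
    # Maintainer nor the Changed-By party; a key with an empty uid name is
    # treated as a sponsor even if the (empty) name happens to match.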
1613     def check_if_upload_is_sponsored(self, uid_email, uid_name):
1614         if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1615             sponsored = False
1616         elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1617             sponsored = False
1618             if uid_name == "":
1619                 sponsored = True
1620         else:
1621             sponsored = True
1622             if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1623                 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1624                 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1625                     self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1626                         self.pkg.changes["sponsoremail"] = uid_email
1627
1628         return sponsored
1629
1630
1631     ###########################################################################
1632     # check_signed_by_key checks
1633     ###########################################################################
1634
1635     def check_signed_by_key(self):
1636         """Ensure the .changes is signed by an authorized uploader."""
1637         session = DBConn().session()
1638
1639         # First of all we check that the person has proper upload permissions
1640         # and that this upload isn't blocked
1641         fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1642
1643         if fpr is None:
1644             self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1645             return
1646
1647         # TODO: Check that import-keyring adds UIDs properly
1648         if not fpr.uid:
1649             self.rejects.append("Cannot find uid for fingerprint %s.  Please contact ftpmaster@debian.org" % fpr.fingerprint)
1650             return
1651
1652         # Check that the fingerprint which uploaded has permission to do so
1653         self.check_upload_permissions(fpr, session)
1654
1655         # Check that this package is not in a transition
1656         self.check_transition(session)
1657
1658         session.close()
1659
1660
1661     def check_upload_permissions(self, fpr, session):
1662         # Check any one-off upload blocks
1663         self.check_upload_blocks(fpr, session)
1664
1665         # DM is a special case unfortunately, so we check it first
1667         # (keys with no source access get more access than DMs in one
1668         #  way; DMs can only upload for their packages whether source
1669         #  or binary, whereas keys with no access might be able to
1670         #  upload some binaries)
1671         if fpr.source_acl.access_level == 'dm':
1672             self.check_dm_upload(fpr, session)
1673         else:
1674             # Check source-based permissions for other types
1675             if self.pkg.changes["architecture"].has_key("source") and \
1676                 fpr.source_acl.access_level is None:
1677                 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1678                 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1679                 self.rejects.append(rej)
1680                 return
1681             # If not a DM, we allow full upload rights
1682             uid_email = "%s@debian.org" % (fpr.uid.uid)
1683             self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1684
1685
1686         # Check binary upload permissions
1687         # By this point we know that DMs can't have got here unless they
1688         # are allowed to deal with the package concerned so just apply
1689         # normal checks
1690         if fpr.binary_acl.access_level == 'full':
1691             return
1692
1693         # Otherwise we're in the map case
1694         tmparches = self.pkg.changes["architecture"].copy()
1695         tmparches.pop('source', None)
1696
1697         for bam in fpr.binary_acl_map:
1698             tmparches.pop(bam.architecture.arch_string, None)
1699
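        # e.g. (hypothetical ACL) an upload built for {amd64, all} by a key
        # whose binary_acl_map only grants amd64 leaves {all} behind here,
        # which is rejected below if fpr.binary_reject is set.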
1700         if tmparches:
1701             if fpr.binary_reject:
1702                 rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1703                 rej += "\narchitectures involved are: %s" % ",".join(tmparches.keys())
1704                 self.rejects.append(rej)
1705             else:
1706                 # TODO: This is where we'll implement reject vs throw away binaries later
1707                 rej = "Uhm.  I'm meant to throw away the binaries now but that's not implemented yet"
1708                 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1709                 rej += "\nFingerprint: %s" % (fpr.fingerprint)
1710                 self.rejects.append(rej)
1711
1712
1713     def check_upload_blocks(self, fpr, session):
1714         """Check whether any upload blocks apply to this source, source
1715            version, uid / fpr combination"""
1716
1717         def block_rej_template(fb):
1718             rej = 'Manual upload block in place for package %s' % fb.source
1719             if fb.version is not None:
1720                 rej += ', version %s' % fb.version
1721             return rej
1722
1723         for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1724             # version is None if the block applies to all versions
1725             if fb.version is None or fb.version == self.pkg.changes['version']:
1726                 # Check both fpr and uid - either is enough to cause a reject
1727                 if fb.fpr is not None:
1728                     if fb.fpr.fingerprint == fpr.fingerprint:
1729                         self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1730                 if fb.uid is not None:
1731                     if fb.uid == fpr.uid:
1732                         self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1733
1734
1735     def check_dm_upload(self, fpr, session):
1736         # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
1737         ## none of the uploaded packages are NEW
1738         rej = False
1739         for f in self.pkg.files.keys():
1740             if self.pkg.files[f].has_key("byhand"):
1741                 self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
1742                 rej = True
1743             if self.pkg.files[f].has_key("new"):
1744                 self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
1745                 rej = True
1746
1747         if rej:
1748             return
1749
1750         r = get_newest_source(self.pkg.changes["source"], session)
1751
1752         if r is None:
1753             rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
1754             self.rejects.append(rej)
1755             return
1756
1757         if not r.dm_upload_allowed:
1758             rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
1759             self.rejects.append(rej)
1760             return
1761
1762         ## the Maintainer: field of the uploaded .changes file corresponds with
1763         ## the owner of the key used (ie, non-developer maintainers may not sponsor
1764         ## uploads)
1765         if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
1766             self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
1767
1768         ## the most recent version of the package uploaded to unstable or
1769         ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
1770         ## non-developer maintainers cannot NMU or hijack packages)
1771
1772         # srcuploaders includes the maintainer
1773         accept = False
1774         for sup in r.srcuploaders:
1775             (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
1776             # Eww - I hope we never have two people with the same name in Debian
1777             if email == fpr.uid.uid or name == fpr.uid.name:
1778                 accept = True
1779                 break
1780
1781         if not accept:
1782             self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
1783             return
1784
1785         ## none of the packages are being taken over from other source packages
1786         for b in self.pkg.changes["binary"].keys():
1787             for suite in self.pkg.changes["distribution"].keys():
1788                 for s in get_source_by_package_and_suite(b, suite, session):
1789                     if s.source != self.pkg.changes["source"]:
1790                         self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s.source, suite))
1791
1792
1793
1794     def check_transition(self, session):
1795         cnf = Config()
1796
1797         sourcepkg = self.pkg.changes["source"]
1798
1799         # No sourceful upload -> no need to do anything else, direct return
1800         # We also only check unstable uploads, not experimental or those going to
1801         # some proposed-updates queue
1802         if "source" not in self.pkg.changes["architecture"] or \
1803            "unstable" not in self.pkg.changes["distribution"]:
1804             return
1805
1806         # Also only check if there is a file defined (and existent) with
1807         # checks.
1808         transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1809         if transpath == "" or not os.path.exists(transpath):
1810             return
1811
1812         # Parse the yaml file
1813         sourcefile = file(transpath, 'r')
1814         sourcecontent = sourcefile.read()
        sourcefile.close()
1815         try:
1816             transitions = yaml.load(sourcecontent)
1817         except yaml.YAMLError, msg:
1818             # This shouldn't happen, there is a wrapper to edit the file which
1819             # checks it, but we prefer being safe to ending up rejecting
1820             # everything.
1821             utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1822             return
1823
1824         # Now look through all defined transitions
1825         for trans in transitions:
1826             t = transitions[trans]
1827             source = t["source"]
1828             expected = t["new"]
1829
1830             # Will be None if nothing is in testing.
1831             current = get_source_in_suite(source, "testing", session)
1832             if current is not None:
1833                 compare = apt_pkg.VersionCompare(current.version, expected)
1834
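            # apt_pkg.VersionCompare(a, b) is negative when a < b, so
            # compare < 0 means testing still carries something older than
            # the version this transition is waiting for.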
1835             if current is None or compare < 0:
1836                 # This is still valid, the current version in testing is older than
1837                 # the new version we wait for, or there is none in testing yet
1838
1839                 # Check if the source we look at is affected by this.
1840                 if sourcepkg in t['packages']:
1841                     # The source is affected, let's reject it.
1842
1843                     rejectmsg = "%s: part of the %s transition.\n\n" % (
1844                         sourcepkg, trans)
1845
1846                     if current is not None:
1847                         currentlymsg = "at version %s" % (current.version)
1848                     else:
1849                         currentlymsg = "not present in testing"
1850
1851                     rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1852
1853                     rejectmsg += "\n".join(textwrap.wrap("""Your package
1854 is part of a testing transition designed to get %s migrated (it is
1855 currently %s, we need version %s).  This transition is managed by the
1856 Release Team, and %s is the Release-Team member responsible for it.
1857 Please mail debian-release@lists.debian.org or contact %s directly if you
1858 need further assistance.  You might want to upload to experimental until this
1859 transition is done."""
1860                             % (source, currentlymsg, expected, t["rm"], t["rm"])))
1861
1862                     self.rejects.append(rejectmsg)
1863                     return
1864
1865     ###########################################################################
1866     # End check_signed_by_key checks
1867     ###########################################################################
1868
1869     def build_summaries(self):
1870         """ Build a summary of changes the upload introduces. """
1871
1872         (byhand, new, summary, override_summary) = self.pkg.file_summary()
1873
1874         short_summary = summary
1875
1876         # This is for direport's benefit...
1877         f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1878
1879         if byhand or new:
1880             summary += "Changes: " + f
1881
1882         summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1883
1884         summary += self.announce(short_summary, 0)
1885
1886         return (summary, short_summary)
1887
1888     ###########################################################################
1889
1890     def close_bugs(self, summary, action):
1891         """
1892         Send mail to close bugs as instructed by the closes field in the changes file.
1893         Also add a line to summary if any work was done.
1894
1895         @type summary: string
1896         @param summary: summary text, as given by L{build_summaries}
1897
1898         @type action: bool
1899         @param action: If set to false, no real action will be done.
1900
1901         @rtype: string
1902         @return: summary. If action was taken, extended by the list of closed bugs.
1903
1904         """
1905
1906         template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1907
1908         bugs = self.pkg.changes["closes"].keys()
1909
1910         if not bugs:
1911             return summary
1912
1913         bugs.sort()
1914         summary += "Closing bugs: "
1915         for bug in bugs:
1916             summary += "%s " % (bug)
1917             if action:
1918                 self.update_subst()
1919                 self.Subst["__BUG_NUMBER__"] = bug
1920                 if self.pkg.changes["distribution"].has_key("stable"):
1921                     self.Subst["__STABLE_WARNING__"] = """
1922 Note that this package is not part of the released stable Debian
1923 distribution.  It may have dependencies on other unreleased software,
1924 or other instabilities.  Please take care if you wish to install it.
1925 The update will eventually make its way into the next released Debian
1926 distribution."""
1927                 else:
1928                     self.Subst["__STABLE_WARNING__"] = ""
1929                 mail_message = utils.TemplateSubst(self.Subst, template)
1930                 utils.send_mail(mail_message)
1931
1932                 # Clear up after ourselves
1933                 del self.Subst["__BUG_NUMBER__"]
1934                 del self.Subst["__STABLE_WARNING__"]
1935
1936         if action and self.logger:
1937             self.logger.log(["closing bugs"] + bugs)
1938
1939         summary += "\n"
1940
1941         return summary
1942
1943     ###########################################################################
1944
1945     def announce(self, short_summary, action):
1946         """
1947         Send an announce mail about a new upload.
1948
1949         @type short_summary: string
1950         @param short_summary: Short summary text to include in the mail
1951
1952         @type action: bool
1953         @param action: If set to false, no real action will be done.
1954
1955         @rtype: string
1956         @return: Text string describing the action taken.
1957
1958         """
1959
1960         cnf = Config()
1961         announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
1962
1963         # Only do announcements for source uploads with a recent dpkg-dev installed
1964         if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
1965            self.pkg.changes["architecture"].has_key("source"):
1966             return ""
1967
1968         lists_done = {}
1969         summary = ""
1970
1971         self.Subst["__SHORT_SUMMARY__"] = short_summary
1972
1973         for dist in self.pkg.changes["distribution"].keys():
1974             suite = get_suite(dist)
1975             if suite is None: continue
1976             announce_list = suite.announce
1977             if announce_list == "" or lists_done.has_key(announce_list):
1978                 continue
1979
1980             lists_done[announce_list] = 1
1981             summary += "Announcing to %s\n" % (announce_list)
1982
1983             if action:
1984                 self.update_subst()
1985                 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
1986                 if cnf.get("Dinstall::TrackingServer") and \
1987                    self.pkg.changes["architecture"].has_key("source"):
1988                     trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
1989                     self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
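                # e.g. (hypothetical config) TrackingServer set to
                # "packages.qa.debian.org" turns a source upload of "dak" into
                # an extra "Bcc: dak@packages.qa.debian.org" on the announcement.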
1990
1991                 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
1992                 utils.send_mail(mail_message)
1993
1994                 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
1995
1996         if cnf.FindB("Dinstall::CloseBugs"):
1997             summary = self.close_bugs(summary, action)
1998
1999         del self.Subst["__SHORT_SUMMARY__"]
2000
2001         return summary
2002
2003     ###########################################################################
2004     @session_wrapper
2005     def accept (self, summary, short_summary, session=None):
2006         """
2007         Accept an upload.
2008
2009         This moves all files referenced from the .changes into the pool,
2010         sends the accepted mail, announces to lists, closes bugs and
2011         also checks for override disparities. If enabled it will write out
2012         the version history for the BTS Version Tracking and will finally call
2013         L{queue_build}.
2014
2015         @type summary: string
2016         @param summary: Summary text
2017
2018         @type short_summary: string
2019         @param short_summary: Short summary
2020         """
2021
2022         cnf = Config()
2023         stats = SummaryStats()
2024
2025         print "Installing."
2026         self.logger.log(["installing changes", self.pkg.changes_file])
2027
2028         binaries = []
2029         poolfiles = []
2030
2031         # Add the .dsc file to the DB first
2032         for newfile, entry in self.pkg.files.items():
2033             if entry["type"] == "dsc":
2034                 source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
2035                 for j in pfs:
2036                     poolfiles.append(j)
2037
2038         # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
2039         for newfile, entry in self.pkg.files.items():
2040             if entry["type"] == "deb":
2041                 b, pf = add_deb_to_db(self, newfile, session)
2042                 binaries.append(b)
2043                 poolfiles.append(pf)
2044
2045         # If this is a sourceful diff only upload that is moving
2046         # cross-component we need to copy the .orig files into the new
2047         # component too for the same reasons as above.
2048         # XXX: mhy: I think this should be in add_dsc_to_db
2049         if self.pkg.changes["architecture"].has_key("source"):
2050             for orig_file in self.pkg.orig_files.keys():
2051                 if not self.pkg.orig_files[orig_file].has_key("id"):
2052                     continue # Skip if it's not in the pool
2053                 orig_file_id = self.pkg.orig_files[orig_file]["id"]
2054                 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
2055                     continue # Skip if the location didn't change
2056
2057                 # Do the move
2058                 oldf = get_poolfile_by_id(orig_file_id, session)
2059                 old_filename = os.path.join(oldf.location.path, oldf.filename)
2060                 old_dat = {'size': oldf.filesize,   'md5sum': oldf.md5sum,
2061                            'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
2062
2063                 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
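                # utils.poolify() maps (source, component) to the pool layout,
                # e.g. (hypothetical) ("dak", "main") -> "pool/main/d/dak/", so
                # the orig tarball lands beside the rest of the source package.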
2064
2065                 # TODO: Care about size/md5sum collisions etc
2066                 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
2067
2068                 # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
2069                 if newf is None:
2070                     utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
2071                     newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
2072
2073                     session.flush()
2074
2075                     # Don't reference the old file from this changes
2076                     for p in poolfiles:
2077                         if p.file_id == oldf.file_id:
2078                             poolfiles.remove(p)
2079
2080                     poolfiles.append(newf)
2081
2082                     # Fix up the DSC references
2083                     toremove = []
2084
2085                     for df in source.srcfiles:
2086                         if df.poolfile.file_id == oldf.file_id:
2087                             # Add a new DSC entry and mark the old one for deletion
2088                             # Don't do it in the loop so we don't change the thing we're iterating over
2089                             newdscf = DSCFile()
2090                             newdscf.source_id = source.source_id
2091                             newdscf.poolfile_id = newf.file_id
2092                             session.add(newdscf)
2093
2094                             toremove.append(df)
2095
2096                     for df in toremove:
2097                         session.delete(df)
2098
2099                     # Flush our changes
2100                     session.flush()
2101
2102                     # Make sure that our source object is up-to-date
2103                     session.expire(source)
2104
2105         # Add changelog information to the database
2106         self.store_changelog()
2107
2108         # Install the files into the pool
2109         for newfile, entry in self.pkg.files.items():
2110             destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
2111             utils.move(newfile, destination)
2112             self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
2113             stats.accept_bytes += float(entry["size"])
2114
2115         # Copy the .changes file across for suites which need it.
2116         copy_changes = dict([(x.copychanges, '')
2117                              for x in session.query(Suite).filter(Suite.suite_name.in_(self.pkg.changes["distribution"].keys())).all()
2118                              if x.copychanges is not None])
2119
2120         for dest in copy_changes.keys():
2121             utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
2122
2123         # We're done - commit the database changes
2124         session.commit()
2125         # Our SQL session will automatically start a new transaction after
2126         # the last commit
2127
2128         # Now ensure that the metadata has been added
2129         # This has to be done after we copy the files into the pool
2130         # For source if we have it:
2131         if self.pkg.changes["architecture"].has_key("source"):
2132             import_metadata_into_db(source, session)
2133
2134         # Now for any of our binaries
2135         for b in binaries:
2136             import_metadata_into_db(b, session)
2137
2138         session.commit()
2139
2140         # Move the .changes into the 'done' directory
2141         utils.move(self.pkg.changes_file,
2142                    os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))
2143
2144         if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
2145             UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
2146
2147         self.update_subst()
2148         self.Subst["__SUMMARY__"] = summary
2149         mail_message = utils.TemplateSubst(self.Subst,
2150                                            os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
2151         utils.send_mail(mail_message)
2152         self.announce(short_summary, 1)
2153
2154         ## Helper stuff for DebBugs Version Tracking
2155         if cnf.Find("Dir::Queue::BTSVersionTrack"):
2156             if self.pkg.changes["architecture"].has_key("source"):
2157                 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2158                 version_history = os.fdopen(fd, 'w')
2159                 version_history.write(self.pkg.dsc["bts changelog"])
2160                 version_history.close()
2161                 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2162                                       self.pkg.changes_file[:-8]+".versions")
2163                 os.rename(temp_filename, filename)
2164                 os.chmod(filename, 0644)
2165
2166             # Write out the binary -> source mapping.
2167             (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2168             debinfo = os.fdopen(fd, 'w')
2169             for name, entry in sorted(self.pkg.files.items()):
2170                 if entry["type"] == "deb":
2171                     line = " ".join([entry["package"], entry["version"],
2172                                      entry["architecture"], entry["source package"],
2173                                      entry["source version"]])
2174                     debinfo.write(line+"\n")
2175             debinfo.close()
2176             filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2177                                   self.pkg.changes_file[:-8]+".debinfo")
2178             os.rename(temp_filename, filename)
2179             os.chmod(filename, 0644)
2180
2181         session.commit()
2182
2183         # Set up our copy queues (e.g. buildd queues)
2184         for suite_name in self.pkg.changes["distribution"].keys():
2185             suite = get_suite(suite_name, session)
2186             for q in suite.copy_queues:
2187                 for f in poolfiles:
2188                     q.add_file_from_pool(f)
2189
2190         session.commit()
2191
2192         # Finally...
2193         stats.accept_count += 1
2194
2195     def check_override(self):
2196         """
2197         Checks override entries for validity. Mails "Override disparity" warnings,
2198         if that feature is enabled.
2199
2200         Abandons the check if
2201           - override disparity checks are disabled
2202           - mail sending is disabled
2203         """
2204
2205         cnf = Config()
2206
2207         # Abandon the check if override disparity checks have been disabled
2208         if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
2209             return
2210
2211         summary = self.pkg.check_override()
2212
2213         if summary == "":
2214             return
2215
2216         overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2217
2218         self.update_subst()
2219         self.Subst["__SUMMARY__"] = summary
2220         mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2221         utils.send_mail(mail_message)
2222         del self.Subst["__SUMMARY__"]
2223
2224     ###########################################################################
2225
2226     def remove(self, from_dir=None):
2227         """
2228         Used (for instance) in p-u to remove the package from unchecked
2229
2230         Also removes the package from holding area.
2231         """
2232         if from_dir is None:
2233             from_dir = self.pkg.directory
2234         h = Holding()
2235
2236         for f in self.pkg.files.keys():
2237             os.unlink(os.path.join(from_dir, f))
2238             if os.path.exists(os.path.join(h.holding_dir, f)):
2239                 os.unlink(os.path.join(h.holding_dir, f))
2240
2241         os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2242         if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2243             os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2244
2245     ###########################################################################
2246
2247     def move_to_queue (self, queue):
2248         """
2249         Move files to a destination queue using the permissions in the table
2250         """
2251         h = Holding()
2252         utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2253                    queue.path, perms=int(queue.change_perms, 8))
2254         for f in self.pkg.files.keys():
2255             utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
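        # queue.perms / queue.change_perms are stored as octal strings, e.g.
        # (hypothetical) "0644"; int(..., 8) converts them to the numeric mode
        # bits that utils.move() applies.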
2256
2257     ###########################################################################
2258
2259     def force_reject(self, reject_files):
2260         """
2261         Forcefully move files from the current directory to the
2262         reject directory.  If any file already exists in the reject
2263         directory it will be moved to the morgue to make way for
2264         the new file.
2265
2266         @type reject_files: dict
2267         @param reject_files: file dictionary
2268
2269         """
2270
2271         cnf = Config()
2272
2273         for file_entry in reject_files:
2274             # Skip any files which don't exist or which we don't have permission to read.
2275             if not os.access(file_entry, os.R_OK):
2276                 continue
2277
2278             dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
2279
2280             try:
2281                 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
2282             except OSError, e:
2283                 # File exists?  Let's find a new name by adding a number
2284                 if e.errno == errno.EEXIST:
2285                     try:
2286                         dest_file = utils.find_next_free(dest_file, 255)
2287                     except NoFreeFilenameError:
2288                         # Something's either gone badly Pete Tong, or
2289                         # someone is trying to exploit us.
2290                         utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))
2291                         return
2292
2293                     # Make sure we really got it
2294                     try:
2295                         dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2296                     except OSError, e:
2297                         # Likewise
2298                         utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
2299                         return
2300                 else:
2301                     raise
2302             # If we got here, we own the destination file, so we can
2303             # safely overwrite it.
2304             utils.move(file_entry, dest_file, 1, perms=0660)
2305             os.close(dest_fd)
2306
2307     ###########################################################################
2308     def do_reject (self, manual=0, reject_message="", notes=""):
2309         """
2310         Reject an upload. If C{manual} is true and no reject message was
2311         given, spawn an editor so the user can write one.
2312
2313         @type manual: bool
2314         @param manual: manual or automated rejection
2315
2316         @type reject_message: string
2317         @param reject_message: A reject message
2318
2319         @return: 0 on rejection, 1 if a manual reject was abandoned
2320
2321         """
2322         # If we weren't given a manual rejection message, spawn an
2323         # editor so the user can add one in...
2324         if manual and not reject_message:
2325             (fd, temp_filename) = utils.temp_filename()
2326             temp_file = os.fdopen(fd, 'w')
2327             if len(notes) > 0:
2328                 for note in notes:
2329                     temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
2330                                     % (note.author, note.version, note.notedate, note.comment))
2331             temp_file.close()
2332             editor = os.environ.get("EDITOR","vi")
2333             answer = 'E'
2334             while answer == 'E':
2335                 os.system("%s %s" % (editor, temp_filename))
2336                 temp_fh = utils.open_file(temp_filename)
2337                 reject_message = "".join(temp_fh.readlines())
2338                 temp_fh.close()
2339                 print "Reject message:"
2340                 print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
2341                 prompt = "[R]eject, Edit, Abandon, Quit ?"
2342                 answer = "XXX"
2343                 while prompt.find(answer) == -1:
2344                     answer = utils.our_raw_input(prompt)
2345                     m = re_default_answer.search(prompt)
2346                     if answer == "":
2347                         answer = m.group(1)
2348                     answer = answer[:1].upper()
2349             os.unlink(temp_filename)
2350             if answer == 'A':
2351                 return 1
2352             elif answer == 'Q':
2353                 sys.exit(0)
2354
2355         print "Rejecting.\n"
2356
2357         cnf = Config()
2358
2359         reason_filename = self.pkg.changes_file[:-8] + ".reason"
2360         reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2361
2362         # Move all the files into the reject directory
2363         reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2364         self.force_reject(reject_files)
2365
2366         # If we fail here someone is probably trying to exploit the race
2367         # so let's just raise an exception ...
2368         if os.path.exists(reason_filename):
2369             os.unlink(reason_filename)
2370         reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2371
2372         rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
2373
2374         self.update_subst()
2375         if not manual:
2376             self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2377             self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2378             self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2379             os.write(reason_fd, reject_message)
2380             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2381         else:
2382             # Build up the rejection email
2383             user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2384             self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2385             self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2386             self.Subst["__REJECT_MESSAGE__"] = ""
2387             self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2388             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2389             # Write the rejection email out as the <foo>.reason file
2390             os.write(reason_fd, reject_mail_message)
2391
2392         del self.Subst["__REJECTOR_ADDRESS__"]
2393         del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2394         del self.Subst["__CC__"]
2395
2396         os.close(reason_fd)
2397
2398         # Send the rejection mail
2399         utils.send_mail(reject_mail_message)
2400
2401         if self.logger:
2402             self.logger.log(["rejected", self.pkg.changes_file])
2403
2404         return 0
2405
2406     ################################################################################
2407     def in_override_p(self, package, component, suite, binary_type, filename, session):
2408         """
2409         Check if a package already has override entries in the DB
2410
2411         @type package: string
2412         @param package: package name
2413
2414         @type component: string
2415         @param component: component name
2416
2417         @type suite: string
2418         @param suite: suite name
2419
2420         @type binary_type: string
2421         @param binary_type: type of the package
2422
2423         @type filename: string
2424         @param filename: filename we check
2425
2426         @return: the database result. But no one cares anyway.
2427
2428         """
2429
2430         cnf = Config()
2431
2432         if binary_type == "": # must be source
2433             file_type = "dsc"
2434         else:
2435             file_type = binary_type
2436
2437         # Override suite name; used for example with proposed-updates
2438         oldsuite = get_suite(suite, session)
2439         if oldsuite is not None and oldsuite.overridesuite:
2440             suite = oldsuite.overridesuite
2441
2442         result = get_override(package, suite, component, file_type, session)
2443
2444         # If checking for a source package fall back on the binary override type
2445         if file_type == "dsc" and len(result) < 1:
2446             result = get_override(package, suite, component, ['deb', 'udeb'], session)
2447
2448         # Remember the section and priority so we can check them later if appropriate
2449         if len(result) > 0:
2450             result = result[0]
2451             self.pkg.files[filename]["override section"] = result.section.section
2452             self.pkg.files[filename]["override priority"] = result.priority.priority
2453             return result
2454
2455         return None
2456
2457     ################################################################################
2458     def get_anyversion(self, sv_list, suite):
2459         """
2460         @type sv_list: list
2461         @param sv_list: list of (suite, version) tuples to check
2462
2463         @type suite: string
2464         @param suite: suite name
2465
2466         Returns the highest version of the package in C{sv_list} found in C{suite} or any suite it enhances (per C{VersionChecks::Enhances}), or None.
2467         """
2468         Cnf = Config()
2469         anyversion = None
2470         anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
2471         for (s, v) in sv_list:
2472             if s in [ x.lower() for x in anysuite ]:
2473                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2474                     anyversion = v
2475
2476         return anyversion
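    # For instance (hypothetical data): with sv_list
    # [("unstable", "1.2-1"), ("experimental", "1.3-1")] and a suite
    # "experimental" that enhances unstable, this returns "1.3-1".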
2477
2478     ################################################################################
2479
2480     def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2481         """
2482         @type sv_list: list
2483         @param sv_list: list of (suite, version) tuples to check
2484
2485         @type filename: string
2486         @param filename: name of the file being checked; used in reject messages
2487
2488         @type new_version: string
2489         @param new_version: version of the incoming package
2490
2491         Ensure versions are newer than existing packages in target
2492         suites and that cross-suite version checking rules as
2493         set out in the conf file are satisfied.
2494         """
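        # The per-suite rules read below come from the dak configuration; a
        # minimal sketch (suite names are illustrative):
        #
        #   Suite::unstable::VersionChecks::MustBeNewerThan { "stable"; "testing"; };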
2495
2496         cnf = Config()
2497
2498         # Check versions for each target suite
2499         for target_suite in self.pkg.changes["distribution"].keys():
2500             must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
2501             must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
2502
2503             # Enforce "must be newer than target suite" even if conffile omits it
2504             if target_suite not in must_be_newer_than:
2505                 must_be_newer_than.append(target_suite)
2506
2507             for (suite, existent_version) in sv_list:
2508                 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
2509
2510                 if suite in must_be_newer_than and sourceful and vercmp < 1:
2511                     self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2512
2513                 if suite in must_be_older_than and vercmp > -1:
2514                     cansave = 0
2515
2516                     if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2517                         # we really use the other suite, ignoring the conflicting one ...
2518                         addsuite = self.pkg.changes["distribution-version"][suite]
2519
2520                         add_version = self.get_anyversion(sv_list, addsuite)
2521                         target_version = self.get_anyversion(sv_list, target_suite)
2522
2523                         if not add_version:
2524                             # not add_version can only happen if we map to a suite
2525                             # that doesn't enhance the suite we're propup'ing from.
2526                             # so "propup-ver x a b c; map a d" is a problem only if
2527                             # d doesn't enhance a.
2528                             #
2529                             # i think we could always propagate in this case, rather
2530                             # than complaining. either way, this isn't a REJECT issue
2531                             #
2532                             # And - we really should complain to the dorks who configured dak
2533                             self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyway" % (suite, addsuite))
2534                             self.pkg.changes.setdefault("propdistribution", {})
2535                             self.pkg.changes["propdistribution"][addsuite] = 1
2536                             cansave = 1
2537                         elif not target_version:
2538                             # "not target_version" is true when the package is NEW;
2539                             # we could just stick with the "...old version..." REJECT
2540                             # for this, I think.
2541                             self.rejects.append("Won't propagate NEW packages.")
2542                         elif apt_pkg.VersionCompare(new_version, add_version) < 0:
2543                             # propagation would be redundant. no need to reject though.
2544                             self.warnings.append("ignoring version conflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2545                             cansave = 1
2546                         elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2547                              apt_pkg.VersionCompare(add_version, target_version) >= 0:
2548                             # propagate!!
2549                             self.warnings.append("Propagating upload to %s" % (addsuite))
2550                             self.pkg.changes.setdefault("propdistribution", {})
2551                             self.pkg.changes["propdistribution"][addsuite] = 1
2552                             cansave = 1
2553
2554                     if not cansave:
2555                         self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2556
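         # The VersionChecks stanzas this method reads look roughly like the
         # following (sketch only; suite names are examples):
         #
         #   Suite::testing-proposed-updates::VersionChecks
         #   {
         #     MustBeNewerThan { stable; testing; };
         #     MustBeOlderThan { unstable; };
         #     Enhances { testing; };
         #   };
         #
         # A sourceful upload is rejected unless it is strictly newer than what
         # every MustBeNewerThan suite carries; if a MustBeOlderThan suite
         # already has an equal or higher version, the upload is rejected
         # unless the propagation logic above can save it.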
2557     ################################################################################
2558     def check_binary_against_db(self, filename, session):
2559         # Ensure version is sane
2560         self.cross_suite_version_check( \
2561             get_suite_version_by_package(self.pkg.files[filename]["package"], \
2562                 self.pkg.files[filename]["architecture"], session),
2563             filename, self.pkg.files[filename]["version"], sourceful=False)
2564
2565         # Check for any existing copies of the file
2566         q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2567         q = q.filter_by(version=self.pkg.files[filename]["version"])
2568         q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2569
2570         if q.count() > 0:
2571             self.rejects.append("%s: cannot overwrite existing copy already in the archive." % filename)
2572
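         # In SQL terms the overwrite check above is roughly (sketch only; the
         # real table/column names live in daklib.dbconn):
         #
         #   SELECT COUNT(*) FROM binaries b
         #     JOIN architecture a ON b.architecture = a.id
         #    WHERE b.package = :package AND b.version = :version
         #      AND a.arch_string = :arch;
         #
         # Any hit means the archive already has that name/version/arch and the
         # upload must not silently replace it.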
2573     ################################################################################
2574
2575     def check_source_against_db(self, filename, session):
2576         source = self.pkg.dsc.get("source")
2577         version = self.pkg.dsc.get("version")
2578
2579         # Ensure version is sane
2580         self.cross_suite_version_check( \
2581             get_suite_version_by_source(source, session), filename, version,
2582             sourceful=True)
2583
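         # get_suite_version_by_source() yields (suite, version) pairs, e.g.
         # [("unstable", "1.0-1"), ("testing", "0.9-3")] (hypothetical values);
         # that is the sv_list shape cross_suite_version_check() and
         # get_anyversion() operate on.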
2584     ################################################################################
2585     def check_dsc_against_db(self, filename, session):
2586         """
2587
2588         @warning: NB: this function can remove entries from the 'files' index [if
2589          the orig tarball is a duplicate of the one in the archive]; if
2590          you're iterating over 'files' and call this function as part of
2591          the loop, be sure to add a check to the top of the loop to
2592          ensure you haven't just tried to dereference the deleted entry.
2593
2594         """
2595
2596         Cnf = Config()
2597         self.pkg.orig_files = {} # XXX: do we need to clear it?
2598         orig_files = self.pkg.orig_files
2599
2600         # Try and find all files mentioned in the .dsc.  This has
2601         # to work harder to cope with the multiple possible
2602         # locations of an .orig.tar.gz.
2603         # The ordering on the select is needed to pick the newest orig
2604         # when it exists in multiple places.
2605         for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2606             found = None
2607             if self.pkg.files.has_key(dsc_name):
2608                 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2609                 actual_size = int(self.pkg.files[dsc_name]["size"])
2610                 found = "%s in incoming" % (dsc_name)
2611
2612                 # Check the file does not already exist in the archive
2613                 ql = get_poolfile_like_name(dsc_name, session)
2614
2615                 # Strip out anything whose filename doesn't end in dsc_name.
2616                 # (Filter into a new list; removing from ql while iterating
2617                 # over it would skip entries.)
2618                 ql = [ i for i in ql if i.filename.endswith(dsc_name) ]
2619
2620                 # "[dak] has not broken them.  [dak] has fixed a
2621                 # brokenness.  Your crappy hack exploited a bug in
2622                 # the old dinstall."
2623                 #
2624                 # "(Come on!  I thought it was always obvious that
2625                 # one just doesn't release different files with
2626                 # the same name and version.)"
2627                 #                        -- ajk@ on d-devel@l.d.o
2628
2629                 if len(ql) > 0:
2630                     # Ignore exact matches for .orig.tar.gz
2631                     match = 0
2632                     if re_is_orig_source.match(dsc_name):
2633                         for i in ql:
2634                             if self.pkg.files.has_key(dsc_name) and \
2635                                int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2636                                self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2637                                 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2638                                 # TODO: Don't delete the entry, just mark it as not needed
2639                                 # This would fix the stupidity of changing something we often iterate over
2640                                 # whilst we're doing it
2641                                 del self.pkg.files[dsc_name]
2642                                 dsc_entry["files id"] = i.file_id
2643                                 if not orig_files.has_key(dsc_name):
2644                                     orig_files[dsc_name] = {}
2645                                 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2646                                 match = 1
2647
2648                                 # Don't bitch that we couldn't find this file later
2649                                 try:
2650                                     self.later_check_files.remove(dsc_name)
2651                                 except ValueError:
2652                                     pass
2653
2654
2655                     if not match:
2656                         self.rejects.append("cannot overwrite existing copy of '%s' already in the archive." % (dsc_name))
2657
2658             elif re_is_orig_source.match(dsc_name):
2659                 # Check in the pool
2660                 ql = get_poolfile_like_name(dsc_name, session)
2661
2662                 # Strip out anything whose filename doesn't end in dsc_name.
2663                 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2664                 # (As above, filter into a new list rather than removing
2665                 # from ql while iterating over it.)
2666                 ql = [ i for i in ql if i.filename.endswith(dsc_name) ]
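                     # Per the TODO above, the suffix match could instead be
                     # pushed into the query; with the PoolFile mapping that
                     # would look roughly like (untested sketch):
                     #
                     #   ql = session.query(PoolFile).filter(
                     #            PoolFile.filename.endswith(dsc_name)).all()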
2667
2668                 if len(ql) > 0:
2669                     # Unfortunately, we may get more than one match here if,
2670                     # for example, the package was in potato but had an -sa
2671                     # upload in woody.  So we need to choose the right one.
2672
2673                     # default to something sane in case we don't match any or have only one
2674                     x = ql[0]
2675
2676                     if len(ql) > 1:
2677                         for i in ql:
2678                             old_file = os.path.join(i.location.path, i.filename)
2679                             old_file_fh = utils.open_file(old_file)
2680                             actual_md5 = apt_pkg.md5sum(old_file_fh)
2681                             old_file_fh.close()
2682                             actual_size = os.stat(old_file)[stat.ST_SIZE]
2683                             if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2684                                 x = i
2685
2686                     old_file = os.path.join(x.location.path, x.filename)
2687                     old_file_fh = utils.open_file(old_file)
2688                     actual_md5 = apt_pkg.md5sum(old_file_fh)
2689                     old_file_fh.close()
2690                     actual_size = os.stat(old_file)[stat.ST_SIZE]
2691                     found = old_file
2692                     suite_type = x.location.archive_type
2693                     # need this for updating dsc_files in install()
2694                     dsc_entry["files id"] = x.file_id
2695                     # See install() in process-accepted...
2696                     if not orig_files.has_key(dsc_name):
2697                         orig_files[dsc_name] = {}
2698                     orig_files[dsc_name]["id"] = x.file_id
2699                     orig_files[dsc_name]["path"] = old_file
2700                     orig_files[dsc_name]["location"] = x.location.location_id
2701                 else:
2702                     # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2703                     # Not there? Check the queue directories...
2704                     for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2705                         if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2706                             continue
2707                         in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2708                         if os.path.exists(in_otherdir):
2709                             in_otherdir_fh = utils.open_file(in_otherdir)
2710                             actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2711                             in_otherdir_fh.close()
2712                             actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2713                             found = in_otherdir
2714                             if not orig_files.has_key(dsc_name):
2715                                 orig_files[dsc_name] = {}
2716                             orig_files[dsc_name]["path"] = in_otherdir
2717
2718                     if not found:
2719                         self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2720                         continue
2721             else:
2722                 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2723                 continue
2724             if actual_md5 != dsc_entry["md5sum"]:
2725                 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2726             if actual_size != int(dsc_entry["size"]):
2727                 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2728
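         # After check_dsc_against_db() an orig tarball found in the pool
         # leaves an entry of roughly this shape in self.pkg.orig_files
         # (illustrative values):
         #
         #   orig_files["dak_1.0.orig.tar.gz"] = {
         #       "id": 12345,          # files id in the database
         #       "path": "/path/to/pool/main/d/dak/dak_1.0.orig.tar.gz",
         #       "location": 1,        # location id
         #   }
         #
         # Entries found in incoming or in a queue directory carry only "path".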
2729     ################################################################################
2730     # This is used by process-new and process-holding to recheck a changes file
2731     # at the time we're running.  It mainly wraps various other internal functions
2732     # and is similar to accepted_checks - these should probably be tidied up
2733     # and combined
2734     def recheck(self, session):
2735         cnf = Config()
2736         for f in self.pkg.files.keys():
2737             # The .orig.tar.gz can disappear out from under us if it's a
2738             # duplicate of one in the archive.
2739             if not self.pkg.files.has_key(f):
2740                 continue
2741
2742             entry = self.pkg.files[f]
2743
2744             # Check that the source still exists
2745             if entry["type"] == "deb":
2746                 source_version = entry["source version"]
2747                 source_package = entry["source package"]
2748                 if not self.pkg.changes["architecture"].has_key("source") \
2749                    and not source_exists(source_package, source_version, \
2750                     suites = self.pkg.changes["distribution"].keys(), session = session):
2751                     source_epochless_version = re_no_epoch.sub('', source_version)
2752                     dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2753                     found = False
2754                     for q in ["Embargoed", "Unembargoed", "Newstage"]:
2755                         if cnf.has_key("Dir::Queue::%s" % (q)):
2756                             if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
2757                                 found = True
2758                     if not found:
2759                         self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2760
2761             # Version and file overwrite checks
2762             if entry["type"] == "deb":
2763                 self.check_binary_against_db(f, session)
2764             elif entry["type"] == "dsc":
2765                 self.check_source_against_db(f, session)
2766                 self.check_dsc_against_db(f, session)
2767
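         # Typical use from process-new (sketch; error handling omitted):
         #
         #   upload.recheck(session)
         #   if upload.rejects:
         #       ...  # the upload no longer passes; reject or hold it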
2768     ################################################################################
2769     def accepted_checks(self, overwrite_checks, session):
2770         # Recheck anything that relies on the database; since that's not
2771         # frozen between accept and our run time when called from p-a.
2772
2773         # overwrite_checks is set to False when installing to stable/oldstable
2774
2775         propagate = {}
2776         nopropagate = {}
2777
2778         # Find the .dsc (again)
2779         dsc_filename = None
2780         for f in self.pkg.files.keys():
2781             if self.pkg.files[f]["type"] == "dsc":
2782                 dsc_filename = f
2783
2784         for checkfile in self.pkg.files.keys():
2785             # The .orig.tar.gz can disappear out from under us if it's a
2786             # duplicate of one in the archive.
2787             if not self.pkg.files.has_key(checkfile):
2788                 continue
2789
2790             entry = self.pkg.files[checkfile]
2791
2792             # Check that the source still exists
2793             if entry["type"] == "deb":
2794                 source_version = entry["source version"]
2795                 source_package = entry["source package"]
2796                 if not self.pkg.changes["architecture"].has_key("source") \
2797                    and not source_exists(source_package, source_version, \
2798                     suites = self.pkg.changes["distribution"].keys(), \
2799                     session = session):
2800                     self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2801
2802             # Version and file overwrite checks
2803             if overwrite_checks:
2804                 if entry["type"] == "deb":
2805                     self.check_binary_against_db(checkfile, session)
2806                 elif entry["type"] == "dsc":
2807                     self.check_source_against_db(checkfile, session)
2808                     self.check_dsc_against_db(dsc_filename, session)
2809
2810             # propagate if it is in the override tables:
2811             for suite in self.pkg.changes.get("propdistribution", {}).keys():
2812                 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2813                     propagate[suite] = 1
2814                 else:
2815                     nopropagate[suite] = 1
2816
2817         for suite in propagate.keys():
2818             if suite in nopropagate:
2819                 continue
2820             self.pkg.changes["distribution"][suite] = 1
2821
2822         for checkfile in self.pkg.files.keys():
2823             # Check the package is still in the override tables; re-bind
                 # entry here, since the loop above left it pointing at the
                 # last file it processed.
                 entry = self.pkg.files[checkfile]
2824             for suite in self.pkg.changes["distribution"].keys():
2825                 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2826                     self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2827
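         # Net effect of the bookkeeping above (illustrative): if every file of
         # the upload has an override entry for a suite listed in
         # "propdistribution", that suite is added to the target distributions:
         #
         #   self.pkg.changes["propdistribution"] = {"proposed-updates": 1}
         #   # ... after accepted_checks():
         #   self.pkg.changes["distribution"]["proposed-updates"] = 1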
2828     ################################################################################
2829     # If any file of an upload has a recent mtime then chances are good
2830     # the file is still being uploaded.
2831
2832     def upload_too_new(self):
2833         cnf = Config()
2834         too_new = False
2835         # Move back to the original directory to get accurate time stamps
2836         cwd = os.getcwd()
2837         os.chdir(self.pkg.directory)
2838         file_list = self.pkg.files.keys()
2839         file_list.extend(self.pkg.dsc_files.keys())
2840         file_list.append(self.pkg.changes_file)
2841         for f in file_list:
2842             try:
2843                 last_modified = time.time() - os.path.getmtime(f)
2844                 if last_modified < int(cnf["Dinstall::SkipTime"]):
2845                     too_new = True
2846                     break
2847             except OSError:
                     # The file may have vanished from under us; ignore it.
2848                 pass
2849
2850         os.chdir(cwd)
2851         return too_new
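
         # Example: with Dinstall::SkipTime set to 300, the upload counts as
         # "too new" (and is skipped for this run) if any of its files was
         # modified within the last five minutes.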
2852
2853     def store_changelog(self):
2854
2855         # Skip binary-only upload if it is not a bin-NMU
2856         if not self.pkg.changes['architecture'].has_key('source'):
2857             from daklib.regexes import re_bin_only_nmu
2858             if not re_bin_only_nmu.search(self.pkg.changes['version']):
2859                 return
2860
2861         session = DBConn().session()
2862
2863         # Check if upload already has a changelog entry
2864         query = """SELECT changelog_id FROM changes WHERE source = :source
2865                    AND version = :version AND architecture = :architecture AND changelog_id != 0"""
2866         if session.execute(query, {'source': self.pkg.changes['source'], \
2867                                    'version': self.pkg.changes['version'], \
2868                                    'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
2869             session.commit()
2870             return
2871
2872         # Add current changelog text into changelogs_text table, return created ID
2873         query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
2874         ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]
2875
2876         # Link ID to the upload available in changes table
2877         query = """UPDATE changes SET changelog_id = :id WHERE source = :source
2878                    AND version = :version AND architecture = :architecture"""
2879         session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
2880                                 'version': self.pkg.changes['version'], \
2881                                 'architecture': " ".join(self.pkg.changes['architecture'].keys())})
2882
2883         session.commit()
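
             # The three statements above amount to (illustrative values):
             #
             #   INSERT INTO changelogs_text (changelog) VALUES ('...')
             #     RETURNING id;   -- say this returns 42
             #   UPDATE changes SET changelog_id = 42
             #    WHERE source = 'dak' AND version = '1.0-1'
             #      AND architecture = 'source all';
             #
             # Each matching row in changes then points at the stored changelog
             # text, and the early return above keeps reruns from inserting
             # duplicate changelogs_text rows.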