#!/usr/bin/env python
# vim:set et sw=4:

"""
Queue utility functions for dak

@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2001 - 2006 James Troup <james@nocrew.org>
@copyright: 2009, 2010  Joerg Jaspert <joerg@debian.org>
@license: GNU General Public License version 2 or later
"""

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

###############################################################################

import errno
import os
import stat
import sys
import time
import apt_inst
import apt_pkg
import utils
import commands
import shutil
import textwrap
from types import *
from sqlalchemy.sql.expression import desc
from sqlalchemy.orm.exc import NoResultFound

import yaml

from dak_exceptions import *
from changes import *
from regexes import *
from config import Config
from holding import Holding
from urgencylog import UrgencyLog
from dbconn import *
from summarystats import SummaryStats
from utils import parse_changes, check_dsc_files
from textutils import fix_maintainer
from lintian import parse_lintian_output, generate_reject_messages

# suppress some deprecation warnings in squeeze related to apt_pkg
# module
import warnings
warnings.filterwarnings('ignore', \
    "apt_pkg.ParseSection\(\) is deprecated. Please see apt_pkg\.TagSection\(\) for the replacement\.", \
    DeprecationWarning)
warnings.filterwarnings('ignore', \
    "Attribute 'Find' of the 'apt_pkg\.TagSection' object is deprecated, use 'find' instead\.", \
    DeprecationWarning)

###############################################################################

def get_type(f, session):
    """
    Get the file type of C{f}

    @type f: dict
    @param f: file entry from Changes object

    @type session: SQLA Session
    @param session: SQL Alchemy session object

    @rtype: string
    @return: filetype

    """
    # Determine the type
    if f.has_key("dbtype"):
        file_type = f["dbtype"]
    elif re_source_ext.match(f["type"]):
        file_type = "dsc"
    else:
        file_type = f["type"]
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (file_type))

    # Validate the override type
    type_id = get_override_type(file_type, session)
    if type_id is None:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))

    return file_type

################################################################################

# Determine what parts in a .changes are NEW

def determine_new(filename, changes, files, warn=1, session=None):
    """
    Determine what parts in a C{changes} file are NEW.

    @type filename: str
    @param filename: changes filename

    @type changes: Upload.Pkg.changes dict
    @param changes: Changes dictionary

    @type files: Upload.Pkg.files dict
    @param files: Files dictionary

    @type warn: bool
    @param warn: Warn if overrides are added for (old)stable

    @rtype: tuple
    @return: dictionary of NEW components and dictionary of byhand files.
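
    An illustrative (invented) example of the NEW dictionary's shape:
      { 'foo': { 'priority': 'optional', 'section': 'utils', 'type': 'deb',
                 'component': 'main', 'files': ['foo_1.0_amd64.deb'] } }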
    """
    # TODO: This should all use the database instead of parsing the changes
    # file again
    new = {}
    byhand = {}

    dbchg = get_dbchange(filename, session)
    if dbchg is None:
        print "Warning: cannot find changes file in database; won't check byhand"

    # Build up a list of potentially new things
    for name, f in files.items():
        # Keep a record of byhand elements
        if f["section"] == "byhand":
            byhand[name] = 1
            continue

        pkg = f["package"]
        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f, session)
        component = f["component"]

        if file_type == "dsc":
            priority = "source"

        if not new.has_key(pkg):
            new[pkg] = {}
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        else:
            old_type = new[pkg]["type"]
            if old_type != file_type:
                # source gets trumped by deb or udeb
                if old_type == "dsc":
                    new[pkg]["priority"] = priority
                    new[pkg]["section"] = section
                    new[pkg]["type"] = file_type
                    new[pkg]["component"] = component

        new[pkg]["files"].append(name)

        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]

    # Fix up the list of target suites
    cnf = Config()
    for suite in changes["suite"].keys():
        oldsuite = get_suite(suite, session)
        if not oldsuite:
            print "WARNING: Invalid suite %s found" % suite
            continue

        if oldsuite.overridesuite:
            newsuite = get_suite(oldsuite.overridesuite, session)

            if newsuite:
                print "INFORMATION: Using overrides from suite %s instead of suite %s" % (
                    oldsuite.overridesuite, suite)
                del changes["suite"][suite]
                changes["suite"][oldsuite.overridesuite] = 1
            else:
                print "WARNING: Told to use overridesuite %s for %s but it doesn't exist.  Bugger" % (
                    oldsuite.overridesuite, suite)

    # Check for unprocessed byhand files
    if dbchg is not None:
        for b in byhand.keys():
            # Find the file entry in the database
            found = False
            for f in dbchg.files:
                if f.filename == b:
                    found = True
                    # If it's processed, we can ignore it
                    if f.processed:
                        del byhand[b]
                    break

            if not found:
                print "Warning: Couldn't find BYHAND item %s in the database; assuming unprocessed" % (b)

    # Check for new stuff
    for suite in changes["suite"].keys():
        for pkg in new.keys():
            ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
            if len(ql) > 0:
                for file_entry in new[pkg]["files"]:
                    if files[file_entry].has_key("new"):
                        del files[file_entry]["new"]
                del new[pkg]

    if warn:
        for s in ['stable', 'oldstable']:
            if changes["suite"].has_key(s):
                print "WARNING: overrides will be added for %s!" % s
        for pkg in new.keys():
            if new[pkg].has_key("othercomponents"):
                print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])

    return new, byhand

################################################################################

def check_valid(new, session=None):
    """
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages can not be udeb
      - source priority can only be assigned to dsc file types

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.

    """
    for pkg in new.keys():
        section_name = new[pkg]["section"]
        priority_name = new[pkg]["priority"]
        file_type = new[pkg]["type"]

        section = get_section(section_name, session)
        if section is None:
            new[pkg]["section id"] = -1
        else:
            new[pkg]["section id"] = section.section_id

        priority = get_priority(priority_name, session)
        if priority is None:
            new[pkg]["priority id"] = -1
        else:
            new[pkg]["priority id"] = priority.priority_id

        # Sanity checks
        di = section_name.find("debian-installer") != -1

        # If d-i, we must be udeb and vice-versa
        if (di and file_type not in ("udeb", "dsc")) or \
           (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1

        # If dsc we need to be source and vice-versa
        if (priority_name == "source" and file_type != "dsc") or \
           (priority_name != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1

###############################################################################

# Used by Upload.check_timestamps
class TarTime(object):
    def __init__(self, future_cutoff, past_cutoff):
        self.reset()
        self.future_cutoff = future_cutoff
        self.past_cutoff = past_cutoff

    def reset(self):
        self.future_files = {}
        self.ancient_files = {}

    def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
        if MTime > self.future_cutoff:
            self.future_files[Name] = MTime
        if MTime < self.past_cutoff:
            self.ancient_files[Name] = MTime
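
# A hypothetical usage sketch (assuming the old python-apt callback-style
# extraction API that this callback signature was written for):
#     tar = TarTime(future_cutoff, past_cutoff)
#     apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
#     # afterwards tar.future_files / tar.ancient_files map offending
#     # member names to their mtimes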

###############################################################################

def prod_maintainer(notes, upload):
    cnf = Config()

    # Here we prepare an editor and get them ready to prod...
    (fd, temp_filename) = utils.temp_filename()
    temp_file = os.fdopen(fd, 'w')
    for note in notes:
        temp_file.write(note.comment)
    temp_file.close()
    editor = os.environ.get("EDITOR","vi")
    answer = 'E'
    while answer == 'E':
        os.system("%s %s" % (editor, temp_filename))
        temp_fh = utils.open_file(temp_filename)
        prod_message = "".join(temp_fh.readlines())
        temp_fh.close()
        print "Prod message:"
        print utils.prefix_multi_line_string(prod_message,"  ",include_blank_lines=1)
        prompt = "[P]rod, Edit, Abandon, Quit ?"
        answer = "XXX"
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            m = re_default_answer.search(prompt)
            if answer == "":
                answer = m.group(1)
            answer = answer[:1].upper()
    os.unlink(temp_filename)
    if answer == 'A':
        return
    elif answer == 'Q':
        end()
        sys.exit(0)
    # Otherwise, do the prodding...
    user_email_address = utils.whoami() + " <%s>" % (
        cnf["Dinstall::MyAdminAddress"])

    Subst = upload.Subst

    Subst["__FROM_ADDRESS__"] = user_email_address
    Subst["__PROD_MESSAGE__"] = prod_message
    Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]

    prod_mail_message = utils.TemplateSubst(
        Subst, cnf["Dir::Templates"] + "/process-new.prod")

    # Send the prod mail
    utils.send_mail(prod_mail_message)

    print "Sent prodding message"

################################################################################

def edit_note(note, upload, session, trainee=False):
    # Write the current data to a temporary file
    (fd, temp_filename) = utils.temp_filename()
    editor = os.environ.get("EDITOR","vi")
    answer = 'E'
    while answer == 'E':
        os.system("%s %s" % (editor, temp_filename))
        temp_file = utils.open_file(temp_filename)
        newnote = temp_file.read().rstrip()
        temp_file.close()
        print "New Note:"
        print utils.prefix_multi_line_string(newnote,"  ")
        prompt = "[D]one, Edit, Abandon, Quit ?"
        answer = "XXX"
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            m = re_default_answer.search(prompt)
            if answer == "":
                answer = m.group(1)
            answer = answer[:1].upper()
    os.unlink(temp_filename)
    if answer == 'A':
        return
    elif answer == 'Q':
        end()
        sys.exit(0)

    comment = NewComment()
    comment.package = upload.pkg.changes["source"]
    comment.version = upload.pkg.changes["version"]
    comment.comment = newnote
    comment.author  = utils.whoami()
    comment.trainee = trainee
    session.add(comment)
    session.commit()

###############################################################################

# suite names DMs can upload to
dm_suites = ['unstable', 'experimental']

def get_newest_source(source, session):
    'returns the newest DBSource object in dm_suites'
    ## the most recent version of the package uploaded to unstable or
    ## experimental includes the field "DM-Upload-Allowed: yes" in the source
    ## section of its control file
    q = session.query(DBSource).filter_by(source=source). \
        filter(DBSource.suites.any(Suite.suite_name.in_(dm_suites))). \
        order_by(desc('source.version'))
    return q.first()

def get_suite_version_by_source(source, session):
    'returns a list of tuples (suite_name, version) for source package'
    q = session.query(Suite.suite_name, DBSource.version). \
        join(Suite.sources).filter_by(source=source)
    return q.all()

def get_source_by_package_and_suite(package, suite_name, session):
    '''
    returns a DBSource query filtered by DBBinary.package and this package's
    suite_name
    '''
    return session.query(DBSource). \
        join(DBSource.binaries).filter_by(package=package). \
        join(DBBinary.suites).filter_by(suite_name=suite_name)

def get_suite_version_by_package(package, arch_string, session):
    '''
    returns a list of tuples (suite_name, version) for binary package and
    arch_string
    '''
    return session.query(Suite.suite_name, DBBinary.version). \
        join(Suite.binaries).filter_by(package=package). \
        join(DBBinary.architecture). \
        filter(Architecture.arch_string.in_([arch_string, 'all'])).all()
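
# A hypothetical usage sketch for the query helpers above (package name and
# results invented for illustration):
#     session = DBConn().session()
#     get_suite_version_by_source('dpkg', session)
#     #   e.g. [('unstable', '1.15.8'), ('experimental', '1.16.0')]
#     get_newest_source('dpkg', session)
#     #   newest DBSource in unstable/experimental, or None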

class Upload(object):
    """
    Everything that has to do with processing an upload.

    """
    def __init__(self):
        self.logger = None
        self.pkg = Changes()
        self.reset()

    ###########################################################################

    def reset(self):
        """ Reset a number of internal variables."""

        # Initialize the substitution template map
        cnf = Config()
        self.Subst = {}
        self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
        self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
        self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
        self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]

        self.rejects = []
        self.warnings = []
        self.notes = []

        self.later_check_files = []

        self.pkg.reset()

    def package_info(self):
        """
        Format various messages from this Upload to send to the maintainer.
        """

        msgs = (
            ('Reject Reasons', self.rejects),
            ('Warnings', self.warnings),
            ('Notes', self.notes),
        )

        msg = ''
        for title, messages in msgs:
            if messages:
                msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
        msg += '\n\n'

        return msg

    ###########################################################################
    def update_subst(self):
        """ Set up the per-package template substitution mappings """

        cnf = Config()

        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not self.pkg.changes.has_key("architecture") or not \
           isinstance(self.pkg.changes["architecture"], dict):
            self.pkg.changes["architecture"] = { "Unknown" : "" }

        # and maintainer2047 may not exist.
        if not self.pkg.changes.has_key("maintainer2047"):
            self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]

        self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
        self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if self.pkg.changes["architecture"].has_key("source") and \
           self.pkg.changes["changedby822"] != "" and \
           (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):

            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
        else:
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")

        # Process policy doesn't set the fingerprint field and I don't want to make it
        # do it for now as I don't want to have to deal with the case where we accepted
        # the package into PU-NEW, but the fingerprint has gone away from the keyring in
        # the meantime so the package will be remarked as rejectable.  Urgh.
        # TODO: Fix this properly
        if self.pkg.changes.has_key('fingerprint'):
            session = DBConn().session()
            fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
            if fpr and self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
                if self.pkg.changes.has_key("sponsoremail"):
                    self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
            session.close()

        if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
            self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if cnf.get("Dinstall::OverrideMaintainer"):
            self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
            self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]

        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
        self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
        self.Subst["__SUITE__"] = ", ".join(self.pkg.changes["distribution"])

    ###########################################################################
    def load_changes(self, filename):
        """
        Load a changes file and set up a dictionary around it. Also checks for
        mandatory fields within.

        @type filename: string
        @param filename: Changes filename, full path.

        @rtype: boolean
        @return: whether the changes file was valid or not.  We may want to
                 reject even if this is True (see what gets put in self.rejects).
                 This is simply to prevent us even trying things later which will
                 fail because we couldn't properly parse the file.
        """
        Cnf = Config()
        self.pkg.changes_file = filename

        # Parse the .changes file into a dictionary
        try:
            self.pkg.changes.update(parse_changes(filename))
        except CantOpenError:
            self.rejects.append("%s: can't read file." % (filename))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except ChangesUnicodeError:
            self.rejects.append("%s: changes file not proper utf-8" % (filename))
            return False

        # Parse the Files field from the .changes into another dictionary
        try:
            self.pkg.files.update(utils.build_file_list(self.pkg.changes))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (filename, format))
            return False

        # Check for mandatory fields
        for i in ("distribution", "source", "binary", "architecture",
                  "version", "maintainer", "files", "changes", "description"):
            if not self.pkg.changes.has_key(i):
                # Avoid undefined errors later
                self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
                return False

        # Strip a source version in brackets from the source field
        if re_strip_srcver.search(self.pkg.changes["source"]):
            self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])

        # Ensure the source field is a valid package name.
        if not re_valid_pkg_name.match(self.pkg.changes["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))

        # Split multi-value fields into a lower-level dictionary
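        # e.g. an "Architecture: source amd64" value becomes {'source': 1, 'amd64': 1} (illustrative)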
        for i in ("architecture", "distribution", "binary", "closes"):
            o = self.pkg.changes.get(i, "")
            if o != "":
                del self.pkg.changes[i]

            self.pkg.changes[i] = {}

            for j in o.split():
                self.pkg.changes[i][j] = 1

        # Fix the Maintainer: field to be RFC822/2047 compatible
        try:
            (self.pkg.changes["maintainer822"],
             self.pkg.changes["maintainer2047"],
             self.pkg.changes["maintainername"],
             self.pkg.changes["maintaineremail"]) = \
                   fix_maintainer(self.pkg.changes["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["maintainer"], msg))

        # ...likewise for the Changed-By: field if it exists.
        try:
            (self.pkg.changes["changedby822"],
             self.pkg.changes["changedby2047"],
             self.pkg.changes["changedbyname"],
             self.pkg.changes["changedbyemail"]) = \
                   fix_maintainer(self.pkg.changes.get("changed-by", ""))
        except ParseMaintError, msg:
            self.pkg.changes["changedby822"] = ""
            self.pkg.changes["changedby2047"] = ""
            self.pkg.changes["changedbyname"] = ""
            self.pkg.changes["changedbyemail"] = ""

            self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["changed-by"], msg))

        # Ensure all the values in Closes: are numbers
        if self.pkg.changes.has_key("closes"):
            for i in self.pkg.changes["closes"].keys():
                if re_isanum.match(i) is None:
                    self.rejects.append("%s: `%s' from Closes field isn't a number." % (filename, i))

        # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
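        # e.g. version "1:1.2-3" gives chopversion "1.2-3" and chopversion2 "1.2" (illustrative)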
        self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
        self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])

        # Check the .changes is non-empty
        if not self.pkg.files:
            self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
            return False

        # Changes was syntactically valid even if we'll reject
        return True

    ###########################################################################

    def check_distributions(self):
        "Check and map the Distribution field"

        Cnf = Config()

        # Handle suite mappings
        for m in Cnf.ValueList("SuiteMappings"):
            args = m.split()
            mtype = args[0]
            if mtype == "map" or mtype == "silent-map":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    del self.pkg.changes["distribution"][source]
                    self.pkg.changes["distribution"][dest] = 1
                    if mtype != "silent-map":
                        self.notes.append("Mapping %s to %s." % (source, dest))
                if self.pkg.changes.has_key("distribution-version"):
                    if self.pkg.changes["distribution-version"].has_key(source):
                        self.pkg.changes["distribution-version"][source] = dest
            elif mtype == "map-unreleased":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    for arch in self.pkg.changes["architecture"].keys():
                        if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
                            self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
                            del self.pkg.changes["distribution"][source]
                            self.pkg.changes["distribution"][dest] = 1
                            break
            elif mtype == "ignore":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    del self.pkg.changes["distribution"][suite]
                    self.warnings.append("Ignoring %s as a target suite." % (suite))
            elif mtype == "reject":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    self.rejects.append("Uploads to %s are not accepted." % (suite))
            elif mtype == "propup-version":
                # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
                #
                # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
                if self.pkg.changes["distribution"].has_key(args[1]):
                    self.pkg.changes.setdefault("distribution-version", {})
                    for suite in args[2:]:
                        self.pkg.changes["distribution-version"][suite] = suite

        # Ensure there is (still) a target distribution
        if len(self.pkg.changes["distribution"].keys()) < 1:
            self.rejects.append("No valid distribution remaining.")

        # Ensure target distributions exist
        for suite in self.pkg.changes["distribution"].keys():
            if not Cnf.has_key("Suite::%s" % (suite)):
                self.rejects.append("Unknown distribution `%s'." % (suite))

    ###########################################################################

    def binary_file_checks(self, f, session):
        cnf = Config()
        entry = self.pkg.files[f]

        # Extract package control information
        deb_file = utils.open_file(f)
        try:
            control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
        except:
            self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
            deb_file.close()
            # Can't continue, none of the checks on control would work.
            return

        # Check for mandatory "Description:"
        deb_file.seek(0)
        try:
            apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
        except:
            self.rejects.append("%s: Missing Description in binary package" % (f))
            return

        deb_file.close()

        # Check for mandatory fields
        for field in [ "Package", "Architecture", "Version" ]:
            if control.Find(field) is None:
                # Can't continue
                self.rejects.append("%s: No %s field in control." % (f, field))
                return

        # Ensure the package name matches the one given in the .changes
        if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
            self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))

        # Validate the package field
        package = control.Find("Package")
        if not re_valid_pkg_name.match(package):
            self.rejects.append("%s: invalid package name '%s'." % (f, package))

        # Validate the version field
        version = control.Find("Version")
        if not re_valid_version.match(version):
            self.rejects.append("%s: invalid version number '%s'." % (f, version))

        # Ensure the architecture of the .deb is one we know about.
        default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
        architecture = control.Find("Architecture")
        upload_suite = self.pkg.changes["distribution"].keys()[0]

        if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session=session)] \
           and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session=session)]:
            self.rejects.append("Unknown architecture '%s'." % (architecture))

        # Ensure the architecture of the .deb is one of the ones
        # listed in the .changes.
        if not self.pkg.changes["architecture"].has_key(architecture):
            self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))

        # Sanity-check the Depends field
        depends = control.Find("Depends")
        if depends == '':
            self.rejects.append("%s: Depends field is empty." % (f))

        # Sanity-check the Provides field
        provides = control.Find("Provides")
        if provides:
            provide = re_spacestrip.sub('', provides)
            if provide == '':
                self.rejects.append("%s: Provides field is empty." % (f))
            prov_list = provide.split(",")
            for prov in prov_list:
                if not re_valid_pkg_name.match(prov):
                    self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))

        # Check the section & priority match those given in the .changes (non-fatal)
        if control.Find("Section") and entry["section"] != "" \
           and entry["section"] != control.Find("Section"):
            self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
                                (f, control.Find("Section", ""), entry["section"]))
        if control.Find("Priority") and entry["priority"] != "" \
           and entry["priority"] != control.Find("Priority"):
            self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
                                (f, control.Find("Priority", ""), entry["priority"]))

        entry["package"] = package
        entry["architecture"] = architecture
        entry["version"] = version
        entry["maintainer"] = control.Find("Maintainer", "")

        if f.endswith(".udeb"):
            self.pkg.files[f]["dbtype"] = "udeb"
        elif f.endswith(".deb"):
            self.pkg.files[f]["dbtype"] = "deb"
        else:
            self.rejects.append("%s is neither a .deb nor a .udeb." % (f))

        entry["source"] = control.Find("Source", entry["package"])

        # Get the source version
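        # e.g. a "Source: foo (1.2-3)" control field yields source "foo", source_version "1.2-3" (illustrative)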
        source = entry["source"]
        source_version = ""

        if source.find("(") != -1:
            m = re_extract_src_version.match(source)
            source = m.group(1)
            source_version = m.group(2)

        if not source_version:
            source_version = self.pkg.files[f]["version"]

        entry["source package"] = source
        entry["source version"] = source_version

        # Ensure the filename matches the contents of the .deb
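        # e.g. "foo_1.2-3_amd64.deb" yields package "foo", version "1.2-3", architecture "amd64" (illustrative)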
        m = re_isadeb.match(f)

        #  package name
        file_package = m.group(1)
        if entry["package"] != file_package:
            self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
                                (f, file_package, entry["dbtype"], entry["package"]))
        epochless_version = re_no_epoch.sub('', control.Find("Version"))

        #  version
        file_version = m.group(2)
        if epochless_version != file_version:
            self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
                                (f, file_version, entry["dbtype"], epochless_version))

        #  architecture
        file_architecture = m.group(3)
        if entry["architecture"] != file_architecture:
            self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
                                (f, file_architecture, entry["dbtype"], entry["architecture"]))

        # Check for existing source
        source_version = entry["source version"]
        source_package = entry["source package"]
        if self.pkg.changes["architecture"].has_key("source"):
            if source_version != self.pkg.changes["version"]:
                self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
                                    (source_version, f, self.pkg.changes["version"]))
        else:
            # Check in the SQL database
            if not source_exists(source_package, source_version, suites = \
                self.pkg.changes["distribution"].keys(), session = session):
                # Check in one of the other directories
                source_epochless_version = re_no_epoch.sub('', source_version)
                dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
                if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
                    entry["byhand"] = 1
                elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
                    entry["new"] = 1
                else:
                    dsc_file_exists = False
                    for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
                        if cnf.has_key("Dir::Queue::%s" % (myq)):
                            if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
                                dsc_file_exists = True
                                break

                    if not dsc_file_exists:
                        self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))

        # Check the version and for file overwrites
        self.check_binary_against_db(f, session)

    def source_file_checks(self, f, session):
        entry = self.pkg.files[f]

        m = re_issource.match(f)
        if not m:
            return

        entry["package"] = m.group(1)
        entry["version"] = m.group(2)
        entry["type"] = m.group(3)

        # Ensure the source package name matches the Source field in the .changes
        if self.pkg.changes["source"] != entry["package"]:
            self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))

        # Ensure the source version matches the version in the .changes file
        if re_is_orig_source.match(f):
            changes_version = self.pkg.changes["chopversion2"]
        else:
            changes_version = self.pkg.changes["chopversion"]

        if changes_version != entry["version"]:
            self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))

        # Ensure the .changes lists source in the Architecture field
        if not self.pkg.changes["architecture"].has_key("source"):
            self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))

        # Check the signature of a .dsc file
        if entry["type"] == "dsc":
            # check_signature returns either:
            #  (None, [list, of, rejects]) or (signature, [])
            (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
            for j in rejects:
                self.rejects.append(j)

        entry["architecture"] = "source"

    def per_suite_file_checks(self, f, suite, session):
        cnf = Config()
        entry = self.pkg.files[f]

        # Skip byhand
        if entry.has_key("byhand"):
            return

        # Check we have fields we need to do these checks
        oktogo = True
        for m in ['component', 'package', 'priority', 'size', 'md5sum']:
            if not entry.has_key(m):
                self.rejects.append("file '%s' does not have field %s set" % (f, m))
                oktogo = False

        if not oktogo:
            return

        # Handle component mappings
        for m in cnf.ValueList("ComponentMappings"):
            (source, dest) = m.split()
            if entry["component"] == source:
                entry["original component"] = source
                entry["component"] = dest

        # Ensure the component is valid for the target suite
        if cnf.has_key("Suite::%s::Components" % (suite)) and \
           entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
            self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
            return

        # Validate the component
        if not get_component(entry["component"], session):
            self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
            return

        # See if the package is NEW
        if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
            entry["new"] = 1

        # Validate the priority
        if entry["priority"].find('/') != -1:
            self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))

        # Determine the location
        location = cnf["Dir::Pool"]
        l = get_location(location, entry["component"], session=session)
        if l is None:
            self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
            entry["location id"] = -1
        else:
            entry["location id"] = l.location_id

        # Check the md5sum & size against existing files (if any)
        entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])
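        # (poolify maps the source/component pair to a pool subdirectory;
        # e.g. roughly "main/libf/libfoo/" for ("libfoo", "main") -- illustrative)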

        found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
                                         entry["size"], entry["md5sum"], entry["location id"])

        if found is None:
            self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
        elif found is False and poolfile is not None:
            self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
        else:
            if poolfile is None:
                entry["files id"] = None
            else:
                entry["files id"] = poolfile.file_id

        # Check for packages that have moved from one component to another
        entry['suite'] = suite
        arch_list = [entry["architecture"], 'all']
        component = get_component_by_package_suite(self.pkg.files[f]['package'], \
            [suite], arch_list=arch_list, session=session)
        if component is not None:
            entry["othercomponents"] = component

    def check_files(self, action=True):
        file_keys = self.pkg.files.keys()
        holding = Holding()
        cnf = Config()

        if action:
            cwd = os.getcwd()
            os.chdir(self.pkg.directory)
            for f in file_keys:
                ret = holding.copy_to_holding(f)
                if ret is not None:
                    self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)

            os.chdir(cwd)

        # check we already know the changes file
        # [NB: this check must be done post-suite mapping]
        base_filename = os.path.basename(self.pkg.changes_file)

        session = DBConn().session()

        try:
            dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
            # if in the pool or in a queue other than unchecked, reject
            if (dbc.in_queue is None) \
                   or (dbc.in_queue is not None
                       and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
                self.rejects.append("%s file already known to dak" % base_filename)
        except NoResultFound, e:
            # not known, good
            pass

        has_binaries = False
        has_source = False

        for f, entry in self.pkg.files.items():
            # Ensure the file does not already exist in one of the accepted directories
            for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                if not cnf.has_key("Dir::Queue::%s" % (d)): continue
                if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d)], f)):
                    self.rejects.append("%s file already exists in the %s directory." % (f, d))

            if not re_taint_free.match(f):
                self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))

            # Check the file is readable
            if os.access(f, os.R_OK) == 0:
                # When running in -n, copy_to_holding() won't have
                # generated the reject_message, so we need to.
                if action:
                    if os.path.exists(f):
                        self.rejects.append("Can't read `%s'. [permission denied]" % (f))
                    else:
                        # Don't directly reject, mark to check later to deal with orig's
                        # we can find in the pool
                        self.later_check_files.append(f)
                entry["type"] = "unreadable"
                continue

            # If it's byhand skip remaining checks
            if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Checks for a binary package...
            elif re_isadeb.match(f):
                has_binaries = True
                entry["type"] = "deb"

                # This routine appends to self.rejects/warnings as appropriate
                self.binary_file_checks(f, session)

            # Checks for a source package...
            elif re_issource.match(f):
                has_source = True

                # This routine appends to self.rejects/warnings as appropriate
                self.source_file_checks(f, session)

            # Not a binary or source package?  Assume byhand...
            else:
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Per-suite file checks
            entry["oldfiles"] = {}
            for suite in self.pkg.changes["distribution"].keys():
                self.per_suite_file_checks(f, suite, session)

        session.close()

        # If the .changes file says it has source, it must have source.
        if self.pkg.changes["architecture"].has_key("source"):
            if not has_source:
                self.rejects.append("no source found but Architecture line in changes mentions source.")

            if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
                self.rejects.append("source only uploads are not supported.")

    ###########################################################################
    def check_dsc(self, action=True, session=None):
        """Returns bool indicating whether or not the source changes are valid"""
        # Ensure there is source to check
        if not self.pkg.changes["architecture"].has_key("source"):
            return True

        # Find the .dsc
        dsc_filename = None
        for f, entry in self.pkg.files.items():
            if entry["type"] == "dsc":
                if dsc_filename:
                    self.rejects.append("can not process a .changes file with multiple .dsc's.")
                    return False
                else:
                    dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            self.rejects.append("source uploads must contain a dsc file")
            return False

        # Parse the .dsc file
        try:
            self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1, dsc_file=1))
        except CantOpenError:
            # if not -n copy_to_holding() will have done this for us...
            if not action:
                self.rejects.append("%s: can't read file." % (dsc_filename))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
        except InvalidDscError, line:
            self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
        except ChangesUnicodeError:
            self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))

        # Build up the file list of files mentioned by the .dsc
        try:
            self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
        except NoFilesFieldError:
            self.rejects.append("%s: no Files: field." % (dsc_filename))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
            return False

        # Enforce mandatory fields
        for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
            if not self.pkg.dsc.has_key(i):
                self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
                return False

        # Validate the source and version fields
        if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
        if not re_valid_version.match(self.pkg.dsc["version"]):
            self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))
        # Only a limited set of source formats is allowed in each suite
        for dist in self.pkg.changes["distribution"].keys():
            allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
            if self.pkg.dsc["format"] not in allowed:
                self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))

        # Validate the Maintainer field
        try:
            # We ignore the return value
            fix_maintainer(self.pkg.dsc["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                                 % (dsc_filename, self.pkg.dsc["maintainer"], msg))

        # Validate the build-depends field(s)
        for field_name in [ "build-depends", "build-depends-indep" ]:
            field = self.pkg.dsc.get(field_name)
            if field:
                # Have apt try to parse them...
                try:
                    apt_pkg.ParseSrcDepends(field)
                except:
                    self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))

        # Ensure the version number in the .dsc matches the version number in the .changes
        epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        changes_version = self.pkg.files[dsc_filename]["version"]

        if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
            self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))

        # Ensure the Files field contains only what's expected
        self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))

        # Ensure source is newer than existing source in target suites
        session = DBConn().session()
        self.check_source_against_db(dsc_filename, session)
        self.check_dsc_against_db(dsc_filename, session)

        dbchg = get_dbchange(self.pkg.changes_file, session)

        # Finally, check if we're missing any files
        for f in self.later_check_files:
            print 'XXX: %s' % f
            # Check if we've already processed this file if we have a dbchg object
            ok = False
            if dbchg:
                for pf in dbchg.files:
                    if pf.filename == f and pf.processed:
                        self.notes.append('%s was already processed so we can go ahead' % f)
                        ok = True
                        del self.pkg.files[f]
            if not ok:
                self.rejects.append("Could not find file %s referenced in changes" % f)

        session.close()

        return True

    ###########################################################################

    def get_changelog_versions(self, source_dir):
        """Extracts the source package and (optionally) grabs the
        version history out of debian/changelog for the BTS."""

        cnf = Config()

        # Find the .dsc (again)
        dsc_filename = None
        for f in self.pkg.files.keys():
            if self.pkg.files[f]["type"] == "dsc":
                dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            return

        # Create a symlink mirror of the source files in our temporary directory
        for f in self.pkg.files.keys():
            m = re_issource.match(f)
            if m:
                src = os.path.join(source_dir, f)
                # If a file is missing for whatever reason, give up.
                if not os.path.exists(src):
                    return
                ftype = m.group(3)
                if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
                   self.pkg.orig_files[f].has_key("path"):
                    continue
                dest = os.path.join(os.getcwd(), f)
                os.symlink(src, dest)

        # If the orig files are not a part of the upload, create symlinks to the
        # existing copies.
        for orig_file in self.pkg.orig_files.keys():
            if not self.pkg.orig_files[orig_file].has_key("path"):
                continue
            dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
            os.symlink(self.pkg.orig_files[orig_file]["path"], dest)

        # Extract the source
        cmd = "dpkg-source -sn -x %s" % (dsc_filename)
        (result, output) = commands.getstatusoutput(cmd)
        if (result != 0):
            self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
            self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
            return

        if not cnf.Find("Dir::Queue::BTSVersionTrack"):
            return

        # Get the upstream version
1255         upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1256         if re_strip_revision.search(upstr_version):
1257             upstr_version = re_strip_revision.sub('', upstr_version)
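     1258         # e.g. a .dsc version of "1:2.30-3" leaves "2.30" here: the epoch
     1258         # is stripped above and the Debian revision just now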
1258
1259         # Ensure the changelog file exists
1260         changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
1261         if not os.path.exists(changelog_filename):
1262             self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
1263             return
1264
1265         # Parse the changelog
1266         self.pkg.dsc["bts changelog"] = ""
1267         changelog_file = utils.open_file(changelog_filename)
1268         for line in changelog_file.readlines():
1269             m = re_changelog_versions.match(line)
1270             if m:
1271                 self.pkg.dsc["bts changelog"] += line
1272         changelog_file.close()
1273
1274         # Check we found at least one revision in the changelog
1275         if not self.pkg.dsc["bts changelog"]:
1276             self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1277
1278     def check_source(self):
1279         # Bail out if there's no source
1281         if not self.pkg.changes["architecture"].has_key("source"):
1282             return
1283
1284         tmpdir = utils.temp_dirname()
1285
1286         # Move into the temporary directory
1287         cwd = os.getcwd()
1288         os.chdir(tmpdir)
1289
1290         # Get the changelog version history
1291         self.get_changelog_versions(cwd)
1292
1293         # Move back and cleanup the temporary tree
1294         os.chdir(cwd)
1295
1296         try:
1297             shutil.rmtree(tmpdir)
1298         except OSError, e:
1299             if e.errno != errno.EACCES:
1300                 # Not just a permissions problem, so give up and report the error
1301                 utils.fubar("%s: couldn't remove tmp dir for source tree: %s" % (self.pkg.dsc["source"], e))
1302
1303             self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1304             # We probably have u-r or u-w directories so chmod everything
1305             # and try again.
1306             cmd = "chmod -R u+rwx %s" % (tmpdir)
1307             result = os.system(cmd)
1308             if result != 0:
1309                 utils.fubar("'%s' failed with result %s." % (cmd, result))
1310             shutil.rmtree(tmpdir)
1311         except Exception, e:
1312             # Anything else is fatal too; report the error and bail out
1313             utils.fubar("%s: couldn't remove tmp dir for source tree: %s" % (self.pkg.dsc["source"], e))
1314
1315     ###########################################################################
1316     def ensure_hashes(self):
1317         # Make sure we recognise the format of the Files: field in the .changes
1318         format = self.pkg.changes.get("format", "0.0").split(".", 1)
1319         if len(format) == 2:
1320             format = int(format[0]), int(format[1])
1321         else:
1322             format = int(float(format[0])), 0
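             # e.g. "1.8" parses to (1, 8) above, and a bare "1" to (1, 0)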
1323
1324         # We need to deal with the original changes blob, as the fields we need
1325         # might not be in the changes dict serialised into the .dak anymore.
1326         orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1327
1328         # Copy the checksums over to the current changes dict.  This will keep
1329         # the existing modifications to it intact.
1330         for field in orig_changes:
1331             if field.startswith('checksums-'):
1332                 self.pkg.changes[field] = orig_changes[field]
1333
1334         # Check for unsupported hashes
1335         for j in utils.check_hash_fields(".changes", self.pkg.changes):
1336             self.rejects.append(j)
1337
1338         for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1339             self.rejects.append(j)
1340
1341         # If the .changes format predates a given hash, we have to calculate it
1342         # ourselves rather than require it to be present in the .changes file
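             # utils.known_hashes is assumed to hold (hashname, hashfunc,
             # (major, minor)) tuples, e.g. ("sha1", apt_pkg.sha1sum, (1, 8)),
             # meaning that hash only becomes mandatory from format 1.8 on
             # (illustrative; the authoritative list lives in utils.py)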
1343         for hashname, hashfunc, version in utils.known_hashes:
1344             # TODO: Move _ensure_changes_hash into this class
1345             for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1346                 self.rejects.append(j)
1347             if "source" in self.pkg.changes["architecture"]:
1348                 # TODO: Move _ensure_dsc_hash into this class
1349                 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1350                     self.rejects.append(j)
1351
1352     def check_hashes(self):
1353         for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1354             self.rejects.append(m)
1355
1356         for m in utils.check_size(".changes", self.pkg.files):
1357             self.rejects.append(m)
1358
1359         for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1360             self.rejects.append(m)
1361
1362         for m in utils.check_size(".dsc", self.pkg.dsc_files):
1363             self.rejects.append(m)
1364
1365         self.ensure_hashes()
1366
1367     ###########################################################################
1368
1369     def ensure_orig(self, target_dir='.', session=None):
1370         """
1371         Ensures that all orig files mentioned in the changes file are present
1372         in target_dir. If they do not exist, they are symlinked into place.
1373
1374         A list of the symlinks that were created is returned (so they
1375         can be removed afterwards).
1376         """
1377
1378         symlinked = []
1379         cnf = Config()
1380
1381         for filename, entry in self.pkg.dsc_files.iteritems():
1382             if not re_is_orig_source.match(filename):
1383                 # File is not an orig; ignore
1384                 continue
1385
1386             if os.path.exists(filename):
1387                 # File exists, no need to continue
1388                 continue
1389
1390             def symlink_if_valid(path):
1391                 f = utils.open_file(path)
1392                 md5sum = apt_pkg.md5sum(f)
1393                 f.close()
1394
1395                 fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
1396                 expected = (int(entry['size']), entry['md5sum'])
1397
1398                 if fingerprint != expected:
1399                     return False
1400
1401                 dest = os.path.join(target_dir, filename)
1402
1403                 os.symlink(path, dest)
1404                 symlinked.append(dest)
1405
1406                 return True
1407
1408             session_ = session
1409             if session is None:
1410                 session_ = DBConn().session()
1411
1412             found = False
1413
1414             # Look in the pool
1415             for poolfile in get_poolfile_like_name(filename, session_):
1416                 poolfile_path = os.path.join(
1417                     poolfile.location.path, poolfile.filename
1418                 )
1419
1420                 if symlink_if_valid(poolfile_path):
1421                     found = True
1422                     break
1423
1424             if session is None:
1425                 session_.close()
1426
1427             if found:
1428                 continue
1429
1430             # Look in some other queues for the file
1431             queues = ('New', 'Byhand', 'ProposedUpdates',
1432                 'OldProposedUpdates', 'Embargoed', 'Unembargoed')
1433
1434             for queue in queues:
1435                 if not cnf.get('Dir::Queue::%s' % queue):
1436                     continue
1437
1438                 queuefile_path = os.path.join(
1439                     cnf['Dir::Queue::%s' % queue], filename
1440                 )
1441
1442                 if not os.path.exists(queuefile_path):
1443                     # Does not exist in this queue
1444                     continue
1445
1446                 if symlink_if_valid(queuefile_path):
1447                     break
1448
1449         return symlinked
1450
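     # A minimal usage sketch for ensure_orig (hypothetical caller; the
     # cleanup mirrors what check_lintian below actually does):
     #     symlinked = upload.ensure_orig()
     #     try:
     #         pass  # run whatever needs the orig tarballs in place
     #     finally:
     #         for link in symlinked:
     #             os.unlink(link)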
1451     ###########################################################################
1452
1453     def check_lintian(self):
1454         """
1455         Extends self.rejects by checking the output of lintian against tags
1456         specified in Dinstall::LintianTags.
1457         """
1458
1459         cnf = Config()
1460
1461         # Don't reject binary uploads
1462         if not self.pkg.changes['architecture'].has_key('source'):
1463             return
1464
1465         # Only check some distributions
1466         for dist in ('unstable', 'experimental'):
1467             if dist in self.pkg.changes['distribution']:
1468                 break
1469         else:
1470             return
1471
1472         # If we do not have a tagfile, don't do anything
1473         tagfile = cnf.get("Dinstall::LintianTags")
1474         if tagfile is None:
1475             return
1476
1477         # Parse the yaml file
1478         sourcefile = open(tagfile, 'r')
1479         sourcecontent = sourcefile.read()
1480         sourcefile.close()
1481
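         # The tag file is expected to look roughly like this (shape inferred
         # from the 'lintian' key and the .values() iteration below; the
         # category names are illustrative):
         #     lintian:
         #       warning:
         #         - some-tag
         #       error:
         #         - another-tag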
1482         try:
1483             lintiantags = yaml.safe_load(sourcecontent)['lintian']
1484         except yaml.YAMLError, msg:
1485             utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
1486             return
1487
1488         # Try to find all origs mentioned in the .dsc
1489         symlinked = self.ensure_orig()
1490
1491         # Set up the input file for lintian
1492         fd, temp_filename = utils.temp_filename()
1493         temptagfile = os.fdopen(fd, 'w')
1494         for tags in lintiantags.values():
1495             temptagfile.writelines(['%s\n' % x for x in tags])
1496         temptagfile.close()
1497
1498         try:
1499             cmd = "lintian --show-overrides --tags-from-file %s %s" % \
1500                 (temp_filename, self.pkg.changes_file)
1501
1502             result, output = commands.getstatusoutput(cmd)
1503         finally:
1504             # Remove our tempfile and any symlinks we created
1505             os.unlink(temp_filename)
1506
1507             for symlink in symlinked:
1508                 os.unlink(symlink)
1509
1510         if result == 2:
1511             utils.warn("lintian failed for %s [return code: %s]." % \
1512                 (self.pkg.changes_file, result))
1513             utils.warn(utils.prefix_multi_line_string(output, \
1514                 " [possible output:] "))
1515
1516         def log(*txt):
1517             if self.logger:
1518                 self.logger.log(
1519                     [self.pkg.changes_file, "check_lintian"] + list(txt)
1520                 )
1521
1522         # Generate messages
1523         parsed_tags = parse_lintian_output(output)
1524         self.rejects.extend(
1525             generate_reject_messages(parsed_tags, lintiantags, log=log)
1526         )
1527
1528     ###########################################################################
1529     def check_urgency(self):
1530         cnf = Config()
1531         if self.pkg.changes["architecture"].has_key("source"):
1532             if not self.pkg.changes.has_key("urgency"):
1533                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1534             self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1535             if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1536                 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1537                                      (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1538                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1539
1540     ###########################################################################
1541
1542     # Sanity check the time stamps of files inside debs.
1543     # [Files in the near future cause ugly warnings and extreme time
1544     #  travel can cause errors on extraction]
1545
1546     def check_timestamps(self):
1547         Cnf = Config()
1548
1549         future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1550         past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1551         tar = TarTime(future_cutoff, past_cutoff)
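         # e.g. with Dinstall::FutureTimeTravelGrace = 86400 and
         # Dinstall::PastCutoffYear = "1975" (illustrative values, not
         # defaults), files stamped more than a day ahead of now or before
         # 1975-01-01 local time are flagged below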
1552
1553         for filename, entry in self.pkg.files.items():
1554             if entry["type"] == "deb":
1555                 tar.reset()
1556                 try:
1557                     deb_file = utils.open_file(filename)
1558                     apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
1559                     deb_file.seek(0)
1560                     try:
1561                         apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
1562                     except SystemError, e:
1563                         # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
1564                         if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
1565                             raise
1566                         deb_file.seek(0)
1567                         apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")
1568
1569                     deb_file.close()
1570
1571                     future_files = tar.future_files.keys()
1572                     if future_files:
1573                         num_future_files = len(future_files)
1574                         future_file = future_files[0]
1575                         future_date = tar.future_files[future_file]
1576                         self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1577                                % (filename, num_future_files, future_file, time.ctime(future_date)))
1578
1579                     ancient_files = tar.ancient_files.keys()
1580                     if ancient_files:
1581                         num_ancient_files = len(ancient_files)
1582                         ancient_file = ancient_files[0]
1583                         ancient_date = tar.ancient_files[ancient_file]
1584                         self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1585                                % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1586                 except Exception, e:
1587                     self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, e.__class__.__name__, e))
1588
1589     def check_if_upload_is_sponsored(self, uid_email, uid_name):
1590         if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1591             sponsored = False
1592         elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1593             sponsored = False
1594             if uid_name == "":
1595                 sponsored = True
1596         else:
1597             sponsored = True
1598             if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1599                 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1600                 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1601                         self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1602                     self.pkg.changes["sponsoremail"] = uid_email
1603
1604         return sponsored
1605
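     # Illustrative outcomes of check_if_upload_is_sponsored above (derived
     # from its logic; not exhaustive):
     #     uid email matches Maintainer/Changed-By            -> not sponsored
     #     non-empty uid name matches Maintainer/Changed-By   -> not sponsored
     #     anything else (including an empty uid name match)  -> sponsored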
1606
1607     ###########################################################################
1608     # check_signed_by_key checks
1609     ###########################################################################
1610
1611     def check_signed_by_key(self):
1612         """Ensure the .changes is signed by an authorized uploader."""
1613         session = DBConn().session()
1614
1615         # First of all we check that the person has proper upload permissions
1616         # and that this upload isn't blocked
1617         fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1618
1619         if fpr is None:
1620             self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1621             return
1622
1623         # TODO: Check that import-keyring adds UIDs properly
1624         if not fpr.uid:
1625             self.rejects.append("Cannot find uid for fingerprint %s.  Please contact ftpmaster@debian.org" % fpr.fingerprint)
1626             return
1627
1628         # Check that the fingerprint which uploaded has permission to do so
1629         self.check_upload_permissions(fpr, session)
1630
1631         # Check that this package is not in a transition
1632         self.check_transition(session)
1633
1634         session.close()
1635
1636
1637     def check_upload_permissions(self, fpr, session):
1638         # Check any one-off upload blocks
1639         self.check_upload_blocks(fpr, session)
1640
1641         # DM is unfortunately a special case, so we check it first
1643         # (keys with no source access get more access than DMs in one
1644         #  way; DMs can only upload for their packages whether source
1645         #  or binary, whereas keys with no access might be able to
1646         #  upload some binaries)
1647         if fpr.source_acl.access_level == 'dm':
1648             self.check_dm_upload(fpr, session)
1649         else:
1650             # Check source-based permissions for other types
1651             if self.pkg.changes["architecture"].has_key("source") and \
1652                 fpr.source_acl.access_level is None:
1653                 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1654                 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1655                 self.rejects.append(rej)
1656                 return
1657             # If not a DM, we allow full upload rights
1658             uid_email = "%s@debian.org" % (fpr.uid.uid)
1659             self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1660
1661
1662         # Check binary upload permissions
1663         # By this point we know that DMs can't have got here unless they
1664         # are allowed to deal with the package concerned so just apply
1665         # normal checks
1666         if fpr.binary_acl.access_level == 'full':
1667             return
1668
1669         # Otherwise we're in the map case
1670         tmparches = self.pkg.changes["architecture"].copy()
1671         tmparches.pop('source', None)
1672
1673         for bam in fpr.binary_acl_map:
1674             tmparches.pop(bam.architecture.arch_string, None)
1675
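         # Illustrative example (hypothetical data): a .changes listing
         # {source, amd64, armel} with a binary_acl_map covering only amd64
         # leaves tmparches == {armel}, which triggers a reject below when
         # binary_reject is set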
1676         if len(tmparches.keys()) > 0:
1677             if fpr.binary_reject:
1678                 rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1679                 rej += "\narchitectures involved are: %s" % ",".join(tmparches.keys())
1680                 self.rejects.append(rej)
1681             else:
1682                 # TODO: This is where we'll implement reject vs throw away binaries later
1683                 rej = "Uhm.  I'm meant to throw away the binaries now but that's not implemented yet"
1684                 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1685                 rej += "\nFingerprint: %s" % (fpr.fingerprint)
1686                 self.rejects.append(rej)
1687
1688
1689     def check_upload_blocks(self, fpr, session):
1690         """Check whether any upload blocks apply to this source, source
1691            version, uid / fpr combination"""
1692
1693         def block_rej_template(fb):
1694             rej = 'Manual upload block in place for package %s' % fb.source
1695             if fb.version is not None:
1696                 rej += ', version %s' % fb.version
1697             return rej
1698
1699         for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1700             # version is None if the block applies to all versions
1701             if fb.version is None or fb.version == self.pkg.changes['version']:
1702                 # Check both fpr and uid - either is enough to cause a reject
1703                 if fb.fpr is not None:
1704                     if fb.fpr.fingerprint == fpr.fingerprint:
1705                         self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1706                 if fb.uid is not None:
1707                     if fb.uid == fpr.uid:
1708                         self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1709
1710
1711     def check_dm_upload(self, fpr, session):
1712         # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
1713         ## none of the uploaded packages are NEW
1714         rej = False
1715         for f in self.pkg.files.keys():
1716             if self.pkg.files[f].has_key("byhand"):
1717                 self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
1718                 rej = True
1719             if self.pkg.files[f].has_key("new"):
1720                 self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
1721                 rej = True
1722
1723         if rej:
1724             return
1725
1726         r = get_newest_source(self.pkg.changes["source"], session)
1727
1728         if r is None:
1729             rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
1730             self.rejects.append(rej)
1731             return
1732
1733         if not r.dm_upload_allowed:
1734             rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
1735             self.rejects.append(rej)
1736             return
1737
1738         ## the Maintainer: field of the uploaded .changes file corresponds with
1739         ## the owner of the key used (ie, non-developer maintainers may not sponsor
1740         ## uploads)
1741         if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
1742             self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
1743
1744         ## the most recent version of the package uploaded to unstable or
1745         ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
1746         ## non-developer maintainers cannot NMU or hijack packages)
1747
1748         # srcuploaders includes the maintainer
1749         accept = False
1750         for sup in r.srcuploaders:
1751             (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
1752             # Eww - I hope we never have two people with the same name in Debian
1753             if email == fpr.uid.uid or name == fpr.uid.name:
1754                 accept = True
1755                 break
1756
1757         if not accept:
1758             self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
1759             return
1760
1761         ## none of the packages are being taken over from other source packages
1762         for b in self.pkg.changes["binary"].keys():
1763             for suite in self.pkg.changes["distribution"].keys():
1764                 for s in get_source_by_package_and_suite(b, suite, session):
1765                     if s.source != self.pkg.changes["source"]:
1766                         self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s.source, suite))
1767
1768
1769
1770     def check_transition(self, session):
1771         cnf = Config()
1772
1773         sourcepkg = self.pkg.changes["source"]
1774
1775         # No sourceful upload -> no need to do anything else, direct return
1776         # We also only act on uploads to unstable, not experimental or those
1777         # going to some proposed-updates queue
1778         if "source" not in self.pkg.changes["architecture"] or \
1779            "unstable" not in self.pkg.changes["distribution"]:
1780             return
1781
1782         # Also, only check if a transitions file is defined (and actually
1783         # exists) in the configuration.
1784         transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1785         if transpath == "" or not os.path.exists(transpath):
1786             return
1787
1788         # Parse the yaml file
1789         sourcefile = open(transpath, 'r')
1790         sourcecontent = sourcefile.read()
1790         sourcefile.close()
1791         try:
1792             transitions = yaml.safe_load(sourcecontent)
1793         except yaml.YAMLError, msg:
1794             # This shouldn't happen, there is a wrapper to edit the file which
1795             # checks it, but we prefer to be safe rather than end up rejecting
1796             # everything.
1797             utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1798             return
1799
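         # The transitions file is expected to look roughly like this (shape
         # inferred from the fields read below; all names are illustrative):
         #     libfoo-transition:
         #       source: libfoo
         #       new: 1.2-1
         #       rm: Some Releaser
         #       reason: libfoo soname bump
         #       packages:
         #         - bar
         #         - baz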
1800         # Now look through all defined transitions
1801         for trans in transitions:
1802             t = transitions[trans]
1803             source = t["source"]
1804             expected = t["new"]
1805
1806             # Will be None if nothing is in testing.
1807             current = get_source_in_suite(source, "testing", session)
1808             if current is not None:
1809                 compare = apt_pkg.VersionCompare(current.version, expected)
1810
1811             if current is None or compare < 0:
1812                 # This is still valid, the current version in testing is older than
1813                 # the new version we wait for, or there is none in testing yet
1814
1815                 # Check if the source we look at is affected by this.
1816                 if sourcepkg in t['packages']:
1817                     # The source is affected, let's reject it.
1818
1819                     rejectmsg = "%s: part of the %s transition.\n\n" % (
1820                         sourcepkg, trans)
1821
1822                     if current is not None:
1823                         currentlymsg = "at version %s" % (current.version)
1824                     else:
1825                         currentlymsg = "not present in testing"
1826
1827                     rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1828
1829                     rejectmsg += "\n".join(textwrap.wrap("""Your package
1830 is part of a testing transition designed to get %s migrated (it is
1831 currently %s, we need version %s).  This transition is managed by the
1832 Release Team, and %s is the Release-Team member responsible for it.
1833 Please mail debian-release@lists.debian.org or contact %s directly if you
1834 need further assistance.  You might want to upload to experimental until this
1835 transition is done."""
1836                             % (source, currentlymsg, expected, t["rm"], t["rm"])))
1837
1838                     self.rejects.append(rejectmsg)
1839                     return
1840
1841     ###########################################################################
1842     # End check_signed_by_key checks
1843     ###########################################################################
1844
1845     def build_summaries(self):
1846         """ Build a summary of changes the upload introduces. """
1847
1848         (byhand, new, summary, override_summary) = self.pkg.file_summary()
1849
1850         short_summary = summary
1851
1852         # This is for direport's benefit...
1853         f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1854
1855         if byhand or new:
1856             summary += "Changes: " + f
1857
1858         summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1859
1860         summary += self.announce(short_summary, 0)
1861
1862         return (summary, short_summary)
1863
1864     ###########################################################################
1865
1866     def close_bugs(self, summary, action):
1867         """
1868         Send mail to close bugs as instructed by the closes field in the changes file.
1869         Also add a line to summary if any work was done.
1870
1871         @type summary: string
1872         @param summary: summary text, as given by L{build_summaries}
1873
1874         @type action: bool
1875         @param action: if False, no real action will be taken.
1876
1877         @rtype: string
1878         @return: summary. If action was taken, extended by the list of closed bugs.
1879
1880         """
1881
1882         template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1883
1884         bugs = self.pkg.changes["closes"].keys()
1885
1886         if not bugs:
1887             return summary
1888
1889         bugs.sort()
1890         summary += "Closing bugs: "
1891         for bug in bugs:
1892             summary += "%s " % (bug)
1893             if action:
1894                 self.update_subst()
1895                 self.Subst["__BUG_NUMBER__"] = bug
1896                 if self.pkg.changes["distribution"].has_key("stable"):
1897                     self.Subst["__STABLE_WARNING__"] = """
1898 Note that this package is not part of the released stable Debian
1899 distribution.  It may have dependencies on other unreleased software,
1900 or other instabilities.  Please take care if you wish to install it.
1901 The update will eventually make its way into the next released Debian
1902 distribution."""
1903                 else:
1904                     self.Subst["__STABLE_WARNING__"] = ""
1905                 mail_message = utils.TemplateSubst(self.Subst, template)
1906                 utils.send_mail(mail_message)
1907
1908                 # Clear up after ourselves
1909                 del self.Subst["__BUG_NUMBER__"]
1910                 del self.Subst["__STABLE_WARNING__"]
1911
1912         if action and self.logger:
1913             self.logger.log(["closing bugs"] + bugs)
1914
1915         summary += "\n"
1916
1917         return summary
1918
1919     ###########################################################################
1920
1921     def announce(self, short_summary, action):
1922         """
1923         Send an announce mail about a new upload.
1924
1925         @type short_summary: string
1926         @param short_summary: Short summary text to include in the mail
1927
1928         @type action: bool
1929         @param action: if False, no real action will be taken.
1930
1931         @rtype: string
1932         @return: text string describing the action taken.
1933
1934         """
1935
1936         cnf = Config()
1937         announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
1938
1939         # Only do announcements for source uploads with a recent dpkg-dev installed
1940         if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
1941            self.pkg.changes["architecture"].has_key("source"):
1942             return ""
1943
1944         lists_done = {}
1945         summary = ""
1946
1947         self.Subst["__SHORT_SUMMARY__"] = short_summary
1948
1949         for dist in self.pkg.changes["distribution"].keys():
1950             suite = get_suite(dist)
1951             if suite is None: continue
1952             announce_list = suite.announce
1953             if announce_list == "" or lists_done.has_key(announce_list):
1954                 continue
1955
1956             lists_done[announce_list] = 1
1957             summary += "Announcing to %s\n" % (announce_list)
1958
1959             if action:
1960                 self.update_subst()
1961                 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
1962                 if cnf.get("Dinstall::TrackingServer") and \
1963                    self.pkg.changes["architecture"].has_key("source"):
1964                     trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
1965                     self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
1966
1967                 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
1968                 utils.send_mail(mail_message)
1969
1970                 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
1971
1972         if cnf.FindB("Dinstall::CloseBugs"):
1973             summary = self.close_bugs(summary, action)
1974
1975         del self.Subst["__SHORT_SUMMARY__"]
1976
1977         return summary
1978
1979     ###########################################################################
1980     @session_wrapper
1981     def accept (self, summary, short_summary, session=None):
1982         """
1983         Accept an upload.
1984
1985         This moves all files referenced from the .changes into the pool,
1986         sends the accepted mail, announces to lists, closes bugs and
1987         also checks for override disparities. If enabled it will write out
1988         the version history for the BTS Version Tracking and will finally call
1989         L{queue_build}.
1990
1991         @type summary: string
1992         @param summary: Summary text
1993
1994         @type short_summary: string
1995         @param short_summary: Short summary
1996         """
1997
1998         cnf = Config()
1999         stats = SummaryStats()
2000
2001         print "Installing."
2002         self.logger.log(["installing changes", self.pkg.changes_file])
2003
2004         poolfiles = []
2005
2006         # Add the .dsc file to the DB first
2007         for newfile, entry in self.pkg.files.items():
2008             if entry["type"] == "dsc":
2009                 source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
2010                 for j in pfs:
2011                     poolfiles.append(j)
2012
2013         # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
2014         for newfile, entry in self.pkg.files.items():
2015             if entry["type"] == "deb":
2016                 poolfiles.append(add_deb_to_db(self, newfile, session))
2017
2018         # If this is a sourceful diff only upload that is moving
2019         # cross-component we need to copy the .orig files into the new
2020         # component too for the same reasons as above.
2021         # XXX: mhy: I think this should be in add_dsc_to_db
2022         if self.pkg.changes["architecture"].has_key("source"):
2023             for orig_file in self.pkg.orig_files.keys():
2024                 if not self.pkg.orig_files[orig_file].has_key("id"):
2025                     continue # Skip if it's not in the pool
2026                 orig_file_id = self.pkg.orig_files[orig_file]["id"]
2027                 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
2028                     continue # Skip if the location didn't change
2029
2030                 # Do the move
2031                 oldf = get_poolfile_by_id(orig_file_id, session)
2032                 old_filename = os.path.join(oldf.location.path, oldf.filename)
2033                 old_dat = {'size': oldf.filesize,   'md5sum': oldf.md5sum,
2034                            'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
2035
2036                 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
2037
2038                 # TODO: Care about size/md5sum collisions etc
2039                 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
2040
2041                 # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
2042                 if newf is None:
2043                     utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
2044                     newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
2045
2046                     session.flush()
2047
2048                     # Don't reference the old file from this changes
2049                     for p in poolfiles:
2050                         if p.file_id == oldf.file_id:
2051                             poolfiles.remove(p)
2052
2053                     poolfiles.append(newf)
2054
2055                     # Fix up the DSC references
2056                     toremove = []
2057
2058                     for df in source.srcfiles:
2059                         if df.poolfile.file_id == oldf.file_id:
2060                             # Add a new DSC entry and mark the old one for deletion
2061                             # Don't do it in the loop so we don't change the thing we're iterating over
2062                             newdscf = DSCFile()
2063                             newdscf.source_id = source.source_id
2064                             newdscf.poolfile_id = newf.file_id
2065                             session.add(newdscf)
2066
2067                             toremove.append(df)
2068
2069                     for df in toremove:
2070                         session.delete(df)
2071
2072                     # Flush our changes
2073                     session.flush()
2074
2075                     # Make sure that our source object is up-to-date
2076                     session.expire(source)
2077
2078         # Add changelog information to the database
2079         self.store_changelog()
2080
2081         # Install the files into the pool
2082         for newfile, entry in self.pkg.files.items():
2083             destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
2084             utils.move(newfile, destination)
2085             self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
2086             stats.accept_bytes += float(entry["size"])
2087
2088         # Copy the .changes file across for suites which need it.
2089         copy_changes = dict([(x.copychanges, '')
2090                              for x in session.query(Suite).filter(Suite.suite_name.in_(self.pkg.changes["distribution"].keys())).all()
2091                              if x.copychanges is not None])
2092
2093         for dest in copy_changes.keys():
2094             utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
2095
2096         # We're done - commit the database changes
2097         session.commit()
2098         # Our SQL session will automatically start a new transaction after
2099         # the last commit
2100
2101         # Move the .changes into the 'done' directory
2102         utils.move(self.pkg.changes_file,
2103                    os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))
2104
2105         if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
2106             UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
2107
2108         self.update_subst()
2109         self.Subst["__SUMMARY__"] = summary
2110         mail_message = utils.TemplateSubst(self.Subst,
2111                                            os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
2112         utils.send_mail(mail_message)
2113         self.announce(short_summary, 1)
2114
2115         ## Helper stuff for DebBugs Version Tracking
2116         if cnf.Find("Dir::Queue::BTSVersionTrack"):
2117             if self.pkg.changes["architecture"].has_key("source"):
2118                 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2119                 version_history = os.fdopen(fd, 'w')
2120                 version_history.write(self.pkg.dsc["bts changelog"])
2121                 version_history.close()
2122                 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2123                                       self.pkg.changes_file[:-8]+".versions")
2124                 os.rename(temp_filename, filename)
2125                 os.chmod(filename, 0644)
2126
2127             # Write out the binary -> source mapping.
2128             (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2129             debinfo = os.fdopen(fd, 'w')
2130             for name, entry in sorted(self.pkg.files.items()):
2131                 if entry["type"] == "deb":
2132                     line = " ".join([entry["package"], entry["version"],
2133                                      entry["architecture"], entry["source package"],
2134                                      entry["source version"]])
2135                     debinfo.write(line+"\n")
2136             debinfo.close()
2137             filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2138                                   self.pkg.changes_file[:-8]+".debinfo")
2139             os.rename(temp_filename, filename)
2140             os.chmod(filename, 0644)
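                 # An illustrative .debinfo line as written above (fields are
                 # package, version, architecture, source package and source
                 # version):
                 #     foo 1.2-1 amd64 foo 1.2-1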
2141
2142         session.commit()
2143
2144         # Set up our copy queues (e.g. buildd queues)
2145         for suite_name in self.pkg.changes["distribution"].keys():
2146             suite = get_suite(suite_name, session)
2147             for q in suite.copy_queues:
2148                 for f in poolfiles:
2149                     q.add_file_from_pool(f)
2150
2151         session.commit()
2152
2153         # Finally...
2154         stats.accept_count += 1
2155
2156     def check_override(self):
2157         """
2158         Checks override entries for validity. Mails "Override disparity" warnings,
2159         if that feature is enabled.
2160
2161         Abandons the check if
2162           - override disparity checks are disabled
2163           - mail sending is disabled
2164         """
2165
2166         cnf = Config()
2167
2168         # Abandon the check if override disparity checks have been disabled
2169         if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
2170             return
2171
2172         summary = self.pkg.check_override()
2173
2174         if summary == "":
2175             return
2176
2177         overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2178
2179         self.update_subst()
2180         self.Subst["__SUMMARY__"] = summary
2181         mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2182         utils.send_mail(mail_message)
2183         del self.Subst["__SUMMARY__"]
2184
2185     ###########################################################################
2186
2187     def remove(self, from_dir=None):
2188         """
2189         Used (for instance) in p-u to remove the package from unchecked
2190
2191         Also removes the package from holding area.
2192         """
2193         if from_dir is None:
2194             from_dir = self.pkg.directory
2195         h = Holding()
2196
2197         for f in self.pkg.files.keys():
2198             os.unlink(os.path.join(from_dir, f))
2199             if os.path.exists(os.path.join(h.holding_dir, f)):
2200                 os.unlink(os.path.join(h.holding_dir, f))
2201
2202         os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2203         if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2204             os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2205
2206     ###########################################################################
2207
2208     def move_to_queue (self, queue):
2209         """
2210         Move files to a destination queue using the permissions in the table
2211         """
2212         h = Holding()
2213         utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2214                    queue.path, perms=int(queue.change_perms, 8))
2215         for f in self.pkg.files.keys():
2216             utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
2217
2218     ###########################################################################
2219
2220     def force_reject(self, reject_files):
2221         """
2222         Forcefully move files from the current directory to the
2223         reject directory.  If any file already exists in the reject
2224         directory it will be moved to the morgue to make way for
2225         the new file.
2226
2227         @type reject_files: dict
2228         @param reject_files: file dictionary
2229
2230         """
2231
2232         cnf = Config()
2233
2234         for file_entry in reject_files:
2235             # Skip any files which don't exist or which we don't have permission to copy.
2236             if not os.access(file_entry, os.R_OK):
2237                 continue
2238
2239             dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
2240
2241             try:
2242                 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
2243             except OSError, e:
2244                 # File exists?  Let's find a new name by adding a number
2245                 if e.errno == errno.EEXIST:
2246                     try:
2247                         dest_file = utils.find_next_free(dest_file, 255)
2248                     except NoFreeFilenameError:
2249                         # Something's either gone badly Pete Tong, or
2250                         # someone is trying to exploit us.
2251                         utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))
2252                         return
2253
2254                     # Make sure we really got it
2255                     try:
2256                         dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2257                     except OSError, e:
2258                         # Likewise
2259                         utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
2260                         return
2261                 else:
2262                     raise
2263             # If we got here, we own the destination file, so we can
2264             # safely overwrite it.
2265             utils.move(file_entry, dest_file, 1, perms=0660)
2266             os.close(dest_fd)
2267
2268     ###########################################################################
2269     def do_reject (self, manual=0, reject_message="", notes=""):
2270         """
2271         Reject an upload. If called without a reject message or C{manual} is
2272         true, spawn an editor so the user can write one.
2273
2274         @type manual: bool
2275         @param manual: manual or automated rejection
2276
2277         @type reject_message: string
2278         @param reject_message: A reject message
2279
2280         @return: 0
2281
2282         """
2283         # If we weren't given a manual rejection message, spawn an
2284         # editor so the user can add one in...
2285         if manual and not reject_message:
2286             (fd, temp_filename) = utils.temp_filename()
2287             temp_file = os.fdopen(fd, 'w')
2288             if len(notes) > 0:
2289                 for note in notes:
2290                     temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
2291                                     % (note.author, note.version, note.notedate, note.comment))
2292             temp_file.close()
2293             editor = os.environ.get("EDITOR","vi")
2294             answer = 'E'
2295             while answer == 'E':
2296                 os.system("%s %s" % (editor, temp_filename))
2297                 temp_fh = utils.open_file(temp_filename)
2298                 reject_message = "".join(temp_fh.readlines())
2299                 temp_fh.close()
2300                 print "Reject message:"
2301                 print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
2302                 prompt = "[R]eject, Edit, Abandon, Quit ?"
2303                 answer = "XXX"
2304                 while prompt.find(answer) == -1:
2305                     answer = utils.our_raw_input(prompt)
2306                     m = re_default_answer.search(prompt)
2307                     if answer == "":
2308                         answer = m.group(1)
2309                     answer = answer[:1].upper()
2310             os.unlink(temp_filename)
2311             if answer == 'A':
2312                 return 1
2313             elif answer == 'Q':
2314                 sys.exit(0)
2315
2316         print "Rejecting.\n"
2317
2318         cnf = Config()
2319
2320         reason_filename = self.pkg.changes_file[:-8] + ".reason"
2321         reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2322
2323         # Move all the files into the reject directory
2324         reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2325         self.force_reject(reject_files)
2326
2327         # If we fail here someone is probably trying to exploit the race
2328         # so let's just raise an exception ...
2329         if os.path.exists(reason_filename):
2330             os.unlink(reason_filename)
2331         reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2332
2333         rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
2334
2335         self.update_subst()
2336         if not manual:
2337             self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2338             self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2339             self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2340             os.write(reason_fd, reject_message)
2341             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2342         else:
2343             # Build up the rejection email
2344             user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2345             self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2346             self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2347             self.Subst["__REJECT_MESSAGE__"] = ""
2348             self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2349             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2350             # Write the rejection email out as the <foo>.reason file
2351             os.write(reason_fd, reject_mail_message)
2352
2353         del self.Subst["__REJECTOR_ADDRESS__"]
2354         del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2355         del self.Subst["__CC__"]
2356
2357         os.close(reason_fd)
2358
2359         # Send the rejection mail
2360         utils.send_mail(reject_mail_message)
2361
2362         if self.logger:
2363             self.logger.log(["rejected", self.pkg.changes_file])
2364
2365         return 0
2366
2367     ################################################################################
2368     def in_override_p(self, package, component, suite, binary_type, filename, session):
2369         """
2370         Check if a package already has override entries in the DB
2371
2372         @type package: string
2373         @param package: package name
2374
2375         @type component: string
2376         @param component: database id of the component
2377
2378         @type suite: int
2379         @param suite: database id of the suite
2380
2381         @type binary_type: string
2382         @param binary_type: type of the package
2383
2384         @type filename: string
2385         @param filename: filename we check
2386
2387         @return: the database result. But no one cares anyway.
2388
2389         """
2390
2391         cnf = Config()
2392
2393         if binary_type == "": # must be source
2394             file_type = "dsc"
2395         else:
2396             file_type = binary_type
2397
2398         # Override suite name; used for example with proposed-updates
2399         oldsuite = get_suite(suite, session)
2400         if oldsuite is not None and oldsuite.overridesuite:
2401             suite = oldsuite.overridesuite
2402
2403         result = get_override(package, suite, component, file_type, session)
2404
2405         # If checking for a source package fall back on the binary override type
2406         # If checking for a source package, fall back on the binary override type
2407             result = get_override(package, suite, component, ['deb', 'udeb'], session)
2408
2409         # Remember the section and priority so we can check them later if appropriate
2410         if len(result) > 0:
2411             result = result[0]
2412             self.pkg.files[filename]["override section"] = result.section.section
2413             self.pkg.files[filename]["override priority"] = result.priority.priority
2414             return result
2415
2416         return None
2417
2418     ################################################################################
2419     def get_anyversion(self, sv_list, suite):
2420         """
2421         @type sv_list: list
2422         @param sv_list: list of (suite, version) tuples to check
2423
2424         @type suite: string
2425         @param suite: suite name
2426
2427         Returns the highest version in C{sv_list} whose suite is C{suite}
             or one listed in "Suite::<suite>::VersionChecks::Enhances".
2428         """
2429         Cnf = Config()
2430         anyversion = None
2431         anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
2432         for (s, v) in sv_list:
2433             if s in [ x.lower() for x in anysuite ]:
2434                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2435                     anyversion = v
2436
2437         return anyversion
2438
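     # Worked example (illustrative): with
     #     sv_list = [("unstable", "1.0-1"), ("experimental", "1.1-1")]
     # and "Suite::unstable::VersionChecks::Enhances" listing experimental,
     # get_anyversion(sv_list, "unstable") returns "1.1-1", the highest
     # version across unstable and the suites it is enhanced by.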
2439     ################################################################################
2440
2441     def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2442         """
2443         @type sv_list: list
2444         @param sv_list: list of (suite, version) tuples to check
2445
2446         @type filename: string
2447         @param filename: name of the file being checked; used in messages
2448
2449         @type new_version: string
2450         @param new_version: version of the package being checked
2451
2452         Ensure versions are newer than existing packages in target
2453         suites and that cross-suite version checking rules as
2454         set out in the conf file are satisfied.
2455         """
2456
2457         cnf = Config()
2458
2459         # Check versions for each target suite
2460         for target_suite in self.pkg.changes["distribution"].keys():
2461             must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
2462             must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
2463
2464             # Enforce "must be newer than target suite" even if conffile omits it
2465             if target_suite not in must_be_newer_than:
2466                 must_be_newer_than.append(target_suite)
2467
2468             for (suite, existent_version) in sv_list:
2469                 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
2470
2471                 if suite in must_be_newer_than and sourceful and vercmp < 1:
2472                     self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2473
2474                 if suite in must_be_older_than and vercmp > -1:
2475                     cansave = 0
2476
2477                     if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2478                         # we really use the other suite, ignoring the conflicting one ...
2479                         addsuite = self.pkg.changes["distribution-version"][suite]
2480
2481                         add_version = self.get_anyversion(sv_list, addsuite)
2482                         target_version = self.get_anyversion(sv_list, target_suite)
2483
2484                         if not add_version:
2485                             # not add_version can only happen if we map to a suite
2486                             # that doesn't enhance the suite we're propup'ing from.
2487                             # so "propup-ver x a b c; map a d" is a problem only if
2488                             # d doesn't enhance a.
2489                             #
2490                             # I think we could always propagate in this case, rather
2491                             # than complaining. Either way, this isn't a REJECT issue.
2492                             #
2493                             # And - we really should complain to the dorks who configured dak
2494                             self.warnings.append("%s is mapped to, but not enhanced by, %s - adding anyway" % (suite, addsuite))
2495                             self.pkg.changes.setdefault("propdistribution", {})
2496                             self.pkg.changes["propdistribution"][addsuite] = 1
2497                             cansave = 1
2498                         elif not target_version:
2499                             # "not target_version" is true when the package is NEW
2500                             # we could just stick with the "...old version..." REJECT
2501                             # for this, I think.
2502                             self.rejects.append("Won't propagate NEW packages.")
2503                         elif apt_pkg.VersionCompare(new_version, add_version) < 0:
2504                             # propagation would be redundant; no need to reject though.
2505                             self.warnings.append("ignoring version conflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2506                             cansave = 1
2507                         elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2508                              apt_pkg.VersionCompare(add_version, target_version) >= 0:
2509                             # propagate!!
2510                             self.warnings.append("Propagating upload to %s" % (addsuite))
2511                             self.pkg.changes.setdefault("propdistribution", {})
2512                             self.pkg.changes["propdistribution"][addsuite] = 1
2513                             cansave = 1
2514
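                    # None of the escape hatches above applied, so the
                    # cross-suite version conflict stands and we reject.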
2515                     if not cansave:
2516                         self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2517
2518     ################################################################################
2519     def check_binary_against_db(self, filename, session):
2520         # Ensure version is sane
2521         self.cross_suite_version_check( \
2522             get_suite_version_by_package(self.pkg.files[filename]["package"], \
2523                 self.pkg.files[filename]["architecture"], session),
2524             filename, self.pkg.files[filename]["version"], sourceful=False)
2525
2526         # Check for any existing copies of the file
2527         q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2528         q = q.filter_by(version=self.pkg.files[filename]["version"])
2529         q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2530
2531         if q.count() > 0:
2532             self.rejects.append("%s: cannot overwrite an existing copy already in the archive." % filename)
2533
2534     ################################################################################
2535
2536     def check_source_against_db(self, filename, session):
2537         source = self.pkg.dsc.get("source")
2538         version = self.pkg.dsc.get("version")
2539
2540         # Ensure version is sane
2541         self.cross_suite_version_check( \
2542             get_suite_version_by_source(source, session), filename, version,
2543             sourceful=True)
2544
2545     ################################################################################
2546     def check_dsc_against_db(self, filename, session):
2547         """
2548
2549         @warning: NB: this function can remove entries from the 'files' index [if
2550          the orig tarball is a duplicate of the one in the archive]; if
2551          you're iterating over 'files' and call this function as part of
2552          the loop, be sure to add a check to the top of the loop to
2553          ensure you haven't just tried to dereference the deleted entry.
2554
2555         """
2556
2557         Cnf = Config()
2558         self.pkg.orig_files = {} # XXX: do we need to clear it?
2559         orig_files = self.pkg.orig_files
2560
2561         # Try and find all files mentioned in the .dsc.  This has
2562         # to work harder to cope with the multiple possible
2563         # locations of an .orig.tar.gz.
2564         # The ordering on the select is needed to pick the newest orig
2565         # when it exists in multiple places.
2566         for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2567             found = None
2568             if self.pkg.files.has_key(dsc_name):
2569                 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2570                 actual_size = int(self.pkg.files[dsc_name]["size"])
2571                 found = "%s in incoming" % (dsc_name)
2572
2573                 # Check the file does not already exist in the archive
2574                 ql = get_poolfile_like_name(dsc_name, session)
2575
2576                 # Keep only results whose filename ends with dsc_name;
2577                 # rebuild the list rather than removing entries while
2578                 # iterating over it, which would skip elements.
2579                 ql = [ i for i in ql if i.filename.endswith(dsc_name) ]
2580
2581                 # "[dak] has not broken them.  [dak] has fixed a
2582                 # brokenness.  Your crappy hack exploited a bug in
2583                 # the old dinstall.
2584                 #
2585                 # "(Come on!  I thought it was always obvious that
2586                 # one just doesn't release different files with
2587                 # the same name and version.)"
2588                 #                        -- ajk@ on d-devel@l.d.o
2589
2590                 if len(ql) > 0:
2591                     # Ignore exact matches for .orig.tar.gz
2592                     match = 0
2593                     if re_is_orig_source.match(dsc_name):
2594                         for i in ql:
2595                             if self.pkg.files.has_key(dsc_name) and \
2596                                int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2597                                self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2598                                 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2599                                 # TODO: Don't delete the entry, just mark it as not needed
2600                                 # This would fix the stupidity of changing something we often iterate over
2601                                 # whilst we're doing it
2602                                 del self.pkg.files[dsc_name]
2603                                 dsc_entry["files id"] = i.file_id
2604                                 if not orig_files.has_key(dsc_name):
2605                                     orig_files[dsc_name] = {}
2606                                 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2607                                 match = 1
2608
2609                                 # Don't bitch that we couldn't find this file later
2610                                 try:
2611                                     self.later_check_files.remove(dsc_name)
2612                                 except ValueError:
2613                                     pass
2614
2615
2616                     if not match:
2617                         self.rejects.append("cannot overwrite an existing copy of '%s' already in the archive." % (dsc_name))
2618
2619             elif re_is_orig_source.match(dsc_name):
2620                 # Check in the pool
2621                 ql = get_poolfile_like_name(dsc_name, session)
2622
2623                 # Keep only results whose filename ends with dsc_name
2624                 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2625                 # (again, rebuild the list rather than removing entries
2626                 #  while iterating over it, which would skip elements)
2627                 ql = [ i for i in ql if i.filename.endswith(dsc_name) ]
2628
2629                 if len(ql) > 0:
2630                     # Unfortunately, we may get more than one match here if,
2631                     # for example, the package was in potato but had an -sa
2632                     # upload in woody.  So we need to choose the right one.
2633
2634                     # default to something sane in case we don't match any or have only one
2635                     x = ql[0]
2636
2637                     if len(ql) > 1:
2638                         for i in ql:
2639                             old_file = os.path.join(i.location.path, i.filename)
2640                             old_file_fh = utils.open_file(old_file)
2641                             actual_md5 = apt_pkg.md5sum(old_file_fh)
2642                             old_file_fh.close()
2643                             actual_size = os.stat(old_file)[stat.ST_SIZE]
2644                             if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2645                                 x = i
2646
2647                     old_file = os.path.join(x.location.path, x.filename)
2648                     old_file_fh = utils.open_file(old_file)
2649                     actual_md5 = apt_pkg.md5sum(old_file_fh)
2650                     old_file_fh.close()
2651                     actual_size = os.stat(old_file)[stat.ST_SIZE]
2652                     found = old_file
2653                     suite_type = x.location.archive_type
2654                     # need this for updating dsc_files in install()
2655                     dsc_entry["files id"] = x.file_id
2656                     # See install() in process-accepted...
2657                     if not orig_files.has_key(dsc_name):
2658                         orig_files[dsc_name] = {}
2659                     orig_files[dsc_name]["id"] = x.file_id
2660                     orig_files[dsc_name]["path"] = old_file
2661                     orig_files[dsc_name]["location"] = x.location.location_id
2662                 else:
2663                     # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2664                     # Not there? Check the queue directories...
2665                     for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2666                         if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2667                             continue
2668                         in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2669                         if os.path.exists(in_otherdir):
2670                             in_otherdir_fh = utils.open_file(in_otherdir)
2671                             actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2672                             in_otherdir_fh.close()
2673                             actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2674                             found = in_otherdir
2675                             if not orig_files.has_key(dsc_name):
2676                                 orig_files[dsc_name] = {}
2677                             orig_files[dsc_name]["path"] = in_otherdir
2678
2679                     if not found:
2680                         self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2681                         continue
2682             else:
2683                 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2684                 continue
2685             if actual_md5 != dsc_entry["md5sum"]:
2686                 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2687             if actual_size != int(dsc_entry["size"]):
2688                 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2689
2690     ################################################################################
2691     # This is used by process-new and process-holding to recheck a changes file
2692     # at the time we're running.  It mainly wraps various other internal functions
2693     # and is similar to accepted_checks - these should probably be tidied up
2694     # and combined
2695     def recheck(self, session):
2696         cnf = Config()
2697         for f in self.pkg.files.keys():
2698             # The .orig.tar.gz can disappear out from under us if it's a
2699             # duplicate of one in the archive.
2700             if not self.pkg.files.has_key(f):
2701                 continue
2702
2703             entry = self.pkg.files[f]
2704
2705             # Check that the source still exists
2706             if entry["type"] == "deb":
2707                 source_version = entry["source version"]
2708                 source_package = entry["source package"]
2709                 if not self.pkg.changes["architecture"].has_key("source") \
2710                    and not source_exists(source_package, source_version, \
2711                     suites = self.pkg.changes["distribution"].keys(), session = session):
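                    # Filenames never carry an epoch, so strip it before
                    # building the .dsc name (e.g. "1:2.3-1" -> "2.3-1").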
2712                     source_epochless_version = re_no_epoch.sub('', source_version)
2713                     dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2714                     found = False
2715                     for q in ["Embargoed", "Unembargoed", "Newstage"]:
2716                         if cnf.has_key("Dir::Queue::%s" % (q)):
2717                             if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
2718                                 found = True
2719                     if not found:
2720                         self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2721
2722             # Version and file overwrite checks
2723             if entry["type"] == "deb":
2724                 self.check_binary_against_db(f, session)
2725             elif entry["type"] == "dsc":
2726                 self.check_source_against_db(f, session)
2727                 self.check_dsc_against_db(f, session)
2728
2729     ################################################################################
2730     def accepted_checks(self, overwrite_checks, session):
2731         # Recheck anything that relies on the database, since that isn't
2732         # frozen between accept time and our run time when called from p-a.
2733
2734         # overwrite_checks is set to False when installing to stable/oldstable
2735
2736         propagate = {}
2737         nopropagate = {}
2738
2739         # Find the .dsc (again)
2740         dsc_filename = None
2741         for f in self.pkg.files.keys():
2742             if self.pkg.files[f]["type"] == "dsc":
2743                 dsc_filename = f
2744
2745         for checkfile in self.pkg.files.keys():
2746             # The .orig.tar.gz can disappear out from under us if it's a
2747             # duplicate of one in the archive.
2748             if not self.pkg.files.has_key(checkfile):
2749                 continue
2750
2751             entry = self.pkg.files[checkfile]
2752
2753             # Check that the source still exists
2754             if entry["type"] == "deb":
2755                 source_version = entry["source version"]
2756                 source_package = entry["source package"]
2757                 if not self.pkg.changes["architecture"].has_key("source") \
2758                    and not source_exists(source_package, source_version, \
2759                     suites = self.pkg.changes["distribution"].keys(), \
2760                     session = session):
2761                     self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2762
2763             # Version and file overwrite checks
2764             if overwrite_checks:
2765                 if entry["type"] == "deb":
2766                     self.check_binary_against_db(checkfile, session)
2767                 elif entry["type"] == "dsc":
2768                     self.check_source_against_db(checkfile, session)
2769                     self.check_dsc_against_db(dsc_filename, session)
2770
2771             # Propagate in the case it is in the override tables:
2772             for suite in self.pkg.changes.get("propdistribution", {}).keys():
2773                 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2774                     propagate[suite] = 1
2775                 else:
2776                     nopropagate[suite] = 1
2777
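        # Promote the upload into a propagation suite only when every file
        # passed the override check there; a single miss vetoes that suite.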
2778         for suite in propagate.keys():
2779             if suite in nopropagate:
2780                 continue
2781             self.pkg.changes["distribution"][suite] = 1
2782
2783         for checkfile in self.pkg.files.keys():
2784             entry = self.pkg.files[checkfile]
2785             # Check the package is still in the override tables
2786             for suite in self.pkg.changes["distribution"].keys():
2787                 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2787                     self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2788
2789     ################################################################################
2790     # If any file of an upload has a recent mtime then chances are good
2791     # the file is still being uploaded.
2792
2793     def upload_too_new(self):
2794         cnf = Config()
2795         too_new = False
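        # e.g. with Dinstall::SkipTime set to 300 (an illustrative value,
        # not necessarily what the local dak.conf uses), anything modified
        # within the last five minutes counts as still being uploaded.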
2796         # Move back to the original directory to get accurate time stamps
2797         cwd = os.getcwd()
2798         os.chdir(self.pkg.directory)
2799         file_list = self.pkg.files.keys()
2800         file_list.extend(self.pkg.dsc_files.keys())
2801         file_list.append(self.pkg.changes_file)
2802         for f in file_list:
2803             try:
2804                 last_modified = time.time()-os.path.getmtime(f)
2805                 if last_modified < int(cnf["Dinstall::SkipTime"]):
2806                     too_new = True
2807                     break
2808             except (OSError, ValueError):
2809                 pass    # a vanished file (or unset SkipTime) is not "too new"
2810
2811         os.chdir(cwd)
2812         return too_new
2813
2814     def store_changelog(self):
2815
2816         # Skip binary-only upload if it is not a bin-NMU
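        # (A bin-NMU conventionally carries a "+bN" version suffix, e.g.
        # 1.2-3+b1; that's the pattern re_bin_only_nmu is expected to match.)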
2817         if not self.pkg.changes['architecture'].has_key('source'):
2818             from daklib.regexes import re_bin_only_nmu
2819             if not re_bin_only_nmu.search(self.pkg.changes['version']):
2820                 return
2821
2822         session = DBConn().session()
2823
2824         # Check if upload already has a changelog entry
2825         query = """SELECT changelog_id FROM changes WHERE source = :source
2826                    AND version = :version AND architecture = :architecture AND changelog_id != 0"""
2827         if session.execute(query, {'source': self.pkg.changes['source'], \
2828                                    'version': self.pkg.changes['version'], \
2829                                    'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
2830             session.commit()
2831             return
2832
2833         # Add current changelog text into changelogs_text table, return created ID
2834         query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
2835         ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]
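        # (INSERT ... RETURNING is PostgreSQL-specific syntax; the archive
        # database is assumed to be PostgreSQL here.)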
2836
2837         # Link ID to the upload available in changes table
2838         query = """UPDATE changes SET changelog_id = :id WHERE source = :source
2839                    AND version = :version AND architecture = :architecture"""
2840         session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
2841                                 'version': self.pkg.changes['version'], \
2842                                 'architecture': " ".join(self.pkg.changes['architecture'].keys())})
2843
2844         session.commit()