]> git.decadent.org.uk Git - dak.git/blob - daklib/queue.py
Merge remote-tracking branch 'origin/master' into version-checks
[dak.git] / daklib / queue.py
1 #!/usr/bin/env python
2 # vim:set et sw=4:
3
4 """
5 Queue utility functions for dak
6
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009, 2010  Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
11 """
12
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
17
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
21 # GNU General Public License for more details.
22
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
26
27 ###############################################################################
28
29 import errno
30 import os
31 import stat
32 import sys
33 import time
34 import apt_inst
35 import apt_pkg
36 import utils
37 import commands
38 import shutil
39 import textwrap
40 from types import *
41 from sqlalchemy.sql.expression import desc
42 from sqlalchemy.orm.exc import NoResultFound
43
44 import yaml
45
46 from dak_exceptions import *
47 from changes import *
48 from regexes import *
49 from config import Config
50 from holding import Holding
51 from urgencylog import UrgencyLog
52 from dbconn import *
53 from summarystats import SummaryStats
54 from utils import parse_changes, check_dsc_files, build_package_set
55 from textutils import fix_maintainer
56 from lintian import parse_lintian_output, generate_reject_messages
57 from contents import UnpackedSource
58
59 # suppress some deprecation warnings in squeeze related to apt_pkg
60 # module
61 import warnings
62 warnings.filterwarnings('ignore', \
63     "apt_pkg.ParseSection\(\) is deprecated. Please see apt_pkg\.TagSection\(\) for the replacement\.", \
64     DeprecationWarning)
65 warnings.filterwarnings('ignore', \
66     "Attribute 'Find' of the 'apt_pkg\.TagSection' object is deprecated, use 'find' instead\.", \
67     DeprecationWarning)
68
69 ###############################################################################
70
71 def get_type(f, session):
72     """
73     Get the file type of C{f}
74
75     @type f: dict
76     @param f: file entry from Changes object
77
78     @type session: SQLA Session
79     @param session: SQL Alchemy session object
80
81     @rtype: string
82     @return: filetype
83
84     """
85     # Determine the type
86     if f.has_key("dbtype"):
87         file_type = f["dbtype"]
88     elif re_source_ext.match(f["type"]):
89         file_type = "dsc"
90     else:
91         file_type = f["type"]
92         utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (file_type))
93
94     # Validate the override type
95     type_id = get_override_type(file_type, session)
96     if type_id is None:
97         utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))
98
99     return file_type
100
101 ################################################################################
102
103 # Determine what parts in a .changes are NEW
104
105 def determine_new(filename, changes, files, warn=1, session = None, dsc = None, new = {}):
106     """
107     Determine what parts in a C{changes} file are NEW.
108
109     @type filename: str
110     @param filename: changes filename
111
112     @type changes: Upload.Pkg.changes dict
113     @param changes: Changes dictionary
114
115     @type files: Upload.Pkg.files dict
116     @param files: Files dictionary
117
118     @type warn: bool
119     @param warn: Warn if overrides are added for (old)stable
120
121     @type dsc: Upload.Pkg.dsc dict
122     @param dsc: (optional); Dsc dictionary
123
124     @type new: dict
125     @param new: new packages as returned by a previous call to this function, but override information may have changed
126
127     @rtype: dict
128     @return: dictionary of NEW components.
129
130     """
131     # TODO: This should all use the database instead of parsing the changes
132     # file again
133     byhand = {}
134
135     dbchg = get_dbchange(filename, session)
136     if dbchg is None:
137         print "Warning: cannot find changes file in database; won't check byhand"
138
139     # Try to get the Package-Set field from an included .dsc file (if possible).
140     if dsc:
141         for package, entry in build_package_set(dsc, session).items():
142             if not new.has_key(package):
143                 new[package] = entry
144
145     # Build up a list of potentially new things
146     for name, f in files.items():
147         # Keep a record of byhand elements
148         if f["section"] == "byhand":
149             byhand[name] = 1
150             continue
151
152         pkg = f["package"]
153         priority = f["priority"]
154         section = f["section"]
155         file_type = get_type(f, session)
156         component = f["component"]
157
158         if file_type == "dsc":
159             priority = "source"
160
161         if not new.has_key(pkg):
162             new[pkg] = {}
163             new[pkg]["priority"] = priority
164             new[pkg]["section"] = section
165             new[pkg]["type"] = file_type
166             new[pkg]["component"] = component
167             new[pkg]["files"] = []
168         else:
169             old_type = new[pkg]["type"]
170             if old_type != file_type:
171                 # source gets trumped by deb or udeb
172                 if old_type == "dsc":
173                     new[pkg]["priority"] = priority
174                     new[pkg]["section"] = section
175                     new[pkg]["type"] = file_type
176                     new[pkg]["component"] = component
177
178         new[pkg]["files"].append(name)
179
180         if f.has_key("othercomponents"):
181             new[pkg]["othercomponents"] = f["othercomponents"]
182
183     # Fix up the list of target suites
184     cnf = Config()
185     for suite in changes["suite"].keys():
186         oldsuite = get_suite(suite, session)
187         if not oldsuite:
188             print "WARNING: Invalid suite %s found" % suite
189             continue
190
191         if oldsuite.overridesuite:
192             newsuite = get_suite(oldsuite.overridesuite, session)
193
194             if newsuite:
195                 print "INFORMATION: Using overrides from suite %s instead of suite %s" % (
196                     oldsuite.overridesuite, suite)
197                 del changes["suite"][suite]
198                 changes["suite"][oldsuite.overridesuite] = 1
199             else:
200                 print "WARNING: Told to use overridesuite %s for %s but it doesn't exist.  Bugger" % (
201                     oldsuite.overridesuite, suite)
202
203     # Check for unprocessed byhand files
204     if dbchg is not None:
205         for b in byhand.keys():
206             # Find the file entry in the database
207             found = False
208             for f in dbchg.files:
209                 if f.filename == b:
210                     found = True
211                     # If it's processed, we can ignore it
212                     if f.processed:
213                         del byhand[b]
214                     break
215
216             if not found:
217                 print "Warning: Couldn't find BYHAND item %s in the database; assuming unprocessed"
218
219     # Check for new stuff
220     for suite in changes["suite"].keys():
221         for pkg in new.keys():
222             ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
223             if len(ql) > 0:
224                 for file_entry in new[pkg]["files"]:
225                     if files[file_entry].has_key("new"):
226                         del files[file_entry]["new"]
227                 del new[pkg]
228
229     if warn:
230         for s in ['stable', 'oldstable']:
231             if changes["suite"].has_key(s):
232                 print "WARNING: overrides will be added for %s!" % s
233         for pkg in new.keys():
234             if new[pkg].has_key("othercomponents"):
235                 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
236
237     return new, byhand
238
239 ################################################################################
240
241 def check_valid(new, session = None):
242     """
243     Check if section and priority for NEW packages exist in database.
244     Additionally does sanity checks:
245       - debian-installer packages have to be udeb (or source)
246       - non debian-installer packages can not be udeb
247       - source priority can only be assigned to dsc file types
248
249     @type new: dict
250     @param new: Dict of new packages with their section, priority and type.
251
252     """
253     for pkg in new.keys():
254         section_name = new[pkg]["section"]
255         priority_name = new[pkg]["priority"]
256         file_type = new[pkg]["type"]
257
258         section = get_section(section_name, session)
259         if section is None:
260             new[pkg]["section id"] = -1
261         else:
262             new[pkg]["section id"] = section.section_id
263
264         priority = get_priority(priority_name, session)
265         if priority is None:
266             new[pkg]["priority id"] = -1
267         else:
268             new[pkg]["priority id"] = priority.priority_id
269
270         # Sanity checks
271         di = section_name.find("debian-installer") != -1
272
273         # If d-i, we must be udeb and vice-versa
274         if     (di and file_type not in ("udeb", "dsc")) or \
275            (not di and file_type == "udeb"):
276             new[pkg]["section id"] = -1
277
278         # If dsc we need to be source and vice-versa
279         if (priority == "source" and file_type != "dsc") or \
280            (priority != "source" and file_type == "dsc"):
281             new[pkg]["priority id"] = -1
282
283 ###############################################################################
284
285 # Used by Upload.check_timestamps
286 class TarTime(object):
287     def __init__(self, future_cutoff, past_cutoff):
288         self.reset()
289         self.future_cutoff = future_cutoff
290         self.past_cutoff = past_cutoff
291
292     def reset(self):
293         self.future_files = {}
294         self.ancient_files = {}
295
296     def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
297         if MTime > self.future_cutoff:
298             self.future_files[Name] = MTime
299         if MTime < self.past_cutoff:
300             self.ancient_files[Name] = MTime
301
302 ###############################################################################
303
304 def prod_maintainer(notes, upload):
305     cnf = Config()
306
307     # Here we prepare an editor and get them ready to prod...
308     (fd, temp_filename) = utils.temp_filename()
309     temp_file = os.fdopen(fd, 'w')
310     for note in notes:
311         temp_file.write(note.comment)
312     temp_file.close()
313     editor = os.environ.get("EDITOR","vi")
314     answer = 'E'
315     while answer == 'E':
316         os.system("%s %s" % (editor, temp_filename))
317         temp_fh = utils.open_file(temp_filename)
318         prod_message = "".join(temp_fh.readlines())
319         temp_fh.close()
320         print "Prod message:"
321         print utils.prefix_multi_line_string(prod_message,"  ",include_blank_lines=1)
322         prompt = "[P]rod, Edit, Abandon, Quit ?"
323         answer = "XXX"
324         while prompt.find(answer) == -1:
325             answer = utils.our_raw_input(prompt)
326             m = re_default_answer.search(prompt)
327             if answer == "":
328                 answer = m.group(1)
329             answer = answer[:1].upper()
330     os.unlink(temp_filename)
331     if answer == 'A':
332         return
333     elif answer == 'Q':
334         end()
335         sys.exit(0)
336     # Otherwise, do the proding...
337     user_email_address = utils.whoami() + " <%s>" % (
338         cnf["Dinstall::MyAdminAddress"])
339
340     Subst = upload.Subst
341
342     Subst["__FROM_ADDRESS__"] = user_email_address
343     Subst["__PROD_MESSAGE__"] = prod_message
344     Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
345
346     prod_mail_message = utils.TemplateSubst(
347         Subst,cnf["Dir::Templates"]+"/process-new.prod")
348
349     # Send the prod mail
350     utils.send_mail(prod_mail_message)
351
352     print "Sent prodding message"
353
354 ################################################################################
355
356 def edit_note(note, upload, session, trainee=False):
357     # Write the current data to a temporary file
358     (fd, temp_filename) = utils.temp_filename()
359     editor = os.environ.get("EDITOR","vi")
360     answer = 'E'
361     while answer == 'E':
362         os.system("%s %s" % (editor, temp_filename))
363         temp_file = utils.open_file(temp_filename)
364         newnote = temp_file.read().rstrip()
365         temp_file.close()
366         print "New Note:"
367         print utils.prefix_multi_line_string(newnote,"  ")
368         prompt = "[D]one, Edit, Abandon, Quit ?"
369         answer = "XXX"
370         while prompt.find(answer) == -1:
371             answer = utils.our_raw_input(prompt)
372             m = re_default_answer.search(prompt)
373             if answer == "":
374                 answer = m.group(1)
375             answer = answer[:1].upper()
376     os.unlink(temp_filename)
377     if answer == 'A':
378         return
379     elif answer == 'Q':
380         end()
381         sys.exit(0)
382
383     comment = NewComment()
384     comment.package = upload.pkg.changes["source"]
385     comment.version = upload.pkg.changes["version"]
386     comment.comment = newnote
387     comment.author  = utils.whoami()
388     comment.trainee = trainee
389     session.add(comment)
390     session.commit()
391
392 ###############################################################################
393
394 # suite names DMs can upload to
395 dm_suites = ['unstable', 'experimental']
396
397 def get_newest_source(source, session):
398     'returns the newest DBSource object in dm_suites'
399     ## the most recent version of the package uploaded to unstable or
400     ## experimental includes the field "DM-Upload-Allowed: yes" in the source
401     ## section of its control file
402     q = session.query(DBSource).filter_by(source = source). \
403         filter(DBSource.suites.any(Suite.suite_name.in_(dm_suites))). \
404         order_by(desc('source.version'))
405     return q.first()
406
407 def get_suite_version_by_source(source, session):
408     'returns a list of tuples (suite_name, version) for source package'
409     q = session.query(Suite.suite_name, DBSource.version). \
410         join(Suite.sources).filter_by(source = source)
411     return q.all()
412
413 def get_source_by_package_and_suite(package, suite_name, session):
414     '''
415     returns a DBSource query filtered by DBBinary.package and this package's
416     suite_name
417     '''
418     return session.query(DBSource). \
419         join(DBSource.binaries).filter_by(package = package). \
420         join(DBBinary.suites).filter_by(suite_name = suite_name)
421
422 def get_suite_version_by_package(package, arch_string, session):
423     '''
424     returns a list of tuples (suite_name, version) for binary package and
425     arch_string
426     '''
427     return session.query(Suite.suite_name, DBBinary.version). \
428         join(Suite.binaries).filter_by(package = package). \
429         join(DBBinary.architecture). \
430         filter(Architecture.arch_string.in_([arch_string, 'all'])).all()
431
432 class Upload(object):
433     """
434     Everything that has to do with an upload processed.
435
436     """
437     def __init__(self):
438         self.logger = None
439         self.pkg = Changes()
440         self.reset()
441
442     ###########################################################################
443
444     def reset (self):
445         """ Reset a number of internal variables."""
446
447         # Initialize the substitution template map
448         cnf = Config()
449         self.Subst = {}
450         self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
451         self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
452         self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
453         self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
454
455         self.rejects = []
456         self.warnings = []
457         self.notes = []
458
459         self.later_check_files = []
460
461         self.pkg.reset()
462
463     def package_info(self):
464         """
465         Format various messages from this Upload to send to the maintainer.
466         """
467
468         msgs = (
469             ('Reject Reasons', self.rejects),
470             ('Warnings', self.warnings),
471             ('Notes', self.notes),
472         )
473
474         msg = ''
475         for title, messages in msgs:
476             if messages:
477                 msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
478         msg += '\n\n'
479
480         return msg
481
482     ###########################################################################
483     def update_subst(self):
484         """ Set up the per-package template substitution mappings """
485
486         cnf = Config()
487
488         # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
489         if not self.pkg.changes.has_key("architecture") or not \
490            isinstance(self.pkg.changes["architecture"], dict):
491             self.pkg.changes["architecture"] = { "Unknown" : "" }
492
493         # and maintainer2047 may not exist.
494         if not self.pkg.changes.has_key("maintainer2047"):
495             self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]
496
497         self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
498         self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
499         self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")
500
501         # For source uploads the Changed-By field wins; otherwise Maintainer wins.
502         if self.pkg.changes["architecture"].has_key("source") and \
503            self.pkg.changes["changedby822"] != "" and \
504            (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
505
506             self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
507             self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
508             self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
509         else:
510             self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
511             self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
512             self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")
513
514         # Process policy doesn't set the fingerprint field and I don't want to make it
515         # do it for now as I don't want to have to deal with the case where we accepted
516         # the package into PU-NEW, but the fingerprint has gone away from the keyring in
517         # the meantime so the package will be remarked as rejectable.  Urgh.
518         # TODO: Fix this properly
519         if self.pkg.changes.has_key('fingerprint'):
520             session = DBConn().session()
521             fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
522             if fpr and self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
523                 if self.pkg.changes.has_key("sponsoremail"):
524                     self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
525             session.close()
526
527         if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
528             self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
529
530         # Apply any global override of the Maintainer field
531         if cnf.get("Dinstall::OverrideMaintainer"):
532             self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
533             self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]
534
535         self.Subst["__REJECT_MESSAGE__"] = self.package_info()
536         self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
537         self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
538         self.Subst["__SUITE__"] = ", ".join(self.pkg.changes["distribution"])
539
540     ###########################################################################
541     def load_changes(self, filename):
542         """
543         Load a changes file and setup a dictionary around it. Also checks for mandantory
544         fields  within.
545
546         @type filename: string
547         @param filename: Changes filename, full path.
548
549         @rtype: boolean
550         @return: whether the changes file was valid or not.  We may want to
551                  reject even if this is True (see what gets put in self.rejects).
552                  This is simply to prevent us even trying things later which will
553                  fail because we couldn't properly parse the file.
554         """
555         Cnf = Config()
556         self.pkg.changes_file = filename
557
558         # Parse the .changes field into a dictionary
559         try:
560             self.pkg.changes.update(parse_changes(filename))
561         except CantOpenError:
562             self.rejects.append("%s: can't read file." % (filename))
563             return False
564         except ParseChangesError, line:
565             self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
566             return False
567         except ChangesUnicodeError:
568             self.rejects.append("%s: changes file not proper utf-8" % (filename))
569             return False
570
571         # Parse the Files field from the .changes into another dictionary
572         try:
573             self.pkg.files.update(utils.build_file_list(self.pkg.changes))
574         except ParseChangesError, line:
575             self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
576             return False
577         except UnknownFormatError, format:
578             self.rejects.append("%s: unknown format '%s'." % (filename, format))
579             return False
580
581         # Check for mandatory fields
582         for i in ("distribution", "source", "binary", "architecture",
583                   "version", "maintainer", "files", "changes", "description"):
584             if not self.pkg.changes.has_key(i):
585                 # Avoid undefined errors later
586                 self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
587                 return False
588
589         # Strip a source version in brackets from the source field
590         if re_strip_srcver.search(self.pkg.changes["source"]):
591             self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])
592
593         # Ensure the source field is a valid package name.
594         if not re_valid_pkg_name.match(self.pkg.changes["source"]):
595             self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))
596
597         # Split multi-value fields into a lower-level dictionary
598         for i in ("architecture", "distribution", "binary", "closes"):
599             o = self.pkg.changes.get(i, "")
600             if o != "":
601                 del self.pkg.changes[i]
602
603             self.pkg.changes[i] = {}
604
605             for j in o.split():
606                 self.pkg.changes[i][j] = 1
607
608         # Fix the Maintainer: field to be RFC822/2047 compatible
609         try:
610             (self.pkg.changes["maintainer822"],
611              self.pkg.changes["maintainer2047"],
612              self.pkg.changes["maintainername"],
613              self.pkg.changes["maintaineremail"]) = \
614                    fix_maintainer (self.pkg.changes["maintainer"])
615         except ParseMaintError, msg:
616             self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
617                    % (filename, self.pkg.changes["maintainer"], msg))
618
619         # ...likewise for the Changed-By: field if it exists.
620         try:
621             (self.pkg.changes["changedby822"],
622              self.pkg.changes["changedby2047"],
623              self.pkg.changes["changedbyname"],
624              self.pkg.changes["changedbyemail"]) = \
625                    fix_maintainer (self.pkg.changes.get("changed-by", ""))
626         except ParseMaintError, msg:
627             self.pkg.changes["changedby822"] = ""
628             self.pkg.changes["changedby2047"] = ""
629             self.pkg.changes["changedbyname"] = ""
630             self.pkg.changes["changedbyemail"] = ""
631
632             self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
633                    % (filename, self.pkg.changes["changed-by"], msg))
634
635         # Ensure all the values in Closes: are numbers
636         if self.pkg.changes.has_key("closes"):
637             for i in self.pkg.changes["closes"].keys():
638                 if re_isanum.match (i) == None:
639                     self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))
640
641         # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
642         self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
643         self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])
644
645         # Check the .changes is non-empty
646         if not self.pkg.files:
647             self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
648             return False
649
650         # Changes was syntactically valid even if we'll reject
651         return True
652
653     ###########################################################################
654
655     def check_distributions(self):
656         "Check and map the Distribution field"
657
658         Cnf = Config()
659
660         # Handle suite mappings
661         for m in Cnf.ValueList("SuiteMappings"):
662             args = m.split()
663             mtype = args[0]
664             if mtype == "map" or mtype == "silent-map":
665                 (source, dest) = args[1:3]
666                 if self.pkg.changes["distribution"].has_key(source):
667                     del self.pkg.changes["distribution"][source]
668                     self.pkg.changes["distribution"][dest] = 1
669                     if mtype != "silent-map":
670                         self.notes.append("Mapping %s to %s." % (source, dest))
671                 if self.pkg.changes.has_key("distribution-version"):
672                     if self.pkg.changes["distribution-version"].has_key(source):
673                         self.pkg.changes["distribution-version"][source]=dest
674             elif mtype == "map-unreleased":
675                 (source, dest) = args[1:3]
676                 if self.pkg.changes["distribution"].has_key(source):
677                     for arch in self.pkg.changes["architecture"].keys():
678                         if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
679                             self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
680                             del self.pkg.changes["distribution"][source]
681                             self.pkg.changes["distribution"][dest] = 1
682                             break
683             elif mtype == "ignore":
684                 suite = args[1]
685                 if self.pkg.changes["distribution"].has_key(suite):
686                     del self.pkg.changes["distribution"][suite]
687                     self.warnings.append("Ignoring %s as a target suite." % (suite))
688             elif mtype == "reject":
689                 suite = args[1]
690                 if self.pkg.changes["distribution"].has_key(suite):
691                     self.rejects.append("Uploads to %s are not accepted." % (suite))
692             elif mtype == "propup-version":
693                 # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
694                 #
695                 # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
696                 if self.pkg.changes["distribution"].has_key(args[1]):
697                     self.pkg.changes.setdefault("distribution-version", {})
698                     for suite in args[2:]:
699                         self.pkg.changes["distribution-version"][suite] = suite
700
701         # Ensure there is (still) a target distribution
702         if len(self.pkg.changes["distribution"].keys()) < 1:
703             self.rejects.append("No valid distribution remaining.")
704
705         # Ensure target distributions exist
706         for suite in self.pkg.changes["distribution"].keys():
707             if not Cnf.has_key("Suite::%s" % (suite)):
708                 self.rejects.append("Unknown distribution `%s'." % (suite))
709
710     ###########################################################################
711
712     def binary_file_checks(self, f, session):
713         cnf = Config()
714         entry = self.pkg.files[f]
715
716         # Extract package control information
717         deb_file = utils.open_file(f)
718         try:
719             control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
720         except:
721             self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
722             deb_file.close()
723             # Can't continue, none of the checks on control would work.
724             return
725
726         # Check for mandantory "Description:"
727         deb_file.seek(0)
728         try:
729             apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
730         except:
731             self.rejects.append("%s: Missing Description in binary package" % (f))
732             return
733
734         deb_file.close()
735
736         # Check for mandatory fields
737         for field in [ "Package", "Architecture", "Version" ]:
738             if control.Find(field) == None:
739                 # Can't continue
740                 self.rejects.append("%s: No %s field in control." % (f, field))
741                 return
742
743         # Ensure the package name matches the one give in the .changes
744         if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
745             self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
746
747         # Validate the package field
748         package = control.Find("Package")
749         if not re_valid_pkg_name.match(package):
750             self.rejects.append("%s: invalid package name '%s'." % (f, package))
751
752         # Validate the version field
753         version = control.Find("Version")
754         if not re_valid_version.match(version):
755             self.rejects.append("%s: invalid version number '%s'." % (f, version))
756
757         # Ensure the architecture of the .deb is one we know about.
758         default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
759         architecture = control.Find("Architecture")
760         upload_suite = self.pkg.changes["distribution"].keys()[0]
761
762         if      architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session = session)] \
763             and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session = session)]:
764             self.rejects.append("Unknown architecture '%s'." % (architecture))
765
766         # Ensure the architecture of the .deb is one of the ones
767         # listed in the .changes.
768         if not self.pkg.changes["architecture"].has_key(architecture):
769             self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
770
771         # Sanity-check the Depends field
772         depends = control.Find("Depends")
773         if depends == '':
774             self.rejects.append("%s: Depends field is empty." % (f))
775
776         # Sanity-check the Provides field
777         provides = control.Find("Provides")
778         if provides:
779             provide = re_spacestrip.sub('', provides)
780             if provide == '':
781                 self.rejects.append("%s: Provides field is empty." % (f))
782             prov_list = provide.split(",")
783             for prov in prov_list:
784                 if not re_valid_pkg_name.match(prov):
785                     self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
786
787         # If there is a Built-Using field, we need to check we can find the
788         # exact source version
789         built_using = control.Find("Built-Using")
790         if built_using:
791             try:
792                 entry["built-using"] = []
793                 for dep in apt_pkg.parse_depends(built_using):
794                     bu_s, bu_v, bu_e = dep[0]
795                     # Check that it's an exact match dependency and we have
796                     # some form of version
797                     if bu_e != "=" or len(bu_v) < 1:
798                         self.rejects.append("%s: Built-Using contains non strict dependency (%s %s %s)" % (f, bu_s, bu_e, bu_v))
799                     else:
800                         # Find the source id for this version
801                         bu_so = get_sources_from_name(bu_s, version=bu_v, session = session)
802                         if len(bu_so) != 1:
803                             self.rejects.append("%s: Built-Using (%s = %s): Cannot find source package" % (f, bu_s, bu_v))
804                         else:
805                             entry["built-using"].append( (bu_so[0].source, bu_so[0].version, ) )
806
807             except ValueError, e:
808                 self.rejects.append("%s: Cannot parse Built-Using field: %s" % (f, str(e)))
809
810
811         # Check the section & priority match those given in the .changes (non-fatal)
812         if     control.Find("Section") and entry["section"] != "" \
813            and entry["section"] != control.Find("Section"):
814             self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
815                                 (f, control.Find("Section", ""), entry["section"]))
816         if control.Find("Priority") and entry["priority"] != "" \
817            and entry["priority"] != control.Find("Priority"):
818             self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
819                                 (f, control.Find("Priority", ""), entry["priority"]))
820
821         entry["package"] = package
822         entry["architecture"] = architecture
823         entry["version"] = version
824         entry["maintainer"] = control.Find("Maintainer", "")
825
826         if f.endswith(".udeb"):
827             self.pkg.files[f]["dbtype"] = "udeb"
828         elif f.endswith(".deb"):
829             self.pkg.files[f]["dbtype"] = "deb"
830         else:
831             self.rejects.append("%s is neither a .deb or a .udeb." % (f))
832
833         entry["source"] = control.Find("Source", entry["package"])
834
835         # Get the source version
836         source = entry["source"]
837         source_version = ""
838
839         if source.find("(") != -1:
840             m = re_extract_src_version.match(source)
841             source = m.group(1)
842             source_version = m.group(2)
843
844         if not source_version:
845             source_version = self.pkg.files[f]["version"]
846
847         entry["source package"] = source
848         entry["source version"] = source_version
849
850         # Ensure the filename matches the contents of the .deb
851         m = re_isadeb.match(f)
852
853         #  package name
854         file_package = m.group(1)
855         if entry["package"] != file_package:
856             self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
857                                 (f, file_package, entry["dbtype"], entry["package"]))
858         epochless_version = re_no_epoch.sub('', control.Find("Version"))
859
860         #  version
861         file_version = m.group(2)
862         if epochless_version != file_version:
863             self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
864                                 (f, file_version, entry["dbtype"], epochless_version))
865
866         #  architecture
867         file_architecture = m.group(3)
868         if entry["architecture"] != file_architecture:
869             self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
870                                 (f, file_architecture, entry["dbtype"], entry["architecture"]))
871
872         # Check for existent source
873         source_version = entry["source version"]
874         source_package = entry["source package"]
875         if self.pkg.changes["architecture"].has_key("source"):
876             if source_version != self.pkg.changes["version"]:
877                 self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
878                                     (source_version, f, self.pkg.changes["version"]))
879         else:
880             # Check in the SQL database
881             if not source_exists(source_package, source_version, suites = \
882                 self.pkg.changes["distribution"].keys(), session = session):
883                 # Check in one of the other directories
884                 source_epochless_version = re_no_epoch.sub('', source_version)
885                 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
886                 if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
887                     entry["byhand"] = 1
888                 elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
889                     entry["new"] = 1
890                 else:
891                     dsc_file_exists = False
892                     for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
893                         if cnf.has_key("Dir::Queue::%s" % (myq)):
894                             if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
895                                 dsc_file_exists = True
896                                 break
897
898                     if not dsc_file_exists:
899                         self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
900
901         # Check the version and for file overwrites
902         self.check_binary_against_db(f, session)
903
904     def source_file_checks(self, f, session):
905         entry = self.pkg.files[f]
906
907         m = re_issource.match(f)
908         if not m:
909             return
910
911         entry["package"] = m.group(1)
912         entry["version"] = m.group(2)
913         entry["type"] = m.group(3)
914
915         # Ensure the source package name matches the Source filed in the .changes
916         if self.pkg.changes["source"] != entry["package"]:
917             self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
918
919         # Ensure the source version matches the version in the .changes file
920         if re_is_orig_source.match(f):
921             changes_version = self.pkg.changes["chopversion2"]
922         else:
923             changes_version = self.pkg.changes["chopversion"]
924
925         if changes_version != entry["version"]:
926             self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))
927
928         # Ensure the .changes lists source in the Architecture field
929         if not self.pkg.changes["architecture"].has_key("source"):
930             self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))
931
932         # Check the signature of a .dsc file
933         if entry["type"] == "dsc":
934             # check_signature returns either:
935             #  (None, [list, of, rejects]) or (signature, [])
936             (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
937             for j in rejects:
938                 self.rejects.append(j)
939
940         entry["architecture"] = "source"
941
942     def per_suite_file_checks(self, f, suite, session):
943         cnf = Config()
944         entry = self.pkg.files[f]
945
946         # Skip byhand
947         if entry.has_key("byhand"):
948             return
949
950         # Check we have fields we need to do these checks
951         oktogo = True
952         for m in ['component', 'package', 'priority', 'size', 'md5sum']:
953             if not entry.has_key(m):
954                 self.rejects.append("file '%s' does not have field %s set" % (f, m))
955                 oktogo = False
956
957         if not oktogo:
958             return
959
960         # Handle component mappings
961         for m in cnf.ValueList("ComponentMappings"):
962             (source, dest) = m.split()
963             if entry["component"] == source:
964                 entry["original component"] = source
965                 entry["component"] = dest
966
967         # Ensure the component is valid for the target suite
968         if cnf.has_key("Suite:%s::Components" % (suite)) and \
969            entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
970             self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
971             return
972
973         # Validate the component
974         if not get_component(entry["component"], session):
975             self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
976             return
977
978         # See if the package is NEW
979         if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
980             entry["new"] = 1
981
982         # Validate the priority
983         if entry["priority"].find('/') != -1:
984             self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))
985
986         # Determine the location
987         location = cnf["Dir::Pool"]
988         l = get_location(location, entry["component"], session=session)
989         if l is None:
990             self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
991             entry["location id"] = -1
992         else:
993             entry["location id"] = l.location_id
994
995         # Check the md5sum & size against existing files (if any)
996         entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])
997
998         found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
999                                          entry["size"], entry["md5sum"], entry["location id"])
1000
1001         if found is None:
1002             self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
1003         elif found is False and poolfile is not None:
1004             self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
1005         else:
1006             if poolfile is None:
1007                 entry["files id"] = None
1008             else:
1009                 entry["files id"] = poolfile.file_id
1010
1011         # Check for packages that have moved from one component to another
1012         entry['suite'] = suite
1013         arch_list = [entry["architecture"], 'all']
1014         component = get_component_by_package_suite(self.pkg.files[f]['package'], \
1015             [suite], arch_list = arch_list, session = session)
1016         if component is not None:
1017             entry["othercomponents"] = component
1018
1019     def check_files(self, action=True):
1020         file_keys = self.pkg.files.keys()
1021         holding = Holding()
1022         cnf = Config()
1023
1024         if action:
1025             cwd = os.getcwd()
1026             os.chdir(self.pkg.directory)
1027             for f in file_keys:
1028                 ret = holding.copy_to_holding(f)
1029                 if ret is not None:
1030                     self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)
1031
1032             os.chdir(cwd)
1033
1034         # check we already know the changes file
1035         # [NB: this check must be done post-suite mapping]
1036         base_filename = os.path.basename(self.pkg.changes_file)
1037
1038         session = DBConn().session()
1039
1040         try:
1041             dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
1042             # if in the pool or in a queue other than unchecked, reject
1043             if (dbc.in_queue is None) \
1044                    or (dbc.in_queue is not None
1045                        and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
1046                 self.rejects.append("%s file already known to dak" % base_filename)
1047         except NoResultFound, e:
1048             # not known, good
1049             pass
1050
1051         has_binaries = False
1052         has_source = False
1053
1054         for f, entry in self.pkg.files.items():
1055             # Ensure the file does not already exist in one of the accepted directories
1056             for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
1057                 if not cnf.has_key("Dir::Queue::%s" % (d)): continue
1058                 if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
1059                     self.rejects.append("%s file already exists in the %s directory." % (f, d))
1060
1061             if not re_taint_free.match(f):
1062                 self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))
1063
1064             # Check the file is readable
1065             if os.access(f, os.R_OK) == 0:
1066                 # When running in -n, copy_to_holding() won't have
1067                 # generated the reject_message, so we need to.
1068                 if action:
1069                     if os.path.exists(f):
1070                         self.rejects.append("Can't read `%s'. [permission denied]" % (f))
1071                     else:
1072                         # Don't directly reject, mark to check later to deal with orig's
1073                         # we can find in the pool
1074                         self.later_check_files.append(f)
1075                 entry["type"] = "unreadable"
1076                 continue
1077
1078             # If it's byhand skip remaining checks
1079             if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
1080                 entry["byhand"] = 1
1081                 entry["type"] = "byhand"
1082
1083             # Checks for a binary package...
1084             elif re_isadeb.match(f):
1085                 has_binaries = True
1086                 entry["type"] = "deb"
1087
1088                 # This routine appends to self.rejects/warnings as appropriate
1089                 self.binary_file_checks(f, session)
1090
1091             # Checks for a source package...
1092             elif re_issource.match(f):
1093                 has_source = True
1094
1095                 # This routine appends to self.rejects/warnings as appropriate
1096                 self.source_file_checks(f, session)
1097
1098             # Not a binary or source package?  Assume byhand...
1099             else:
1100                 entry["byhand"] = 1
1101                 entry["type"] = "byhand"
1102
1103             # Per-suite file checks
1104             entry["oldfiles"] = {}
1105             for suite in self.pkg.changes["distribution"].keys():
1106                 self.per_suite_file_checks(f, suite, session)
1107
1108         session.close()
1109
1110         # If the .changes file says it has source, it must have source.
1111         if self.pkg.changes["architecture"].has_key("source"):
1112             if not has_source:
1113                 self.rejects.append("no source found and Architecture line in changes mention source.")
1114
1115             if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
1116                 self.rejects.append("source only uploads are not supported.")
1117
1118     ###########################################################################
1119
1120     def __dsc_filename(self):
1121         """
1122         Returns: (Status, Dsc_Filename)
1123         where
1124           Status: Boolean; True when there was no error, False otherwise
1125           Dsc_Filename: String; name of the dsc file if Status is True, reason for the error otherwise
1126         """
1127         dsc_filename = None
1128
1129         # find the dsc
1130         for name, entry in self.pkg.files.items():
1131             if entry.has_key("type") and entry["type"] == "dsc":
1132                 if dsc_filename:
1133                     return False, "cannot process a .changes file with multiple .dsc's."
1134                 else:
1135                     dsc_filename = name
1136
1137         if not dsc_filename:
1138             return False, "source uploads must contain a dsc file"
1139
1140         return True, dsc_filename
1141
1142     def load_dsc(self, action=True, signing_rules=1):
1143         """
1144         Find and load the dsc from self.pkg.files into self.dsc
1145
1146         Returns: (Status, Reason)
1147         where
1148           Status: Boolean; True when there was no error, False otherwise
1149           Reason: String; When Status is False this describes the error
1150         """
1151
1152         # find the dsc
1153         (status, dsc_filename) = self.__dsc_filename()
1154         if not status:
1155             # If status is false, dsc_filename has the reason
1156             return False, dsc_filename
1157
1158         try:
1159             self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=signing_rules, dsc_file=1))
1160         except CantOpenError:
1161             if not action:
1162                 return False, "%s: can't read file." % (dsc_filename)
1163         except ParseChangesError, line:
1164             return False, "%s: parse error, can't grok: %s." % (dsc_filename, line)
1165         except InvalidDscError, line:
1166             return False, "%s: syntax error on line %s." % (dsc_filename, line)
1167         except ChangesUnicodeError:
1168             return False, "%s: dsc file not proper utf-8." % (dsc_filename)
1169
1170         return True, None
1171
1172     ###########################################################################
1173
1174     def check_dsc(self, action=True, session=None):
1175         """Returns bool indicating whether or not the source changes are valid"""
1176         # Ensure there is source to check
1177         if not self.pkg.changes["architecture"].has_key("source"):
1178             return True
1179
1180         (status, reason) = self.load_dsc(action=action)
1181         if not status:
1182             self.rejects.append(reason)
1183             return False
1184         (status, dsc_filename) = self.__dsc_filename()
1185         if not status:
1186             # If status is false, dsc_filename has the reason
1187             self.rejects.append(dsc_filename)
1188             return False
1189
1190         # Build up the file list of files mentioned by the .dsc
1191         try:
1192             self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
1193         except NoFilesFieldError:
1194             self.rejects.append("%s: no Files: field." % (dsc_filename))
1195             return False
1196         except UnknownFormatError, format:
1197             self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
1198             return False
1199         except ParseChangesError, line:
1200             self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
1201             return False
1202
1203         # Enforce mandatory fields
1204         for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
1205             if not self.pkg.dsc.has_key(i):
1206                 self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
1207                 return False
1208
1209         # Validate the source and version fields
1210         if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
1211             self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
1212         if not re_valid_version.match(self.pkg.dsc["version"]):
1213             self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))
1214
1215         # Only a limited list of source formats are allowed in each suite
1216         for dist in self.pkg.changes["distribution"].keys():
1217             allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
1218             if self.pkg.dsc["format"] not in allowed:
1219                 self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))
1220
1221         # Validate the Maintainer field
1222         try:
1223             # We ignore the return value
1224             fix_maintainer(self.pkg.dsc["maintainer"])
1225         except ParseMaintError, msg:
1226             self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
1227                                  % (dsc_filename, self.pkg.dsc["maintainer"], msg))
1228
1229         # Validate the build-depends field(s)
1230         for field_name in [ "build-depends", "build-depends-indep" ]:
1231             field = self.pkg.dsc.get(field_name)
1232             if field:
1233                 # Have apt try to parse them...
1234                 try:
1235                     apt_pkg.ParseSrcDepends(field)
1236                 except:
1237                     self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))
1238
1239         # Ensure the version number in the .dsc matches the version number in the .changes
1240         epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1241         changes_version = self.pkg.files[dsc_filename]["version"]
1242
1243         if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
1244             self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
1245
1246         # Ensure the Files field contain only what's expected
1247         self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))
1248
1249         # Ensure source is newer than existing source in target suites
1250         session = DBConn().session()
1251         self.check_source_against_db(dsc_filename, session)
1252         self.check_dsc_against_db(dsc_filename, session)
1253
1254         dbchg = get_dbchange(self.pkg.changes_file, session)
1255
1256         # Finally, check if we're missing any files
1257         for f in self.later_check_files:
1258             print 'XXX: %s' % f
1259             # Check if we've already processed this file if we have a dbchg object
1260             ok = False
1261             if dbchg:
1262                 for pf in dbchg.files:
1263                     if pf.filename == f and pf.processed:
1264                         self.notes.append('%s was already processed so we can go ahead' % f)
1265                         ok = True
1266                         del self.pkg.files[f]
1267             if not ok:
1268                 self.rejects.append("Could not find file %s references in changes" % f)
1269
1270         session.close()
1271
1272         return True
1273
1274     ###########################################################################
1275
1276     def get_changelog_versions(self, source_dir):
1277         """Extracts a the source package and (optionally) grabs the
1278         version history out of debian/changelog for the BTS."""
1279
1280         cnf = Config()
1281
1282         # Find the .dsc (again)
1283         dsc_filename = None
1284         for f in self.pkg.files.keys():
1285             if self.pkg.files[f]["type"] == "dsc":
1286                 dsc_filename = f
1287
1288         # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1289         if not dsc_filename:
1290             return
1291
1292         # Create a symlink mirror of the source files in our temporary directory
1293         for f in self.pkg.files.keys():
1294             m = re_issource.match(f)
1295             if m:
1296                 src = os.path.join(source_dir, f)
1297                 # If a file is missing for whatever reason, give up.
1298                 if not os.path.exists(src):
1299                     return
1300                 ftype = m.group(3)
1301                 if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
1302                    self.pkg.orig_files[f].has_key("path"):
1303                     continue
1304                 dest = os.path.join(os.getcwd(), f)
1305                 os.symlink(src, dest)
1306
1307         # If the orig files are not a part of the upload, create symlinks to the
1308         # existing copies.
1309         for orig_file in self.pkg.orig_files.keys():
1310             if not self.pkg.orig_files[orig_file].has_key("path"):
1311                 continue
1312             dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
1313             os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
1314
1315         # Extract the source
1316         try:
1317             unpacked = UnpackedSource(dsc_filename)
1318         except:
1319             self.rejects.append("'dpkg-source -x' failed for %s." % dsc_filename)
1320             return
1321
1322         if not cnf.Find("Dir::Queue::BTSVersionTrack"):
1323             return
1324
1325         # Get the upstream version
1326         upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1327         if re_strip_revision.search(upstr_version):
1328             upstr_version = re_strip_revision.sub('', upstr_version)
1329
1330         # Ensure the changelog file exists
1331         changelog_file = unpacked.get_changelog_file()
1332         if changelog_file is None:
1333             self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
1334             return
1335
1336         # Parse the changelog
1337         self.pkg.dsc["bts changelog"] = ""
1338         for line in changelog_file.readlines():
1339             m = re_changelog_versions.match(line)
1340             if m:
1341                 self.pkg.dsc["bts changelog"] += line
1342         changelog_file.close()
1343         unpacked.cleanup()
1344
1345         # Check we found at least one revision in the changelog
1346         if not self.pkg.dsc["bts changelog"]:
1347             self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1348
1349     def check_source(self):
1350         # Bail out if:
1351         #    a) there's no source
1352         if not self.pkg.changes["architecture"].has_key("source"):
1353             return
1354
1355         tmpdir = utils.temp_dirname()
1356
1357         # Move into the temporary directory
1358         cwd = os.getcwd()
1359         os.chdir(tmpdir)
1360
1361         # Get the changelog version history
1362         self.get_changelog_versions(cwd)
1363
1364         # Move back and cleanup the temporary tree
1365         os.chdir(cwd)
1366
1367         try:
1368             shutil.rmtree(tmpdir)
1369         except OSError, e:
1370             if e.errno != errno.EACCES:
1371                 print "foobar"
1372                 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1373
1374             self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1375             # We probably have u-r or u-w directories so chmod everything
1376             # and try again.
1377             cmd = "chmod -R u+rwx %s" % (tmpdir)
1378             result = os.system(cmd)
1379             if result != 0:
1380                 utils.fubar("'%s' failed with result %s." % (cmd, result))
1381             shutil.rmtree(tmpdir)
1382         except Exception, e:
1383             print "foobar2 (%s)" % e
1384             utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1385
1386     ###########################################################################
1387     def ensure_hashes(self):
1388         # Make sure we recognise the format of the Files: field in the .changes
1389         format = self.pkg.changes.get("format", "0.0").split(".", 1)
1390         if len(format) == 2:
1391             format = int(format[0]), int(format[1])
1392         else:
1393             format = int(float(format[0])), 0
1394
1395         # We need to deal with the original changes blob, as the fields we need
1396         # might not be in the changes dict serialised into the .dak anymore.
1397         orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1398
1399         # Copy the checksums over to the current changes dict.  This will keep
1400         # the existing modifications to it intact.
1401         for field in orig_changes:
1402             if field.startswith('checksums-'):
1403                 self.pkg.changes[field] = orig_changes[field]
1404
1405         # Check for unsupported hashes
1406         for j in utils.check_hash_fields(".changes", self.pkg.changes):
1407             self.rejects.append(j)
1408
1409         for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1410             self.rejects.append(j)
1411
1412         # We have to calculate the hash if we have an earlier changes version than
1413         # the hash appears in rather than require it exist in the changes file
1414         for hashname, hashfunc, version in utils.known_hashes:
1415             # TODO: Move _ensure_changes_hash into this class
1416             for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1417                 self.rejects.append(j)
1418             if "source" in self.pkg.changes["architecture"]:
1419                 # TODO: Move _ensure_dsc_hash into this class
1420                 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1421                     self.rejects.append(j)
1422
1423     def check_hashes(self):
1424         for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1425             self.rejects.append(m)
1426
1427         for m in utils.check_size(".changes", self.pkg.files):
1428             self.rejects.append(m)
1429
1430         for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1431             self.rejects.append(m)
1432
1433         for m in utils.check_size(".dsc", self.pkg.dsc_files):
1434             self.rejects.append(m)
1435
1436         self.ensure_hashes()
1437
1438     ###########################################################################
1439
1440     def ensure_orig(self, target_dir='.', session=None):
1441         """
1442         Ensures that all orig files mentioned in the changes file are present
1443         in target_dir. If they do not exist, they are symlinked into place.
1444
1445         An list containing the symlinks that were created are returned (so they
1446         can be removed).
1447         """
1448
1449         symlinked = []
1450         cnf = Config()
1451
1452         for filename, entry in self.pkg.dsc_files.iteritems():
1453             if not re_is_orig_source.match(filename):
1454                 # File is not an orig; ignore
1455                 continue
1456
1457             if os.path.exists(filename):
1458                 # File exists, no need to continue
1459                 continue
1460
1461             def symlink_if_valid(path):
1462                 f = utils.open_file(path)
1463                 md5sum = apt_pkg.md5sum(f)
1464                 f.close()
1465
1466                 fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
1467                 expected = (int(entry['size']), entry['md5sum'])
1468
1469                 if fingerprint != expected:
1470                     return False
1471
1472                 dest = os.path.join(target_dir, filename)
1473
1474                 os.symlink(path, dest)
1475                 symlinked.append(dest)
1476
1477                 return True
1478
1479             session_ = session
1480             if session is None:
1481                 session_ = DBConn().session()
1482
1483             found = False
1484
1485             # Look in the pool
1486             for poolfile in get_poolfile_like_name('%s' % filename, session_):
1487                 poolfile_path = os.path.join(
1488                     poolfile.location.path, poolfile.filename
1489                 )
1490
1491                 if symlink_if_valid(poolfile_path):
1492                     found = True
1493                     break
1494
1495             if session is None:
1496                 session_.close()
1497
1498             if found:
1499                 continue
1500
1501             # Look in some other queues for the file
1502             queues = ('New', 'Byhand', 'ProposedUpdates',
1503                 'OldProposedUpdates', 'Embargoed', 'Unembargoed')
1504
1505             for queue in queues:
1506                 if not cnf.get('Dir::Queue::%s' % queue):
1507                     continue
1508
1509                 queuefile_path = os.path.join(
1510                     cnf['Dir::Queue::%s' % queue], filename
1511                 )
1512
1513                 if not os.path.exists(queuefile_path):
1514                     # Does not exist in this queue
1515                     continue
1516
1517                 if symlink_if_valid(queuefile_path):
1518                     break
1519
1520         return symlinked
1521
1522     ###########################################################################
1523
1524     def check_lintian(self):
1525         """
1526         Extends self.rejects by checking the output of lintian against tags
1527         specified in Dinstall::LintianTags.
1528         """
1529
1530         cnf = Config()
1531
1532         # Don't reject binary uploads
1533         if not self.pkg.changes['architecture'].has_key('source'):
1534             return
1535
1536         # Only check some distributions
1537         for dist in ('unstable', 'experimental'):
1538             if dist in self.pkg.changes['distribution']:
1539                 break
1540         else:
1541             return
1542
1543         # If we do not have a tagfile, don't do anything
1544         tagfile = cnf.get("Dinstall::LintianTags")
1545         if not tagfile:
1546             return
1547
1548         # Parse the yaml file
1549         sourcefile = file(tagfile, 'r')
1550         sourcecontent = sourcefile.read()
1551         sourcefile.close()
1552
1553         try:
1554             lintiantags = yaml.load(sourcecontent)['lintian']
1555         except yaml.YAMLError, msg:
1556             utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
1557             return
1558
1559         # Try and find all orig mentioned in the .dsc
1560         symlinked = self.ensure_orig()
1561
1562         # Setup the input file for lintian
1563         fd, temp_filename = utils.temp_filename()
1564         temptagfile = os.fdopen(fd, 'w')
1565         for tags in lintiantags.values():
1566             temptagfile.writelines(['%s\n' % x for x in tags])
1567         temptagfile.close()
1568
1569         try:
1570             cmd = "lintian --show-overrides --tags-from-file %s %s" % \
1571                 (temp_filename, self.pkg.changes_file)
1572
1573             result, output = commands.getstatusoutput(cmd)
1574         finally:
1575             # Remove our tempfile and any symlinks we created
1576             os.unlink(temp_filename)
1577
1578             for symlink in symlinked:
1579                 os.unlink(symlink)
1580
1581         if result == 2:
1582             utils.warn("lintian failed for %s [return code: %s]." % \
1583                 (self.pkg.changes_file, result))
1584             utils.warn(utils.prefix_multi_line_string(output, \
1585                 " [possible output:] "))
1586
1587         def log(*txt):
1588             if self.logger:
1589                 self.logger.log(
1590                     [self.pkg.changes_file, "check_lintian"] + list(txt)
1591                 )
1592
1593         # Generate messages
1594         parsed_tags = parse_lintian_output(output)
1595         self.rejects.extend(
1596             generate_reject_messages(parsed_tags, lintiantags, log=log)
1597         )
1598
1599     ###########################################################################
1600     def check_urgency(self):
1601         cnf = Config()
1602         if self.pkg.changes["architecture"].has_key("source"):
1603             if not self.pkg.changes.has_key("urgency"):
1604                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1605             self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1606             if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1607                 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1608                                      (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1609                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1610
1611     ###########################################################################
1612
1613     # Sanity check the time stamps of files inside debs.
1614     # [Files in the near future cause ugly warnings and extreme time
1615     #  travel can cause errors on extraction]
1616
1617     def check_timestamps(self):
1618         Cnf = Config()
1619
1620         future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1621         past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1622         tar = TarTime(future_cutoff, past_cutoff)
1623
1624         for filename, entry in self.pkg.files.items():
1625             if entry["type"] == "deb":
1626                 tar.reset()
1627                 try:
1628                     deb_file = utils.open_file(filename)
1629                     apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
1630                     deb_file.seek(0)
1631                     try:
1632                         apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
1633                     except SystemError, e:
1634                         # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
1635                         if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
1636                             raise
1637                         deb_file.seek(0)
1638                         apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")
1639
1640                     deb_file.close()
1641
1642                     future_files = tar.future_files.keys()
1643                     if future_files:
1644                         num_future_files = len(future_files)
1645                         future_file = future_files[0]
1646                         future_date = tar.future_files[future_file]
1647                         self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1648                                % (filename, num_future_files, future_file, time.ctime(future_date)))
1649
1650                     ancient_files = tar.ancient_files.keys()
1651                     if ancient_files:
1652                         num_ancient_files = len(ancient_files)
1653                         ancient_file = ancient_files[0]
1654                         ancient_date = tar.ancient_files[ancient_file]
1655                         self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1656                                % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1657                 except:
1658                     self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1659
1660     def check_if_upload_is_sponsored(self, uid_email, uid_name):
1661         if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1662             sponsored = False
1663         elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1664             sponsored = False
1665             if uid_name == "":
1666                 sponsored = True
1667         else:
1668             sponsored = True
1669             if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1670                 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1671                 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1672                     self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1673                         self.pkg.changes["sponsoremail"] = uid_email
1674
1675         return sponsored
1676
1677
1678     ###########################################################################
1679     # check_signed_by_key checks
1680     ###########################################################################
1681
1682     def check_signed_by_key(self):
1683         """Ensure the .changes is signed by an authorized uploader."""
1684         session = DBConn().session()
1685
1686         # First of all we check that the person has proper upload permissions
1687         # and that this upload isn't blocked
1688         fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1689
1690         if fpr is None:
1691             self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1692             return
1693
1694         # TODO: Check that import-keyring adds UIDs properly
1695         if not fpr.uid:
1696             self.rejects.append("Cannot find uid for fingerprint %s.  Please contact ftpmaster@debian.org" % fpr.fingerprint)
1697             return
1698
1699         # Check that the fingerprint which uploaded has permission to do so
1700         self.check_upload_permissions(fpr, session)
1701
1702         # Check that this package is not in a transition
1703         self.check_transition(session)
1704
1705         session.close()
1706
1707
1708     def check_upload_permissions(self, fpr, session):
1709         # Check any one-off upload blocks
1710         self.check_upload_blocks(fpr, session)
1711
1712         # If the source_acl is None, source is never allowed
1713         if fpr.source_acl is None:
1714             if self.pkg.changes["architecture"].has_key("source"):
1715                 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1716                 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1717                 self.rejects.append(rej)
1718                 return
1719         # Do DM as a special case
1720         # DM is a special case unfortunately, so we check it first
1721         # (keys with no source access get more access than DMs in one
1722         #  way; DMs can only upload for their packages whether source
1723         #  or binary, whereas keys with no access might be able to
1724         #  upload some binaries)
1725         elif fpr.source_acl.access_level == 'dm':
1726             self.check_dm_upload(fpr, session)
1727         else:
1728             # If not a DM, we allow full upload rights
1729             uid_email = "%s@debian.org" % (fpr.uid.uid)
1730             self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1731
1732
1733         # Check binary upload permissions
1734         # By this point we know that DMs can't have got here unless they
1735         # are allowed to deal with the package concerned so just apply
1736         # normal checks
1737         if fpr.binary_acl.access_level == 'full':
1738             return
1739
1740         # Otherwise we're in the map case
1741         tmparches = self.pkg.changes["architecture"].copy()
1742         tmparches.pop('source', None)
1743
1744         for bam in fpr.binary_acl_map:
1745             tmparches.pop(bam.architecture.arch_string, None)
1746
1747         if len(tmparches.keys()) > 0:
1748             if fpr.binary_reject:
1749                 rej = "changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1750                 if len(tmparches.keys()) == 1:
1751                     rej += "\n\narchitecture involved is: %s" % ",".join(tmparches.keys())
1752                 else:
1753                     rej += "\n\narchitectures involved are: %s" % ",".join(tmparches.keys())
1754                 self.rejects.append(rej)
1755             else:
1756                 # TODO: This is where we'll implement reject vs throw away binaries later
1757                 rej = "Uhm.  I'm meant to throw away the binaries now but that's not implemented yet"
1758                 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1759                 rej += "\nFingerprint: %s", (fpr.fingerprint)
1760                 self.rejects.append(rej)
1761
1762
1763     def check_upload_blocks(self, fpr, session):
1764         """Check whether any upload blocks apply to this source, source
1765            version, uid / fpr combination"""
1766
1767         def block_rej_template(fb):
1768             rej = 'Manual upload block in place for package %s' % fb.source
1769             if fb.version is not None:
1770                 rej += ', version %s' % fb.version
1771             return rej
1772
1773         for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1774             # version is None if the block applies to all versions
1775             if fb.version is None or fb.version == self.pkg.changes['version']:
1776                 # Check both fpr and uid - either is enough to cause a reject
1777                 if fb.fpr is not None:
1778                     if fb.fpr.fingerprint == fpr.fingerprint:
1779                         self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1780                 if fb.uid is not None:
1781                     if fb.uid == fpr.uid:
1782                         self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1783
1784
1785     def check_dm_upload(self, fpr, session):
1786         # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
1787         ## none of the uploaded packages are NEW
1788         rej = False
1789         for f in self.pkg.files.keys():
1790             if self.pkg.files[f].has_key("byhand"):
1791                 self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
1792                 rej = True
1793             if self.pkg.files[f].has_key("new"):
1794                 self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
1795                 rej = True
1796
1797         if rej:
1798             return
1799
1800         r = get_newest_source(self.pkg.changes["source"], session)
1801
1802         if r is None:
1803             rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
1804             self.rejects.append(rej)
1805             return
1806
1807         if not r.dm_upload_allowed:
1808             rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
1809             self.rejects.append(rej)
1810             return
1811
1812         ## the Maintainer: field of the uploaded .changes file corresponds with
1813         ## the owner of the key used (ie, non-developer maintainers may not sponsor
1814         ## uploads)
1815         if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
1816             self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
1817
1818         ## the most recent version of the package uploaded to unstable or
1819         ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
1820         ## non-developer maintainers cannot NMU or hijack packages)
1821
1822         # uploader includes the maintainer
1823         accept = False
1824         for uploader in r.uploaders:
1825             (rfc822, rfc2047, name, email) = uploader.get_split_maintainer()
1826             # Eww - I hope we never have two people with the same name in Debian
1827             if email == fpr.uid.uid or name == fpr.uid.name:
1828                 accept = True
1829                 break
1830
1831         if not accept:
1832             self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
1833             return
1834
1835         ## none of the packages are being taken over from other source packages
1836         for b in self.pkg.changes["binary"].keys():
1837             for suite in self.pkg.changes["distribution"].keys():
1838                 for s in get_source_by_package_and_suite(b, suite, session):
1839                     if s.source != self.pkg.changes["source"]:
1840                         self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s, suite))
1841
1842
1843
1844     def check_transition(self, session):
1845         cnf = Config()
1846
1847         sourcepkg = self.pkg.changes["source"]
1848
1849         # No sourceful upload -> no need to do anything else, direct return
1850         # We also work with unstable uploads, not experimental or those going to some
1851         # proposed-updates queue
1852         if "source" not in self.pkg.changes["architecture"] or \
1853            "unstable" not in self.pkg.changes["distribution"]:
1854             return
1855
1856         # Also only check if there is a file defined (and existant) with
1857         # checks.
1858         transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1859         if transpath == "" or not os.path.exists(transpath):
1860             return
1861
1862         # Parse the yaml file
1863         sourcefile = file(transpath, 'r')
1864         sourcecontent = sourcefile.read()
1865         try:
1866             transitions = yaml.load(sourcecontent)
1867         except yaml.YAMLError, msg:
1868             # This shouldn't happen, there is a wrapper to edit the file which
1869             # checks it, but we prefer to be safe than ending up rejecting
1870             # everything.
1871             utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1872             return
1873
1874         # Now look through all defined transitions
1875         for trans in transitions:
1876             t = transitions[trans]
1877             source = t["source"]
1878             expected = t["new"]
1879
1880             # Will be None if nothing is in testing.
1881             current = get_source_in_suite(source, "testing", session)
1882             if current is not None:
1883                 compare = apt_pkg.VersionCompare(current.version, expected)
1884
1885             if current is None or compare < 0:
1886                 # This is still valid, the current version in testing is older than
1887                 # the new version we wait for, or there is none in testing yet
1888
1889                 # Check if the source we look at is affected by this.
1890                 if sourcepkg in t['packages']:
1891                     # The source is affected, lets reject it.
1892
1893                     rejectmsg = "%s: part of the %s transition.\n\n" % (
1894                         sourcepkg, trans)
1895
1896                     if current is not None:
1897                         currentlymsg = "at version %s" % (current.version)
1898                     else:
1899                         currentlymsg = "not present in testing"
1900
1901                     rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1902
1903                     rejectmsg += "\n".join(textwrap.wrap("""Your package
1904 is part of a testing transition designed to get %s migrated (it is
1905 currently %s, we need version %s).  This transition is managed by the
1906 Release Team, and %s is the Release-Team member responsible for it.
1907 Please mail debian-release@lists.debian.org or contact %s directly if you
1908 need further assistance.  You might want to upload to experimental until this
1909 transition is done."""
1910                             % (source, currentlymsg, expected,t["rm"], t["rm"])))
1911
1912                     self.rejects.append(rejectmsg)
1913                     return
1914
1915     ###########################################################################
1916     # End check_signed_by_key checks
1917     ###########################################################################
1918
1919     def build_summaries(self):
1920         """ Build a summary of changes the upload introduces. """
1921
1922         (byhand, new, summary, override_summary) = self.pkg.file_summary()
1923
1924         short_summary = summary
1925
1926         # This is for direport's benefit...
1927         f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1928
1929         if byhand or new:
1930             summary += "Changes: " + f
1931
1932         summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1933
1934         summary += self.announce(short_summary, 0)
1935
1936         return (summary, short_summary)
1937
1938     ###########################################################################
1939
1940     def close_bugs(self, summary, action):
1941         """
1942         Send mail to close bugs as instructed by the closes field in the changes file.
1943         Also add a line to summary if any work was done.
1944
1945         @type summary: string
1946         @param summary: summary text, as given by L{build_summaries}
1947
1948         @type action: bool
1949         @param action: Set to false no real action will be done.
1950
1951         @rtype: string
1952         @return: summary. If action was taken, extended by the list of closed bugs.
1953
1954         """
1955
1956         template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1957
1958         bugs = self.pkg.changes["closes"].keys()
1959
1960         if not bugs:
1961             return summary
1962
1963         bugs.sort()
1964         summary += "Closing bugs: "
1965         for bug in bugs:
1966             summary += "%s " % (bug)
1967             if action:
1968                 self.update_subst()
1969                 self.Subst["__BUG_NUMBER__"] = bug
1970                 if self.pkg.changes["distribution"].has_key("stable"):
1971                     self.Subst["__STABLE_WARNING__"] = """
1972 Note that this package is not part of the released stable Debian
1973 distribution.  It may have dependencies on other unreleased software,
1974 or other instabilities.  Please take care if you wish to install it.
1975 The update will eventually make its way into the next released Debian
1976 distribution."""
1977                 else:
1978                     self.Subst["__STABLE_WARNING__"] = ""
1979                 mail_message = utils.TemplateSubst(self.Subst, template)
1980                 utils.send_mail(mail_message)
1981
1982                 # Clear up after ourselves
1983                 del self.Subst["__BUG_NUMBER__"]
1984                 del self.Subst["__STABLE_WARNING__"]
1985
1986         if action and self.logger:
1987             self.logger.log(["closing bugs"] + bugs)
1988
1989         summary += "\n"
1990
1991         return summary
1992
1993     ###########################################################################
1994
1995     def announce(self, short_summary, action):
1996         """
1997         Send an announce mail about a new upload.
1998
1999         @type short_summary: string
2000         @param short_summary: Short summary text to include in the mail
2001
2002         @type action: bool
2003         @param action: Set to false no real action will be done.
2004
2005         @rtype: string
2006         @return: Textstring about action taken.
2007
2008         """
2009
2010         cnf = Config()
2011         announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
2012
2013         # Only do announcements for source uploads with a recent dpkg-dev installed
2014         if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
2015            self.pkg.changes["architecture"].has_key("source"):
2016             return ""
2017
2018         lists_done = {}
2019         summary = ""
2020
2021         self.Subst["__SHORT_SUMMARY__"] = short_summary
2022
2023         for dist in self.pkg.changes["distribution"].keys():
2024             suite = get_suite(dist)
2025             if suite is None: continue
2026             announce_list = suite.announce
2027             if announce_list == "" or lists_done.has_key(announce_list):
2028                 continue
2029
2030             lists_done[announce_list] = 1
2031             summary += "Announcing to %s\n" % (announce_list)
2032
2033             if action:
2034                 self.update_subst()
2035                 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
2036                 if cnf.get("Dinstall::TrackingServer") and \
2037                    self.pkg.changes["architecture"].has_key("source"):
2038                     trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
2039                     self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
2040
2041                 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
2042                 utils.send_mail(mail_message)
2043
2044                 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
2045
2046         if cnf.FindB("Dinstall::CloseBugs"):
2047             summary = self.close_bugs(summary, action)
2048
2049         del self.Subst["__SHORT_SUMMARY__"]
2050
2051         return summary
2052
2053     ###########################################################################
2054     @session_wrapper
2055     def accept (self, summary, short_summary, session=None):
2056         """
2057         Accept an upload.
2058
2059         This moves all files referenced from the .changes into the pool,
2060         sends the accepted mail, announces to lists, closes bugs and
2061         also checks for override disparities. If enabled it will write out
2062         the version history for the BTS Version Tracking and will finally call
2063         L{queue_build}.
2064
2065         @type summary: string
2066         @param summary: Summary text
2067
2068         @type short_summary: string
2069         @param short_summary: Short summary
2070         """
2071
2072         cnf = Config()
2073         stats = SummaryStats()
2074
2075         print "Installing."
2076         self.logger.log(["installing changes", self.pkg.changes_file])
2077
2078         binaries = []
2079         poolfiles = []
2080
2081         # Add the .dsc file to the DB first
2082         for newfile, entry in self.pkg.files.items():
2083             if entry["type"] == "dsc":
2084                 source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
2085                 for j in pfs:
2086                     poolfiles.append(j)
2087
2088         # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
2089         for newfile, entry in self.pkg.files.items():
2090             if entry["type"] == "deb":
2091                 b, pf = add_deb_to_db(self, newfile, session)
2092                 binaries.append(b)
2093                 poolfiles.append(pf)
2094
2095         # If this is a sourceful diff only upload that is moving
2096         # cross-component we need to copy the .orig files into the new
2097         # component too for the same reasons as above.
2098         # XXX: mhy: I think this should be in add_dsc_to_db
2099         if self.pkg.changes["architecture"].has_key("source"):
2100             for orig_file in self.pkg.orig_files.keys():
2101                 if not self.pkg.orig_files[orig_file].has_key("id"):
2102                     continue # Skip if it's not in the pool
2103                 orig_file_id = self.pkg.orig_files[orig_file]["id"]
2104                 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
2105                     continue # Skip if the location didn't change
2106
2107                 # Do the move
2108                 oldf = get_poolfile_by_id(orig_file_id, session)
2109                 old_filename = os.path.join(oldf.location.path, oldf.filename)
2110                 old_dat = {'size': oldf.filesize,   'md5sum': oldf.md5sum,
2111                            'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
2112
2113                 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
2114
2115                 # TODO: Care about size/md5sum collisions etc
2116                 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
2117
2118                 # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
2119                 if newf is None:
2120                     utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
2121                     newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
2122
2123                     session.flush()
2124
2125                     # Don't reference the old file from this changes
2126                     for p in poolfiles:
2127                         if p.file_id == oldf.file_id:
2128                             poolfiles.remove(p)
2129
2130                     poolfiles.append(newf)
2131
2132                     # Fix up the DSC references
2133                     toremove = []
2134
2135                     for df in source.srcfiles:
2136                         if df.poolfile.file_id == oldf.file_id:
2137                             # Add a new DSC entry and mark the old one for deletion
2138                             # Don't do it in the loop so we don't change the thing we're iterating over
2139                             newdscf = DSCFile()
2140                             newdscf.source_id = source.source_id
2141                             newdscf.poolfile_id = newf.file_id
2142                             session.add(newdscf)
2143
2144                             toremove.append(df)
2145
2146                     for df in toremove:
2147                         session.delete(df)
2148
2149                     # Flush our changes
2150                     session.flush()
2151
2152                     # Make sure that our source object is up-to-date
2153                     session.expire(source)
2154
2155         # Add changelog information to the database
2156         self.store_changelog()
2157
2158         # Install the files into the pool
2159         for newfile, entry in self.pkg.files.items():
2160             destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
2161             utils.move(newfile, destination)
2162             self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
2163             stats.accept_bytes += float(entry["size"])
2164
2165         # Copy the .changes file across for suite which need it.
2166         copy_changes = dict([(x.copychanges, '')
2167                              for x in session.query(Suite).filter(Suite.suite_name.in_(self.pkg.changes["distribution"].keys())).all()
2168                              if x.copychanges is not None])
2169
2170         for dest in copy_changes.keys():
2171             utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
2172
2173         # We're done - commit the database changes
2174         session.commit()
2175         # Our SQL session will automatically start a new transaction after
2176         # the last commit
2177
2178         # Now ensure that the metadata has been added
2179         # This has to be done after we copy the files into the pool
2180         # For source if we have it:
2181         if self.pkg.changes["architecture"].has_key("source"):
2182             import_metadata_into_db(source, session)
2183
2184         # Now for any of our binaries
2185         for b in binaries:
2186             import_metadata_into_db(b, session)
2187
2188         session.commit()
2189
2190         # Move the .changes into the 'done' directory
2191         utils.move(self.pkg.changes_file,
2192                    os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))
2193
2194         if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
2195             UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
2196
2197         self.update_subst()
2198         self.Subst["__SUMMARY__"] = summary
2199         mail_message = utils.TemplateSubst(self.Subst,
2200                                            os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
2201         utils.send_mail(mail_message)
2202         self.announce(short_summary, 1)
2203
2204         ## Helper stuff for DebBugs Version Tracking
2205         if cnf.Find("Dir::Queue::BTSVersionTrack"):
2206             if self.pkg.changes["architecture"].has_key("source"):
2207                 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2208                 version_history = os.fdopen(fd, 'w')
2209                 version_history.write(self.pkg.dsc["bts changelog"])
2210                 version_history.close()
2211                 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2212                                       self.pkg.changes_file[:-8]+".versions")
2213                 os.rename(temp_filename, filename)
2214                 os.chmod(filename, 0644)
2215
2216             # Write out the binary -> source mapping.
2217             (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2218             debinfo = os.fdopen(fd, 'w')
2219             for name, entry in sorted(self.pkg.files.items()):
2220                 if entry["type"] == "deb":
2221                     line = " ".join([entry["package"], entry["version"],
2222                                      entry["architecture"], entry["source package"],
2223                                      entry["source version"]])
2224                     debinfo.write(line+"\n")
2225             debinfo.close()
2226             filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2227                                   self.pkg.changes_file[:-8]+".debinfo")
2228             os.rename(temp_filename, filename)
2229             os.chmod(filename, 0644)
2230
2231         session.commit()
2232
2233         # Set up our copy queues (e.g. buildd queues)
2234         for suite_name in self.pkg.changes["distribution"].keys():
2235             suite = get_suite(suite_name, session)
2236             for q in suite.copy_queues:
2237                 for f in poolfiles:
2238                     q.add_file_from_pool(f)
2239
2240         session.commit()
2241
2242         # Finally...
2243         stats.accept_count += 1
2244
2245     def check_override(self):
2246         """
2247         Checks override entries for validity. Mails "Override disparity" warnings,
2248         if that feature is enabled.
2249
2250         Abandons the check if
2251           - override disparity checks are disabled
2252           - mail sending is disabled
2253         """
2254
2255         cnf = Config()
2256
2257         # Abandon the check if override disparity checks have been disabled
2258         if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
2259             return
2260
2261         summary = self.pkg.check_override()
2262
2263         if summary == "":
2264             return
2265
2266         overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2267
2268         self.update_subst()
2269         self.Subst["__SUMMARY__"] = summary
2270         mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2271         utils.send_mail(mail_message)
2272         del self.Subst["__SUMMARY__"]
2273
2274     ###########################################################################
2275
2276     def remove(self, from_dir=None):
2277         """
2278         Used (for instance) in p-u to remove the package from unchecked
2279
2280         Also removes the package from holding area.
2281         """
2282         if from_dir is None:
2283             from_dir = self.pkg.directory
2284         h = Holding()
2285
2286         for f in self.pkg.files.keys():
2287             os.unlink(os.path.join(from_dir, f))
2288             if os.path.exists(os.path.join(h.holding_dir, f)):
2289                 os.unlink(os.path.join(h.holding_dir, f))
2290
2291         os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2292         if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2293             os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2294
2295     ###########################################################################
2296
2297     def move_to_queue (self, queue):
2298         """
2299         Move files to a destination queue using the permissions in the table
2300         """
2301         h = Holding()
2302         utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2303                    queue.path, perms=int(queue.change_perms, 8))
2304         for f in self.pkg.files.keys():
2305             utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
2306
2307     ###########################################################################
2308
2309     def force_reject(self, reject_files):
2310         """
2311         Forcefully move files from the current directory to the
2312         reject directory.  If any file already exists in the reject
2313         directory it will be moved to the morgue to make way for
2314         the new file.
2315
2316         @type reject_files: dict
2317         @param reject_files: file dictionary
2318
2319         """
2320
2321         cnf = Config()
2322
2323         for file_entry in reject_files:
2324             # Skip any files which don't exist or which we don't have permission to copy.
2325             if os.access(file_entry, os.R_OK) == 0:
2326                 continue
2327
2328             dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
2329
2330             try:
2331                 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
2332             except OSError, e:
2333                 # File exists?  Let's find a new name by adding a number
2334                 if e.errno == errno.EEXIST:
2335                     try:
2336                         dest_file = utils.find_next_free(dest_file, 255)
2337                     except NoFreeFilenameError:
2338                         # Something's either gone badly Pete Tong, or
2339                         # someone is trying to exploit us.
2340                         utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))
2341                         return
2342
2343                     # Make sure we really got it
2344                     try:
2345                         dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2346                     except OSError, e:
2347                         # Likewise
2348                         utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
2349                         return
2350                 else:
2351                     raise
2352             # If we got here, we own the destination file, so we can
2353             # safely overwrite it.
2354             utils.move(file_entry, dest_file, 1, perms=0660)
2355             os.close(dest_fd)
2356
2357     ###########################################################################
2358     def do_reject (self, manual=0, reject_message="", notes=""):
2359         """
2360         Reject an upload. If called without a reject message or C{manual} is
2361         true, spawn an editor so the user can write one.
2362
2363         @type manual: bool
2364         @param manual: manual or automated rejection
2365
2366         @type reject_message: string
2367         @param reject_message: A reject message
2368
2369         @return: 0
2370
2371         """
2372         # If we weren't given a manual rejection message, spawn an
2373         # editor so the user can add one in...
2374         if manual and not reject_message:
2375             (fd, temp_filename) = utils.temp_filename()
2376             temp_file = os.fdopen(fd, 'w')
2377             if len(notes) > 0:
2378                 for note in notes:
2379                     temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
2380                                     % (note.author, note.version, note.notedate, note.comment))
2381             temp_file.close()
2382             editor = os.environ.get("EDITOR","vi")
2383             answer = 'E'
2384             while answer == 'E':
2385                 os.system("%s %s" % (editor, temp_filename))
2386                 temp_fh = utils.open_file(temp_filename)
2387                 reject_message = "".join(temp_fh.readlines())
2388                 temp_fh.close()
2389                 print "Reject message:"
2390                 print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
2391                 prompt = "[R]eject, Edit, Abandon, Quit ?"
2392                 answer = "XXX"
2393                 while prompt.find(answer) == -1:
2394                     answer = utils.our_raw_input(prompt)
2395                     m = re_default_answer.search(prompt)
2396                     if answer == "":
2397                         answer = m.group(1)
2398                     answer = answer[:1].upper()
2399             os.unlink(temp_filename)
2400             if answer == 'A':
2401                 return 1
2402             elif answer == 'Q':
2403                 sys.exit(0)
2404
2405         print "Rejecting.\n"
2406
2407         cnf = Config()
2408
2409         reason_filename = self.pkg.changes_file[:-8] + ".reason"
2410         reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2411
2412         # Move all the files into the reject directory
2413         reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2414         self.force_reject(reject_files)
2415
2416         # If we fail here someone is probably trying to exploit the race
2417         # so let's just raise an exception ...
2418         if os.path.exists(reason_filename):
2419             os.unlink(reason_filename)
2420         reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2421
2422         rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
2423
2424         self.update_subst()
2425         if not manual:
2426             self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2427             self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2428             self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2429             os.write(reason_fd, reject_message)
2430             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2431         else:
2432             # Build up the rejection email
2433             user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2434             self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2435             self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2436             self.Subst["__REJECT_MESSAGE__"] = ""
2437             self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2438             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2439             # Write the rejection email out as the <foo>.reason file
2440             os.write(reason_fd, reject_mail_message)
2441
2442         del self.Subst["__REJECTOR_ADDRESS__"]
2443         del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2444         del self.Subst["__CC__"]
2445
2446         os.close(reason_fd)
2447
2448         # Send the rejection mail
2449         utils.send_mail(reject_mail_message)
2450
2451         if self.logger:
2452             self.logger.log(["rejected", self.pkg.changes_file])
2453
2454         return 0
2455
2456     ################################################################################
2457     def in_override_p(self, package, component, suite, binary_type, filename, session):
2458         """
2459         Check if a package already has override entries in the DB
2460
2461         @type package: string
2462         @param package: package name
2463
2464         @type component: string
2465         @param component: database id of the component
2466
2467         @type suite: int
2468         @param suite: database id of the suite
2469
2470         @type binary_type: string
2471         @param binary_type: type of the package
2472
2473         @type filename: string
2474         @param filename: filename we check
2475
2476         @return: the database result. But noone cares anyway.
2477
2478         """
2479
2480         cnf = Config()
2481
2482         if binary_type == "": # must be source
2483             file_type = "dsc"
2484         else:
2485             file_type = binary_type
2486
2487         # Override suite name; used for example with proposed-updates
2488         oldsuite = get_suite(suite, session)
2489         if (not oldsuite is None) and oldsuite.overridesuite:
2490             suite = oldsuite.overridesuite
2491
2492         result = get_override(package, suite, component, file_type, session)
2493
2494         # If checking for a source package fall back on the binary override type
2495         if file_type == "dsc" and len(result) < 1:
2496             result = get_override(package, suite, component, ['deb', 'udeb'], session)
2497
2498         # Remember the section and priority so we can check them later if appropriate
2499         if len(result) > 0:
2500             result = result[0]
2501             self.pkg.files[filename]["override section"] = result.section.section
2502             self.pkg.files[filename]["override priority"] = result.priority.priority
2503             return result
2504
2505         return None
2506
2507     ################################################################################
2508     def get_anyversion(self, sv_list, suite):
2509         """
2510         @type sv_list: list
2511         @param sv_list: list of (suite, version) tuples to check
2512
2513         @type suite: string
2514         @param suite: suite name
2515
2516         Description: TODO
2517         """
2518         Cnf = Config()
2519         anyversion = None
2520         anysuite = [suite] + [ vc.reference.suite_name for vc in get_version_checks(suite, "Enhances") ]
2521         for (s, v) in sv_list:
2522             if s in [ x.lower() for x in anysuite ]:
2523                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2524                     anyversion = v
2525
2526         return anyversion
2527
2528     ################################################################################
2529
2530     def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2531         """
2532         @type sv_list: list
2533         @param sv_list: list of (suite, version) tuples to check
2534
2535         @type filename: string
2536         @param filename: XXX
2537
2538         @type new_version: string
2539         @param new_version: XXX
2540
2541         Ensure versions are newer than existing packages in target
2542         suites and that cross-suite version checking rules as
2543         set out in the conf file are satisfied.
2544         """
2545
2546         cnf = Config()
2547
2548         # Check versions for each target suite
2549         for target_suite in self.pkg.changes["distribution"].keys():
2550             must_be_newer_than = [ vc.reference.suite_name for vc in get_version_checks(target_suite, "MustBeNewerThan") ]
2551             must_be_older_than = [ vc.reference.suite_name for vc in get_version_checks(target_suite, "MustBeOlderThan") ]
2552
2553             # Enforce "must be newer than target suite" even if conffile omits it
2554             if target_suite not in must_be_newer_than:
2555                 must_be_newer_than.append(target_suite)
2556
2557             for (suite, existent_version) in sv_list:
2558                 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
2559
2560                 if suite in must_be_newer_than and sourceful and vercmp < 1:
2561                     self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2562
2563                 if suite in must_be_older_than and vercmp > -1:
2564                     cansave = 0
2565
2566                     if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2567                         # we really use the other suite, ignoring the conflicting one ...
2568                         addsuite = self.pkg.changes["distribution-version"][suite]
2569
2570                         add_version = self.get_anyversion(sv_list, addsuite)
2571                         target_version = self.get_anyversion(sv_list, target_suite)
2572
2573                         if not add_version:
2574                             # not add_version can only happen if we map to a suite
2575                             # that doesn't enhance the suite we're propup'ing from.
2576                             # so "propup-ver x a b c; map a d" is a problem only if
2577                             # d doesn't enhance a.
2578                             #
2579                             # i think we could always propagate in this case, rather
2580                             # than complaining. either way, this isn't a REJECT issue
2581                             #
2582                             # And - we really should complain to the dorks who configured dak
2583                             self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite))
2584                             self.pkg.changes.setdefault("propdistribution", {})
2585                             self.pkg.changes["propdistribution"][addsuite] = 1
2586                             cansave = 1
2587                         elif not target_version:
2588                             # not targets_version is true when the package is NEW
2589                             # we could just stick with the "...old version..." REJECT
2590                             # for this, I think.
2591                             self.rejects.append("Won't propogate NEW packages.")
2592                         elif apt_pkg.VersionCompare(new_version, add_version) < 0:
2593                             # propogation would be redundant. no need to reject though.
2594                             self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2595                             cansave = 1
2596                         elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2597                              apt_pkg.VersionCompare(add_version, target_version) >= 0:
2598                             # propogate!!
2599                             self.warnings.append("Propogating upload to %s" % (addsuite))
2600                             self.pkg.changes.setdefault("propdistribution", {})
2601                             self.pkg.changes["propdistribution"][addsuite] = 1
2602                             cansave = 1
2603
2604                     if not cansave:
2605                         self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2606
2607     ################################################################################
2608     def check_binary_against_db(self, filename, session):
2609         # Ensure version is sane
2610         self.cross_suite_version_check( \
2611             get_suite_version_by_package(self.pkg.files[filename]["package"], \
2612                 self.pkg.files[filename]["architecture"], session),
2613             filename, self.pkg.files[filename]["version"], sourceful=False)
2614
2615         # Check for any existing copies of the file
2616         q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2617         q = q.filter_by(version=self.pkg.files[filename]["version"])
2618         q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2619
2620         if q.count() > 0:
2621             self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
2622
2623     ################################################################################
2624
2625     def check_source_against_db(self, filename, session):
2626         source = self.pkg.dsc.get("source")
2627         version = self.pkg.dsc.get("version")
2628
2629         # Ensure version is sane
2630         self.cross_suite_version_check( \
2631             get_suite_version_by_source(source, session), filename, version,
2632             sourceful=True)
2633
2634     ################################################################################
2635     def check_dsc_against_db(self, filename, session):
2636         """
2637
2638         @warning: NB: this function can remove entries from the 'files' index [if
2639          the orig tarball is a duplicate of the one in the archive]; if
2640          you're iterating over 'files' and call this function as part of
2641          the loop, be sure to add a check to the top of the loop to
2642          ensure you haven't just tried to dereference the deleted entry.
2643
2644         """
2645
2646         Cnf = Config()
2647         self.pkg.orig_files = {} # XXX: do we need to clear it?
2648         orig_files = self.pkg.orig_files
2649
2650         # Try and find all files mentioned in the .dsc.  This has
2651         # to work harder to cope with the multiple possible
2652         # locations of an .orig.tar.gz.
2653         # The ordering on the select is needed to pick the newest orig
2654         # when it exists in multiple places.
2655         for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2656             found = None
2657             if self.pkg.files.has_key(dsc_name):
2658                 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2659                 actual_size = int(self.pkg.files[dsc_name]["size"])
2660                 found = "%s in incoming" % (dsc_name)
2661
2662                 # Check the file does not already exist in the archive
2663                 ql = get_poolfile_like_name(dsc_name, session)
2664
2665                 # Strip out anything that isn't '%s' or '/%s$'
2666                 for i in ql:
2667                     if not i.filename.endswith(dsc_name):
2668                         ql.remove(i)
2669
2670                 # "[dak] has not broken them.  [dak] has fixed a
2671                 # brokenness.  Your crappy hack exploited a bug in
2672                 # the old dinstall.
2673                 #
2674                 # "(Come on!  I thought it was always obvious that
2675                 # one just doesn't release different files with
2676                 # the same name and version.)"
2677                 #                        -- ajk@ on d-devel@l.d.o
2678
2679                 if len(ql) > 0:
2680                     # Ignore exact matches for .orig.tar.gz
2681                     match = 0
2682                     if re_is_orig_source.match(dsc_name):
2683                         for i in ql:
2684                             if self.pkg.files.has_key(dsc_name) and \
2685                                int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2686                                self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2687                                 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2688                                 # TODO: Don't delete the entry, just mark it as not needed
2689                                 # This would fix the stupidity of changing something we often iterate over
2690                                 # whilst we're doing it
2691                                 del self.pkg.files[dsc_name]
2692                                 dsc_entry["files id"] = i.file_id
2693                                 if not orig_files.has_key(dsc_name):
2694                                     orig_files[dsc_name] = {}
2695                                 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2696                                 match = 1
2697
2698                                 # Don't bitch that we couldn't find this file later
2699                                 try:
2700                                     self.later_check_files.remove(dsc_name)
2701                                 except ValueError:
2702                                     pass
2703
2704
2705                     if not match:
2706                         self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2707
2708             elif re_is_orig_source.match(dsc_name):
2709                 # Check in the pool
2710                 ql = get_poolfile_like_name(dsc_name, session)
2711
2712                 # Strip out anything that isn't '%s' or '/%s$'
2713                 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2714                 for i in ql:
2715                     if not i.filename.endswith(dsc_name):
2716                         ql.remove(i)
2717
2718                 if len(ql) > 0:
2719                     # Unfortunately, we may get more than one match here if,
2720                     # for example, the package was in potato but had an -sa
2721                     # upload in woody.  So we need to choose the right one.
2722
2723                     # default to something sane in case we don't match any or have only one
2724                     x = ql[0]
2725
2726                     if len(ql) > 1:
2727                         for i in ql:
2728                             old_file = os.path.join(i.location.path, i.filename)
2729                             old_file_fh = utils.open_file(old_file)
2730                             actual_md5 = apt_pkg.md5sum(old_file_fh)
2731                             old_file_fh.close()
2732                             actual_size = os.stat(old_file)[stat.ST_SIZE]
2733                             if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2734                                 x = i
2735
2736                     old_file = os.path.join(i.location.path, i.filename)
2737                     old_file_fh = utils.open_file(old_file)
2738                     actual_md5 = apt_pkg.md5sum(old_file_fh)
2739                     old_file_fh.close()
2740                     actual_size = os.stat(old_file)[stat.ST_SIZE]
2741                     found = old_file
2742                     suite_type = x.location.archive_type
2743                     # need this for updating dsc_files in install()
2744                     dsc_entry["files id"] = x.file_id
2745                     # See install() in process-accepted...
2746                     if not orig_files.has_key(dsc_name):
2747                         orig_files[dsc_name] = {}
2748                     orig_files[dsc_name]["id"] = x.file_id
2749                     orig_files[dsc_name]["path"] = old_file
2750                     orig_files[dsc_name]["location"] = x.location.location_id
2751                 else:
2752                     # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2753                     # Not there? Check the queue directories...
2754                     for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2755                         if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2756                             continue
2757                         in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2758                         if os.path.exists(in_otherdir):
2759                             in_otherdir_fh = utils.open_file(in_otherdir)
2760                             actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2761                             in_otherdir_fh.close()
2762                             actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2763                             found = in_otherdir
2764                             if not orig_files.has_key(dsc_name):
2765                                 orig_files[dsc_name] = {}
2766                             orig_files[dsc_name]["path"] = in_otherdir
2767
2768                     if not found:
2769                         self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2770                         continue
2771             else:
2772                 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2773                 continue
2774             if actual_md5 != dsc_entry["md5sum"]:
2775                 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2776             if actual_size != int(dsc_entry["size"]):
2777                 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2778
2779     ################################################################################
2780     # This is used by process-new and process-holding to recheck a changes file
2781     # at the time we're running.  It mainly wraps various other internal functions
2782     # and is similar to accepted_checks - these should probably be tidied up
2783     # and combined
2784     def recheck(self, session):
2785         cnf = Config()
2786         for f in self.pkg.files.keys():
2787             # The .orig.tar.gz can disappear out from under us is it's a
2788             # duplicate of one in the archive.
2789             if not self.pkg.files.has_key(f):
2790                 continue
2791
2792             entry = self.pkg.files[f]
2793
2794             # Check that the source still exists
2795             if entry["type"] == "deb":
2796                 source_version = entry["source version"]
2797                 source_package = entry["source package"]
2798                 if not self.pkg.changes["architecture"].has_key("source") \
2799                    and not source_exists(source_package, source_version, \
2800                     suites = self.pkg.changes["distribution"].keys(), session = session):
2801                     source_epochless_version = re_no_epoch.sub('', source_version)
2802                     dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2803                     found = False
2804                     for q in ["Embargoed", "Unembargoed", "Newstage"]:
2805                         if cnf.has_key("Dir::Queue::%s" % (q)):
2806                             if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
2807                                 found = True
2808                     if not found:
2809                         self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2810
2811             # Version and file overwrite checks
2812             if entry["type"] == "deb":
2813                 self.check_binary_against_db(f, session)
2814             elif entry["type"] == "dsc":
2815                 self.check_source_against_db(f, session)
2816                 self.check_dsc_against_db(f, session)
2817
2818     ################################################################################
2819     def accepted_checks(self, overwrite_checks, session):
2820         # Recheck anything that relies on the database; since that's not
2821         # frozen between accept and our run time when called from p-a.
2822
2823         # overwrite_checks is set to False when installing to stable/oldstable
2824
2825         propogate={}
2826         nopropogate={}
2827
2828         # Find the .dsc (again)
2829         dsc_filename = None
2830         for f in self.pkg.files.keys():
2831             if self.pkg.files[f]["type"] == "dsc":
2832                 dsc_filename = f
2833
2834         for checkfile in self.pkg.files.keys():
2835             # The .orig.tar.gz can disappear out from under us is it's a
2836             # duplicate of one in the archive.
2837             if not self.pkg.files.has_key(checkfile):
2838                 continue
2839
2840             entry = self.pkg.files[checkfile]
2841
2842             # Check that the source still exists
2843             if entry["type"] == "deb":
2844                 source_version = entry["source version"]
2845                 source_package = entry["source package"]
2846                 if not self.pkg.changes["architecture"].has_key("source") \
2847                    and not source_exists(source_package, source_version, \
2848                     suites = self.pkg.changes["distribution"].keys(), \
2849                     session = session):
2850                     self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2851
2852             # Version and file overwrite checks
2853             if overwrite_checks:
2854                 if entry["type"] == "deb":
2855                     self.check_binary_against_db(checkfile, session)
2856                 elif entry["type"] == "dsc":
2857                     self.check_source_against_db(checkfile, session)
2858                     self.check_dsc_against_db(dsc_filename, session)
2859
2860             # propogate in the case it is in the override tables:
2861             for suite in self.pkg.changes.get("propdistribution", {}).keys():
2862                 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2863                     propogate[suite] = 1
2864                 else:
2865                     nopropogate[suite] = 1
2866
2867         for suite in propogate.keys():
2868             if suite in nopropogate:
2869                 continue
2870             self.pkg.changes["distribution"][suite] = 1
2871
2872         for checkfile in self.pkg.files.keys():
2873             # Check the package is still in the override tables
2874             for suite in self.pkg.changes["distribution"].keys():
2875                 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2876                     self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2877
2878     ################################################################################
2879     # If any file of an upload has a recent mtime then chances are good
2880     # the file is still being uploaded.
2881
2882     def upload_too_new(self):
2883         cnf = Config()
2884         too_new = False
2885         # Move back to the original directory to get accurate time stamps
2886         cwd = os.getcwd()
2887         os.chdir(self.pkg.directory)
2888         file_list = self.pkg.files.keys()
2889         file_list.extend(self.pkg.dsc_files.keys())
2890         file_list.append(self.pkg.changes_file)
2891         for f in file_list:
2892             try:
2893                 last_modified = time.time()-os.path.getmtime(f)
2894                 if last_modified < int(cnf["Dinstall::SkipTime"]):
2895                     too_new = True
2896                     break
2897             except:
2898                 pass
2899
2900         os.chdir(cwd)
2901         return too_new
2902
2903     def store_changelog(self):
2904
2905         # Skip binary-only upload if it is not a bin-NMU
2906         if not self.pkg.changes['architecture'].has_key('source'):
2907             from daklib.regexes import re_bin_only_nmu
2908             if not re_bin_only_nmu.search(self.pkg.changes['version']):
2909                 return
2910
2911         session = DBConn().session()
2912
2913         # Check if upload already has a changelog entry
2914         query = """SELECT changelog_id FROM changes WHERE source = :source
2915                    AND version = :version AND architecture = :architecture AND changelog_id != 0"""
2916         if session.execute(query, {'source': self.pkg.changes['source'], \
2917                                    'version': self.pkg.changes['version'], \
2918                                    'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
2919             session.commit()
2920             return
2921
2922         # Add current changelog text into changelogs_text table, return created ID
2923         query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
2924         ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]
2925
2926         # Link ID to the upload available in changes table
2927         query = """UPDATE changes SET changelog_id = :id WHERE source = :source
2928                    AND version = :version AND architecture = :architecture"""
2929         session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
2930                                 'version': self.pkg.changes['version'], \
2931                                 'architecture': " ".join(self.pkg.changes['architecture'].keys())})
2932
2933         session.commit()