#!/usr/bin/env python
# vim:set et sw=4:

"""
Queue utility functions for dak

@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2001 - 2006 James Troup <james@nocrew.org>
@copyright: 2009, 2010  Joerg Jaspert <joerg@debian.org>
@license: GNU General Public License version 2 or later
"""

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

###############################################################################

import errno
import os
import stat
import sys
import time
import apt_inst
import apt_pkg
import utils
import commands
import shutil
import textwrap
from types import *
from sqlalchemy.sql.expression import desc
from sqlalchemy.orm.exc import NoResultFound

import yaml

from dak_exceptions import *
from changes import *
from regexes import *
from config import Config
from holding import Holding
from urgencylog import UrgencyLog
from dbconn import *
from summarystats import SummaryStats
from utils import parse_changes, check_dsc_files, build_package_set
from textutils import fix_maintainer
from lintian import parse_lintian_output, generate_reject_messages
from contents import UnpackedSource

# suppress some deprecation warnings in squeeze related to apt_pkg
# module
import warnings
warnings.filterwarnings('ignore', \
    "apt_pkg.ParseSection\(\) is deprecated. Please see apt_pkg\.TagSection\(\) for the replacement\.", \
    DeprecationWarning)
warnings.filterwarnings('ignore', \
    "Attribute 'Find' of the 'apt_pkg\.TagSection' object is deprecated, use 'find' instead\.", \
    DeprecationWarning)

###############################################################################

def get_type(f, session):
    """
    Get the file type of C{f}

    @type f: dict
    @param f: file entry from Changes object

    @type session: SQLA Session
    @param session: SQL Alchemy session object

    @rtype: string
    @return: filetype

    """
    # Determine the type
    if f.has_key("dbtype"):
        file_type = f["dbtype"]
    elif re_source_ext.match(f["type"]):
        file_type = "dsc"
    else:
        file_type = f["type"]
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (file_type))

    # Validate the override type
    type_id = get_override_type(file_type, session)
    if type_id is None:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))

    return file_type

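# A minimal usage sketch (hypothetical values; a real entry comes from a parsed
# .changes via utils.build_file_list, and the session from DBConn()):
#
#     session = DBConn().session()
#     entry = {"type": "dsc", "section": "devel", "priority": "source"}
#     file_type = get_type(entry, session)    # -> "dsc"
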
################################################################################

# Determine what parts in a .changes are NEW

def determine_new(filename, changes, files, warn=1, session = None, dsc = None, new = None):
    """
    Determine which parts of a C{changes} file are NEW.

    @type filename: str
    @param filename: changes filename

    @type changes: Upload.Pkg.changes dict
    @param changes: Changes dictionary

    @type files: Upload.Pkg.files dict
    @param files: Files dictionary

    @type warn: bool
    @param warn: Warn if overrides are added for (old)stable

    @type session: SQLA Session
    @param session: SQL Alchemy session object

    @type dsc: Upload.Pkg.dsc dict
    @param dsc: (optional); Dsc dictionary

    @type new: dict
    @param new: new packages as returned by a previous call to this function, but override information may have changed

    @rtype: dict
    @return: dictionary of NEW components.

    """
    # TODO: This should all use the database instead of parsing the changes
    # file again
    byhand = {}
    if new is None:
        new = {}

    dbchg = get_dbchange(filename, session)
    if dbchg is None:
        print "Warning: cannot find changes file in database; won't check byhand"

    # Try to get the Package-Set field from an included .dsc file (if possible).
    if dsc:
        for package, entry in build_package_set(dsc, session).items():
            if not new.has_key(package):
                new[package] = entry

    # Build up a list of potentially new things
    for name, f in files.items():
        # Keep a record of byhand elements
        if f["section"] == "byhand":
            byhand[name] = 1
            continue

        pkg = f["package"]
        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f, session)
        component = f["component"]

        if file_type == "dsc":
            priority = "source"

        if not new.has_key(pkg):
            new[pkg] = {}
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        else:
            old_type = new[pkg]["type"]
            if old_type != file_type:
                # source gets trumped by deb or udeb
                if old_type == "dsc":
                    new[pkg]["priority"] = priority
                    new[pkg]["section"] = section
                    new[pkg]["type"] = file_type
                    new[pkg]["component"] = component

        new[pkg]["files"].append(name)

        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]

    # Fix up the list of target suites
    cnf = Config()
    for suite in changes["suite"].keys():
        oldsuite = get_suite(suite, session)
        if not oldsuite:
            print "WARNING: Invalid suite %s found" % suite
            continue

        if oldsuite.overridesuite:
            newsuite = get_suite(oldsuite.overridesuite, session)

            if newsuite:
                print "INFORMATION: Using overrides from suite %s instead of suite %s" % (
                    oldsuite.overridesuite, suite)
                del changes["suite"][suite]
                changes["suite"][oldsuite.overridesuite] = 1
            else:
                print "WARNING: Told to use overridesuite %s for %s but it doesn't exist.  Bugger" % (
                    oldsuite.overridesuite, suite)

    # Check for unprocessed byhand files
    if dbchg is not None:
        for b in byhand.keys():
            # Find the file entry in the database
            found = False
            for f in dbchg.files:
                if f.filename == b:
                    found = True
                    # If it's processed, we can ignore it
                    if f.processed:
                        del byhand[b]
                    break

            if not found:
                print "Warning: Couldn't find BYHAND item %s in the database; assuming unprocessed" % (b)

    # Check for new stuff
    for suite in changes["suite"].keys():
        for pkg in new.keys():
            ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
            if len(ql) > 0:
                for file_entry in new[pkg]["files"]:
                    if files[file_entry].has_key("new"):
                        del files[file_entry]["new"]
                del new[pkg]

    if warn:
        for s in ['stable', 'oldstable']:
            if changes["suite"].has_key(s):
                print "WARNING: overrides will be added for %s!" % s
        for pkg in new.keys():
            if new[pkg].has_key("othercomponents"):
                print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])

    return new, byhand

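# A rough usage sketch (hypothetical path; in dak this is driven from the
# process-new/process-upload tools with a loaded Upload object):
#
#     session = DBConn().session()
#     new, byhand = determine_new("/path/to/foo_1.0-1_amd64.changes",
#                                 upload.pkg.changes, upload.pkg.files,
#                                 session=session, dsc=upload.pkg.dsc)
#     if new:
#         check_valid(new, session)    # fill in section/priority ids
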
################################################################################

def check_valid(new, session = None):
    """
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages can not be udeb
      - source priority can only be assigned to dsc file types

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.

    @type session: SQLA Session
    @param session: SQL Alchemy session object

    """
    for pkg in new.keys():
        section_name = new[pkg]["section"]
        priority_name = new[pkg]["priority"]
        file_type = new[pkg]["type"]

        section = get_section(section_name, session)
        if section is None:
            new[pkg]["section id"] = -1
        else:
            new[pkg]["section id"] = section.section_id

        priority = get_priority(priority_name, session)
        if priority is None:
            new[pkg]["priority id"] = -1
        else:
            new[pkg]["priority id"] = priority.priority_id

        # Sanity checks
        di = section_name.find("debian-installer") != -1

        # If d-i, we must be udeb and vice-versa
        if     (di and file_type not in ("udeb", "dsc")) or \
           (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1

        # If dsc we need to be source and vice-versa
        if (priority_name == "source" and file_type != "dsc") or \
           (priority_name != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1

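# A small illustration (hypothetical package data): after check_valid() runs,
# unknown sections/priorities and d-i/udeb mismatches are flagged with id -1.
#
#     new = {"foo-udeb": {"section": "debian-installer",
#                         "priority": "optional", "type": "udeb"}}
#     check_valid(new, session)
#     # new["foo-udeb"]["section id"] / ["priority id"] are now set, or -1
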
###############################################################################

# Used by Upload.check_timestamps
class TarTime(object):
    def __init__(self, future_cutoff, past_cutoff):
        self.reset()
        self.future_cutoff = future_cutoff
        self.past_cutoff = past_cutoff

    def reset(self):
        self.future_files = {}
        self.ancient_files = {}

    def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
        if MTime > self.future_cutoff:
            self.future_files[Name] = MTime
        if MTime < self.past_cutoff:
            self.ancient_files[Name] = MTime

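# A usage sketch (assumed cutoff values; in Upload.check_timestamps the
# callback is handed to apt_inst's tar extraction so every member's mtime
# gets vetted):
#
#     now = time.time()
#     tar = TarTime(future_cutoff = now + 24 * 3600,    # > 1 day ahead
#                   past_cutoff = 315532800)            # before 1980-01-01
#     # ... extract the archive, invoking tar.callback per member ...
#     if tar.future_files or tar.ancient_files:
#         print "timestamp problems:", tar.future_files, tar.ancient_files
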
###############################################################################

def prod_maintainer(notes, upload):
    cnf = Config()

    # Here we prepare an editor and get them ready to prod...
    (fd, temp_filename) = utils.temp_filename()
    temp_file = os.fdopen(fd, 'w')
    for note in notes:
        temp_file.write(note.comment)
    temp_file.close()
    editor = os.environ.get("EDITOR","vi")
    answer = 'E'
    while answer == 'E':
        os.system("%s %s" % (editor, temp_filename))
        temp_fh = utils.open_file(temp_filename)
        prod_message = "".join(temp_fh.readlines())
        temp_fh.close()
        print "Prod message:"
        print utils.prefix_multi_line_string(prod_message,"  ",include_blank_lines=1)
        prompt = "[P]rod, Edit, Abandon, Quit ?"
        answer = "XXX"
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            m = re_default_answer.search(prompt)
            if answer == "":
                answer = m.group(1)
            answer = answer[:1].upper()
    os.unlink(temp_filename)
    if answer == 'A':
        return
    elif answer == 'Q':
        end()
        sys.exit(0)
    # Otherwise, do the prodding...
    user_email_address = utils.whoami() + " <%s>" % (
        cnf["Dinstall::MyAdminAddress"])

    Subst = upload.Subst

    Subst["__FROM_ADDRESS__"] = user_email_address
    Subst["__PROD_MESSAGE__"] = prod_message
    Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]

    prod_mail_message = utils.TemplateSubst(
        Subst,cnf["Dir::Templates"]+"/process-new.prod")

    # Send the prod mail
    utils.send_mail(prod_mail_message)

    print "Sent prodding message"

################################################################################

def edit_note(note, upload, session, trainee=False):
    # Write the current data to a temporary file
    (fd, temp_filename) = utils.temp_filename()
    editor = os.environ.get("EDITOR","vi")
    answer = 'E'
    while answer == 'E':
        os.system("%s %s" % (editor, temp_filename))
        temp_file = utils.open_file(temp_filename)
        newnote = temp_file.read().rstrip()
        temp_file.close()
        print "New Note:"
        print utils.prefix_multi_line_string(newnote,"  ")
        prompt = "[D]one, Edit, Abandon, Quit ?"
        answer = "XXX"
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            m = re_default_answer.search(prompt)
            if answer == "":
                answer = m.group(1)
            answer = answer[:1].upper()
    os.unlink(temp_filename)
    if answer == 'A':
        return
    elif answer == 'Q':
        end()
        sys.exit(0)

    comment = NewComment()
    comment.package = upload.pkg.changes["source"]
    comment.version = upload.pkg.changes["version"]
    comment.comment = newnote
    comment.author  = utils.whoami()
    comment.trainee = trainee
    session.add(comment)
    session.commit()

###############################################################################

# suite names DMs can upload to
dm_suites = ['unstable', 'experimental']

def get_newest_source(source, session):
    'returns the newest DBSource object in dm_suites'
    ## the most recent version of the package uploaded to unstable or
    ## experimental includes the field "DM-Upload-Allowed: yes" in the source
    ## section of its control file
    q = session.query(DBSource).filter_by(source = source). \
        filter(DBSource.suites.any(Suite.suite_name.in_(dm_suites))). \
        order_by(desc('source.version'))
    return q.first()

def get_suite_version_by_source(source, session):
    'returns a list of tuples (suite_name, version) for source package'
    q = session.query(Suite.suite_name, DBSource.version). \
        join(Suite.sources).filter_by(source = source)
    return q.all()

def get_source_by_package_and_suite(package, suite_name, session):
    '''
    returns a DBSource query filtered by DBBinary.package and this package's
    suite_name
    '''
    return session.query(DBSource). \
        join(DBSource.binaries).filter_by(package = package). \
        join(DBBinary.suites).filter_by(suite_name = suite_name)

def get_suite_version_by_package(package, arch_string, session):
    '''
    returns a list of tuples (suite_name, version) for binary package and
    arch_string
    '''
    return session.query(Suite.suite_name, DBBinary.version). \
        join(Suite.binaries).filter_by(package = package). \
        join(DBBinary.architecture). \
        filter(Architecture.arch_string.in_([arch_string, 'all'])).all()

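# Usage sketch for the query helpers above (hypothetical package names):
#
#     session = DBConn().session()
#     src = get_newest_source("hello", session)
#     if src is not None:
#         print src.version                    # newest in unstable/experimental
#     for suite, version in get_suite_version_by_source("hello", session):
#         print suite, version
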
class Upload(object):
    """
    Everything that has to do with processing an upload.

    """
    def __init__(self):
        self.logger = None
        self.pkg = Changes()
        self.reset()

    ###########################################################################

    def reset (self):
        """ Reset a number of internal variables."""

        # Initialize the substitution template map
        cnf = Config()
        self.Subst = {}
        self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
        self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
        self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
        self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]

        self.rejects = []
        self.warnings = []
        self.notes = []

        self.later_check_files = []

        self.pkg.reset()

    def package_info(self):
        """
        Format various messages from this Upload to send to the maintainer.
        """

        msgs = (
            ('Reject Reasons', self.rejects),
            ('Warnings', self.warnings),
            ('Notes', self.notes),
        )

        msg = ''
        for title, messages in msgs:
            if messages:
                msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
        msg += '\n\n'

        return msg

    ###########################################################################
    def update_subst(self):
        """ Set up the per-package template substitution mappings """

        cnf = Config()

        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not self.pkg.changes.has_key("architecture") or not \
           isinstance(self.pkg.changes["architecture"], dict):
            self.pkg.changes["architecture"] = { "Unknown" : "" }

        # and maintainer2047 may not exist.
        if not self.pkg.changes.has_key("maintainer2047"):
            self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]

        self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
        self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if self.pkg.changes["architecture"].has_key("source") and \
           self.pkg.changes["changedby822"] != "" and \
           (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):

            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
        else:
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")

        # Process policy doesn't set the fingerprint field and I don't want to make it
        # do it for now as I don't want to have to deal with the case where we accepted
        # the package into PU-NEW, but the fingerprint has gone away from the keyring in
        # the meantime so the package will be remarked as rejectable.  Urgh.
        # TODO: Fix this properly
        if self.pkg.changes.has_key('fingerprint'):
            session = DBConn().session()
            fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
            if fpr and self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
                if self.pkg.changes.has_key("sponsoremail"):
                    self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
            session.close()

        if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
            self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if cnf.get("Dinstall::OverrideMaintainer"):
            self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
            self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]

        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
        self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
        self.Subst["__SUITE__"] = ", ".join(self.pkg.changes["distribution"])

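    # Sketch: after update_subst(), self.Subst can drive utils.TemplateSubst,
    # e.g. (hypothetical template name):
    #
    #     mail = utils.TemplateSubst(u.Subst,
    #                                cnf["Dir::Templates"] + "/some.template")
    #     utils.send_mail(mail)
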
    ###########################################################################
    def load_changes(self, filename):
        """
        Load a changes file and set up a dictionary around it. Also checks for
        mandatory fields within.

        @type filename: string
        @param filename: Changes filename, full path.

        @rtype: boolean
        @return: whether the changes file was valid or not.  We may want to
                 reject even if this is True (see what gets put in self.rejects).
                 This is simply to prevent us even trying things later which will
                 fail because we couldn't properly parse the file.
        """
        Cnf = Config()
        self.pkg.changes_file = filename

        # Parse the .changes field into a dictionary
        try:
            self.pkg.changes.update(parse_changes(filename))
        except CantOpenError:
            self.rejects.append("%s: can't read file." % (filename))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except ChangesUnicodeError:
            self.rejects.append("%s: changes file not proper utf-8" % (filename))
            return False

        # Parse the Files field from the .changes into another dictionary
        try:
            self.pkg.files.update(utils.build_file_list(self.pkg.changes))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (filename, format))
            return False

        # Check for mandatory fields
        for i in ("distribution", "source", "binary", "architecture",
                  "version", "maintainer", "files", "changes", "description"):
            if not self.pkg.changes.has_key(i):
                # Avoid undefined errors later
                self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
                return False

        # Strip a source version in brackets from the source field
        if re_strip_srcver.search(self.pkg.changes["source"]):
            self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])

        # Ensure the source field is a valid package name.
        if not re_valid_pkg_name.match(self.pkg.changes["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))

        # Split multi-value fields into a lower-level dictionary
        for i in ("architecture", "distribution", "binary", "closes"):
            o = self.pkg.changes.get(i, "")
            if o != "":
                del self.pkg.changes[i]

            self.pkg.changes[i] = {}

            for j in o.split():
                self.pkg.changes[i][j] = 1

        # Fix the Maintainer: field to be RFC822/2047 compatible
        try:
            (self.pkg.changes["maintainer822"],
             self.pkg.changes["maintainer2047"],
             self.pkg.changes["maintainername"],
             self.pkg.changes["maintaineremail"]) = \
                   fix_maintainer (self.pkg.changes["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["maintainer"], msg))

        # ...likewise for the Changed-By: field if it exists.
        try:
            (self.pkg.changes["changedby822"],
             self.pkg.changes["changedby2047"],
             self.pkg.changes["changedbyname"],
             self.pkg.changes["changedbyemail"]) = \
                   fix_maintainer (self.pkg.changes.get("changed-by", ""))
        except ParseMaintError, msg:
            self.pkg.changes["changedby822"] = ""
            self.pkg.changes["changedby2047"] = ""
            self.pkg.changes["changedbyname"] = ""
            self.pkg.changes["changedbyemail"] = ""

            self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["changed-by"], msg))

        # Ensure all the values in Closes: are numbers
        if self.pkg.changes.has_key("closes"):
            for i in self.pkg.changes["closes"].keys():
                if re_isanum.match (i) == None:
                    self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))

        # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
        self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
        self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])

        # Check the .changes is non-empty
        if not self.pkg.files:
            self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
            return False

        # Changes was syntactically valid even if we'll reject
        return True

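    # A short usage sketch (hypothetical path; mirrors how the process-upload
    # tooling drives this class):
    #
    #     u = Upload()
    #     if u.load_changes("/srv/queue/foo_1.0-1_amd64.changes"):
    #         u.update_subst()
    #     if u.rejects:
    #         print u.package_info()
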
    ###########################################################################

    def check_distributions(self):
        "Check and map the Distribution field"

        Cnf = Config()

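        # SuiteMappings entries are whitespace-separated words; illustrative
        # (hypothetical) values:
        #     "map stable proposed-updates"
        #     "map-unreleased sid unstable"
        #     "ignore testing"
        #     "reject experimental-security"
        #     "propup-version testing-proposed-updates testing"
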
        # Handle suite mappings
        for m in Cnf.ValueList("SuiteMappings"):
            args = m.split()
            mtype = args[0]
            if mtype == "map" or mtype == "silent-map":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    del self.pkg.changes["distribution"][source]
                    self.pkg.changes["distribution"][dest] = 1
                    if mtype != "silent-map":
                        self.notes.append("Mapping %s to %s." % (source, dest))
                if self.pkg.changes.has_key("distribution-version"):
                    if self.pkg.changes["distribution-version"].has_key(source):
                        self.pkg.changes["distribution-version"][source]=dest
            elif mtype == "map-unreleased":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    for arch in self.pkg.changes["architecture"].keys():
                        if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
                            self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
                            del self.pkg.changes["distribution"][source]
                            self.pkg.changes["distribution"][dest] = 1
                            break
            elif mtype == "ignore":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    del self.pkg.changes["distribution"][suite]
                    self.warnings.append("Ignoring %s as a target suite." % (suite))
            elif mtype == "reject":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    self.rejects.append("Uploads to %s are not accepted." % (suite))
            elif mtype == "propup-version":
                # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
                #
                # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
                if self.pkg.changes["distribution"].has_key(args[1]):
                    self.pkg.changes.setdefault("distribution-version", {})
                    for suite in args[2:]:
                        self.pkg.changes["distribution-version"][suite] = suite

        # Ensure there is (still) a target distribution
        if len(self.pkg.changes["distribution"].keys()) < 1:
            self.rejects.append("No valid distribution remaining.")

        # Ensure target distributions exist
        for suite in self.pkg.changes["distribution"].keys():
            if not Cnf.has_key("Suite::%s" % (suite)):
                self.rejects.append("Unknown distribution `%s'." % (suite))

    ###########################################################################

    def binary_file_checks(self, f, session):
        cnf = Config()
        entry = self.pkg.files[f]

        # Extract package control information
        deb_file = utils.open_file(f)
        try:
            control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
        except:
            self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
            deb_file.close()
            # Can't continue, none of the checks on control would work.
            return

        # Check for mandatory "Description:"
        deb_file.seek(0)
        try:
            # Raises KeyError if the Description field is absent
            apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
        except:
            self.rejects.append("%s: Missing Description in binary package" % (f))
            return

        deb_file.close()

        # Check for mandatory fields
        for field in [ "Package", "Architecture", "Version" ]:
            if control.Find(field) == None:
                # Can't continue
                self.rejects.append("%s: No %s field in control." % (f, field))
                return

        # Ensure the package name matches the one given in the .changes
        if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
            self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))

        # Validate the package field
        package = control.Find("Package")
        if not re_valid_pkg_name.match(package):
            self.rejects.append("%s: invalid package name '%s'." % (f, package))

        # Validate the version field
        version = control.Find("Version")
        if not re_valid_version.match(version):
            self.rejects.append("%s: invalid version number '%s'." % (f, version))

        # Ensure the architecture of the .deb is one we know about.
        default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
        architecture = control.Find("Architecture")
        upload_suite = self.pkg.changes["distribution"].keys()[0]

        if      architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session = session)] \
            and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session = session)]:
            self.rejects.append("Unknown architecture '%s'." % (architecture))

        # Ensure the architecture of the .deb is one of the ones
        # listed in the .changes.
        if not self.pkg.changes["architecture"].has_key(architecture):
            self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))

        # Sanity-check the Depends field
        depends = control.Find("Depends")
        if depends == '':
            self.rejects.append("%s: Depends field is empty." % (f))

        # Sanity-check the Provides field
        provides = control.Find("Provides")
        if provides:
            provide = re_spacestrip.sub('', provides)
            if provide == '':
                self.rejects.append("%s: Provides field is empty." % (f))
            prov_list = provide.split(",")
            for prov in prov_list:
                if not re_valid_pkg_name.match(prov):
                    self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))

        # If there is a Built-Using field, we need to check we can find the
        # exact source version
        built_using = control.Find("Built-Using")
        if built_using:
            try:
                entry["built-using"] = []
                for dep in apt_pkg.parse_depends(built_using):
                    bu_s, bu_v, bu_e = dep[0]
                    # Check that it's an exact match dependency and we have
                    # some form of version
                    if bu_e != "=" or len(bu_v) < 1:
                        self.rejects.append("%s: Built-Using contains non strict dependency (%s %s %s)" % (f, bu_s, bu_e, bu_v))
                    else:
                        # Find the source id for this version
                        bu_so = get_sources_from_name(bu_s, version=bu_v, session = session)
                        if len(bu_so) != 1:
                            self.rejects.append("%s: Built-Using (%s = %s): Cannot find source package" % (f, bu_s, bu_v))
                        else:
                            entry["built-using"].append( (bu_so[0].source, bu_so[0].version, ) )

            except ValueError, e:
                self.rejects.append("%s: Cannot parse Built-Using field: %s" % (f, str(e)))

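        # For illustration (hypothetical field value): a well-formed entry such
        # as "Built-Using: gcc-4.6 (= 4.6.0-11)" is parsed by parse_depends()
        # into [[("gcc-4.6", "4.6.0-11", "=")]], which satisfies the strict
        # "=" check above and is then looked up in the source table.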

        # Check the section & priority match those given in the .changes (non-fatal)
        if     control.Find("Section") and entry["section"] != "" \
           and entry["section"] != control.Find("Section"):
            self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
                                (f, control.Find("Section", ""), entry["section"]))
        if control.Find("Priority") and entry["priority"] != "" \
           and entry["priority"] != control.Find("Priority"):
            self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
                                (f, control.Find("Priority", ""), entry["priority"]))

        entry["package"] = package
        entry["architecture"] = architecture
        entry["version"] = version
        entry["maintainer"] = control.Find("Maintainer", "")

        if f.endswith(".udeb"):
            self.pkg.files[f]["dbtype"] = "udeb"
        elif f.endswith(".deb"):
            self.pkg.files[f]["dbtype"] = "deb"
        else:
            self.rejects.append("%s is neither a .deb nor a .udeb." % (f))

        entry["source"] = control.Find("Source", entry["package"])

        # Get the source version
        source = entry["source"]
        source_version = ""

        if source.find("(") != -1:
            m = re_extract_src_version.match(source)
            source = m.group(1)
            source_version = m.group(2)

        if not source_version:
            source_version = self.pkg.files[f]["version"]

        entry["source package"] = source
        entry["source version"] = source_version

        # Ensure the filename matches the contents of the .deb
        m = re_isadeb.match(f)

        #  package name
        file_package = m.group(1)
        if entry["package"] != file_package:
            self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
                                (f, file_package, entry["dbtype"], entry["package"]))
        epochless_version = re_no_epoch.sub('', control.Find("Version"))

        #  version
        file_version = m.group(2)
        if epochless_version != file_version:
            self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
                                (f, file_version, entry["dbtype"], epochless_version))

        #  architecture
        file_architecture = m.group(3)
        if entry["architecture"] != file_architecture:
            self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
                                (f, file_architecture, entry["dbtype"], entry["architecture"]))

        # Check for existing source
        source_version = entry["source version"]
        source_package = entry["source package"]
        if self.pkg.changes["architecture"].has_key("source"):
            if source_version != self.pkg.changes["version"]:
                self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
                                    (source_version, f, self.pkg.changes["version"]))
        else:
            # Check in the SQL database
            if not source_exists(source_package, source_version, suites = \
                self.pkg.changes["distribution"].keys(), session = session):
                # Check in one of the other directories
                source_epochless_version = re_no_epoch.sub('', source_version)
                dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
                if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
                    entry["byhand"] = 1
                elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
                    entry["new"] = 1
                else:
                    dsc_file_exists = False
                    for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
                        if cnf.has_key("Dir::Queue::%s" % (myq)):
                            if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
                                dsc_file_exists = True
                                break

                    if not dsc_file_exists:
                        self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))

        # Check the version and for file overwrites
        self.check_binary_against_db(f, session)

    def source_file_checks(self, f, session):
        entry = self.pkg.files[f]

        m = re_issource.match(f)
        if not m:
            return

        entry["package"] = m.group(1)
        entry["version"] = m.group(2)
        entry["type"] = m.group(3)

        # Ensure the source package name matches the Source field in the .changes
        if self.pkg.changes["source"] != entry["package"]:
            self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))

        # Ensure the source version matches the version in the .changes file
        if re_is_orig_source.match(f):
            changes_version = self.pkg.changes["chopversion2"]
        else:
            changes_version = self.pkg.changes["chopversion"]

        if changes_version != entry["version"]:
            self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))

        # Ensure the .changes lists source in the Architecture field
        if not self.pkg.changes["architecture"].has_key("source"):
            self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))

        # Check the signature of a .dsc file
        if entry["type"] == "dsc":
            # check_signature returns either:
            #  (None, [list, of, rejects]) or (signature, [])
            (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
            for j in rejects:
                self.rejects.append(j)

        entry["architecture"] = "source"

    def per_suite_file_checks(self, f, suite, session):
        cnf = Config()
        entry = self.pkg.files[f]

        # Skip byhand
        if entry.has_key("byhand"):
            return

        # Check we have fields we need to do these checks
        oktogo = True
        for m in ['component', 'package', 'priority', 'size', 'md5sum']:
            if not entry.has_key(m):
                self.rejects.append("file '%s' does not have field %s set" % (f, m))
                oktogo = False

        if not oktogo:
            return

        # Handle component mappings
        for m in cnf.ValueList("ComponentMappings"):
            (source, dest) = m.split()
            if entry["component"] == source:
                entry["original component"] = source
                entry["component"] = dest

        # Ensure the component is valid for the target suite
        if cnf.has_key("Suite::%s::Components" % (suite)) and \
           entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
            self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
            return

        # Validate the component
        if not get_component(entry["component"], session):
            self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
            return

        # See if the package is NEW
        if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
            entry["new"] = 1

        # Validate the priority
        if entry["priority"].find('/') != -1:
            self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))

        # Determine the location
        location = cnf["Dir::Pool"]
        l = get_location(location, entry["component"], session=session)
        if l is None:
            self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
            entry["location id"] = -1
        else:
            entry["location id"] = l.location_id

        # Check the md5sum & size against existing files (if any)
        entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])

        found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
                                         entry["size"], entry["md5sum"], entry["location id"])

        if found is None:
            self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
        elif found is False and poolfile is not None:
            self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
        else:
            if poolfile is None:
                entry["files id"] = None
            else:
                entry["files id"] = poolfile.file_id

        # Check for packages that have moved from one component to another
        entry['suite'] = suite
        arch_list = [entry["architecture"], 'all']
        component = get_component_by_package_suite(self.pkg.files[f]['package'], \
            [suite], arch_list = arch_list, session = session)
        if component is not None:
            entry["othercomponents"] = component

    def check_files(self, action=True):
        file_keys = self.pkg.files.keys()
        holding = Holding()
        cnf = Config()

        if action:
            cwd = os.getcwd()
            os.chdir(self.pkg.directory)
            for f in file_keys:
                ret = holding.copy_to_holding(f)
                if ret is not None:
                    self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)

            os.chdir(cwd)

        # check we already know the changes file
        # [NB: this check must be done post-suite mapping]
        base_filename = os.path.basename(self.pkg.changes_file)

        session = DBConn().session()

        try:
            dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
            # if in the pool or in a queue other than unchecked, reject
            if (dbc.in_queue is None) \
                   or (dbc.in_queue is not None
                       and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
                self.rejects.append("%s file already known to dak" % base_filename)
        except NoResultFound, e:
            # not known, good
            pass

        has_binaries = False
        has_source = False

        for f, entry in self.pkg.files.items():
            # Ensure the file does not already exist in one of the accepted directories
            for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                if not cnf.has_key("Dir::Queue::%s" % (d)): continue
                if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
                    self.rejects.append("%s file already exists in the %s directory." % (f, d))

            if not re_taint_free.match(f):
                self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))

            # Check the file is readable
            if os.access(f, os.R_OK) == 0:
                # When running in -n, copy_to_holding() won't have
                # generated the reject_message, so we need to.
                if action:
                    if os.path.exists(f):
                        self.rejects.append("Can't read `%s'. [permission denied]" % (f))
                    else:
                        # Don't directly reject, mark to check later to deal with orig's
                        # we can find in the pool
                        self.later_check_files.append(f)
                entry["type"] = "unreadable"
                continue

            # If it's byhand skip remaining checks
            if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Checks for a binary package...
            elif re_isadeb.match(f):
                has_binaries = True
                entry["type"] = "deb"

                # This routine appends to self.rejects/warnings as appropriate
                self.binary_file_checks(f, session)

            # Checks for a source package...
            elif re_issource.match(f):
                has_source = True

                # This routine appends to self.rejects/warnings as appropriate
                self.source_file_checks(f, session)

            # Not a binary or source package?  Assume byhand...
            else:
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Per-suite file checks
            entry["oldfiles"] = {}
            for suite in self.pkg.changes["distribution"].keys():
                self.per_suite_file_checks(f, suite, session)

        session.close()

        # If the .changes file says it has source, it must have source.
        if self.pkg.changes["architecture"].has_key("source"):
            if not has_source:
                self.rejects.append("no source found, but Architecture line in changes mentions source.")

            if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
                self.rejects.append("source only uploads are not supported.")

    ###########################################################################

    def __dsc_filename(self):
        """
        Returns: (Status, Dsc_Filename)
        where
          Status: Boolean; True when there was no error, False otherwise
          Dsc_Filename: String; name of the dsc file if Status is True, reason for the error otherwise
        """
        dsc_filename = None

        # find the dsc
        for name, entry in self.pkg.files.items():
            if entry.has_key("type") and entry["type"] == "dsc":
                if dsc_filename:
                    return False, "cannot process a .changes file with multiple .dsc's."
                else:
                    dsc_filename = name

        if not dsc_filename:
            return False, "source uploads must contain a dsc file"

        return True, dsc_filename

    def load_dsc(self, action=True, signing_rules=1):
        """
        Find and load the dsc from self.pkg.files into self.pkg.dsc

        Returns: (Status, Reason)
        where
          Status: Boolean; True when there was no error, False otherwise
          Reason: String; When Status is False this describes the error
        """

        # find the dsc
        (status, dsc_filename) = self.__dsc_filename()
        if not status:
            # If status is false, dsc_filename has the reason
            return False, dsc_filename

        try:
            self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=signing_rules, dsc_file=1))
        except CantOpenError:
            if not action:
                return False, "%s: can't read file." % (dsc_filename)
        except ParseChangesError, line:
            return False, "%s: parse error, can't grok: %s." % (dsc_filename, line)
        except InvalidDscError, line:
            return False, "%s: syntax error on line %s." % (dsc_filename, line)
        except ChangesUnicodeError:
            return False, "%s: dsc file not proper utf-8." % (dsc_filename)

        return True, None

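    # A usage sketch (assumes load_changes() has already populated
    # self.pkg.files from a source upload):
    #
    #     (ok, reason) = u.load_dsc()
    #     if not ok:
    #         u.rejects.append(reason)
    #     else:
    #         print u.pkg.dsc["source"], u.pkg.dsc["version"]
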
1174     ###########################################################################
1175
1176     def check_dsc(self, action=True, session=None):
1177         """Returns bool indicating whether or not the source changes are valid"""
1178         # Ensure there is source to check
1179         if not self.pkg.changes["architecture"].has_key("source"):
1180             return True
1181
1182         (status, reason) = self.load_dsc(action=action)
1183         if not status:
1184             self.rejects.append(reason)
1185             return False
1186         (status, dsc_filename) = self.__dsc_filename()
1187         if not status:
1188             # If status is false, dsc_filename has the reason
1189             self.rejects.append(dsc_filename)
1190             return False
1191
1192         # Build up the file list of files mentioned by the .dsc
1193         try:
1194             self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
1195         except NoFilesFieldError:
1196             self.rejects.append("%s: no Files: field." % (dsc_filename))
1197             return False
1198         except UnknownFormatError, format:
1199             self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
1200             return False
1201         except ParseChangesError, line:
1202             self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
1203             return False
1204
1205         # Enforce mandatory fields
1206         for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
1207             if not self.pkg.dsc.has_key(i):
1208                 self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
1209                 return False
1210
1211         # Validate the source and version fields
1212         if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
1213             self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
1214         if not re_valid_version.match(self.pkg.dsc["version"]):
1215             self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))
1216
1217         # Only a limited list of source formats are allowed in each suite
1218         for dist in self.pkg.changes["distribution"].keys():
1219             allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
1220             if self.pkg.dsc["format"] not in allowed:
1221                 self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))
1222
1223         # Validate the Maintainer field
1224         try:
1225             # We ignore the return value
1226             fix_maintainer(self.pkg.dsc["maintainer"])
1227         except ParseMaintError, msg:
1228             self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
1229                                  % (dsc_filename, self.pkg.dsc["maintainer"], msg))
1230
1231         # Validate the build-depends field(s)
1232         for field_name in [ "build-depends", "build-depends-indep" ]:
1233             field = self.pkg.dsc.get(field_name)
1234             if field:
1235                 # Have apt try to parse them...
1236                 try:
1237                     apt_pkg.ParseSrcDepends(field)
1238                 except:
1239                     self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))
1240
1241         # Ensure the version number in the .dsc matches the version number in the .changes
1242         epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1243         changes_version = self.pkg.files[dsc_filename]["version"]
1244
1245         if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
1246             self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
1247
1248         # Ensure the Files field contain only what's expected
1249         self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))
1250
1251         # Ensure source is newer than existing source in target suites
1252         session = DBConn().session()
1253         self.check_source_against_db(dsc_filename, session)
1254         self.check_dsc_against_db(dsc_filename, session)
1255
1256         dbchg = get_dbchange(self.pkg.changes_file, session)
1257
1258         # Finally, check if we're missing any files
1259         for f in self.later_check_files:
1260             # This file was deferred for a later check; if we have a dbchg
1261             # object, check whether we've already processed it
1262             ok = False
1263             if dbchg:
1264                 for pf in dbchg.files:
1265                     if pf.filename == f and pf.processed:
1266                         self.notes.append('%s was already processed so we can go ahead' % f)
1267                         ok = True
1268                         del self.pkg.files[f]
1269             if not ok:
1270                 self.rejects.append("Could not find file %s referenced in the .changes" % f)
1271
1272         session.close()
1273
1274         return True
1275
1276     ###########################################################################
1277
1278     def get_changelog_versions(self, source_dir):
1279         """Extracts the source package and (optionally) grabs the
1280         version history out of debian/changelog for the BTS."""
1281
1282         cnf = Config()
1283
1284         # Find the .dsc (again)
1285         dsc_filename = None
1286         for f in self.pkg.files.keys():
1287             if self.pkg.files[f]["type"] == "dsc":
1288                 dsc_filename = f
1289
1290         # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1291         if not dsc_filename:
1292             return
1293
1294         # Create a symlink mirror of the source files in our temporary directory
1295         for f in self.pkg.files.keys():
1296             m = re_issource.match(f)
1297             if m:
1298                 src = os.path.join(source_dir, f)
1299                 # If a file is missing for whatever reason, give up.
1300                 if not os.path.exists(src):
1301                     return
1302                 ftype = m.group(3)
1303                 if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
1304                    self.pkg.orig_files[f].has_key("path"):
1305                     continue
1306                 dest = os.path.join(os.getcwd(), f)
1307                 os.symlink(src, dest)
1308
1309         # If the orig files are not a part of the upload, create symlinks to the
1310         # existing copies.
1311         for orig_file in self.pkg.orig_files.keys():
1312             if not self.pkg.orig_files[orig_file].has_key("path"):
1313                 continue
1314             dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
1315             os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
1316
1317         # Extract the source
1318         try:
1319             unpacked = UnpackedSource(dsc_filename)
1320         except Exception:
1321             self.rejects.append("'dpkg-source -x' failed for %s." % dsc_filename)
1322             return
1323
1324         if not cnf.Find("Dir::Queue::BTSVersionTrack"):
1325             return
1326
1327         # Get the upstream version
1328         upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1329         if re_strip_revision.search(upstr_version):
1330             upstr_version = re_strip_revision.sub('', upstr_version)
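             # Illustrative: a .dsc Version of "1:2.3-4" becomes "2.3-4" once the
             # epoch is stripped, and "2.3" once the Debian revision is removed.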
1331
1332         # Ensure the changelog file exists
1333         changelog_file = unpacked.get_changelog_file()
1334         if changelog_file is None:
1335             self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
1336             return
1337
1338         # Parse the changelog
1339         self.pkg.dsc["bts changelog"] = ""
1340         for line in changelog_file.readlines():
1341             m = re_changelog_versions.match(line)
1342             if m:
1343                 self.pkg.dsc["bts changelog"] += line
1344         changelog_file.close()
1345         unpacked.cleanup()
1346
1347         # Check we found at least one revision in the changelog
1348         if not self.pkg.dsc["bts changelog"]:
1349             self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1350
1351     def check_source(self):
1352         # Bail out if there's no source to check
1354         if not self.pkg.changes["architecture"].has_key("source"):
1355             return
1356
1357         tmpdir = utils.temp_dirname()
1358
1359         # Move into the temporary directory
1360         cwd = os.getcwd()
1361         os.chdir(tmpdir)
1362
1363         # Get the changelog version history
1364         self.get_changelog_versions(cwd)
1365
1366         # Move back and cleanup the temporary tree
1367         os.chdir(cwd)
1368
1369         try:
1370             shutil.rmtree(tmpdir)
1371         except OSError, e:
1372             if e.errno != errno.EACCES:
1373                 # Not a permissions problem, so don't try to recover
1374                 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1375
1376             self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1377             # We probably have u-r or u-w directories so chmod everything
1378             # and try again.
1379             cmd = "chmod -R u+rwx %s" % (tmpdir)
1380             result = os.system(cmd)
1381             if result != 0:
1382                 utils.fubar("'%s' failed with result %s." % (cmd, result))
1383             shutil.rmtree(tmpdir)
1384         except Exception, e:
1385             # Unexpected failure; include the exception in the error message
1386             utils.fubar("%s: couldn't remove tmp dir for source tree (%s)." % (self.pkg.dsc["source"], e))
1387
1388     ###########################################################################
1389     def ensure_hashes(self):
1390         # Make sure we recognise the Format: version of the .changes
1391         format = self.pkg.changes.get("format", "0.0").split(".", 1)
1392         if len(format) == 2:
1393             format = int(format[0]), int(format[1])
1394         else:
1395             format = int(float(format[0])), 0
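             # Illustrative: a Format: value of "1.8" parses to the tuple (1, 8),
             # while a bare "1" falls into the else branch and becomes (1, 0).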
1396
1397         # We need to deal with the original changes blob, as the fields we need
1398         # might not be in the changes dict serialised into the .dak anymore.
1399         orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1400
1401         # Copy the checksums over to the current changes dict.  This will keep
1402         # the existing modifications to it intact.
1403         for field in orig_changes:
1404             if field.startswith('checksums-'):
1405                 self.pkg.changes[field] = orig_changes[field]
1406
1407         # Check for unsupported hashes
1408         for j in utils.check_hash_fields(".changes", self.pkg.changes):
1409             self.rejects.append(j)
1410
1411         for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1412             self.rejects.append(j)
1413
1414         # We have to calculate the hash ourselves if the changes format predates
1415         # the hash's introduction, rather than requiring it in the changes file
1416         for hashname, hashfunc, version in utils.known_hashes:
1417             # TODO: Move _ensure_changes_hash into this class
1418             for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1419                 self.rejects.append(j)
1420             if "source" in self.pkg.changes["architecture"]:
1421                 # TODO: Move _ensure_dsc_hash into this class
1422                 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1423                     self.rejects.append(j)
1424
1425     def check_hashes(self):
1426         for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1427             self.rejects.append(m)
1428
1429         for m in utils.check_size(".changes", self.pkg.files):
1430             self.rejects.append(m)
1431
1432         for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1433             self.rejects.append(m)
1434
1435         for m in utils.check_size(".dsc", self.pkg.dsc_files):
1436             self.rejects.append(m)
1437
1438         self.ensure_hashes()
1439
1440     ###########################################################################
1441
1442     def ensure_orig(self, target_dir='.', session=None):
1443         """
1444         Ensures that all orig files mentioned in the changes file are present
1445         in target_dir. If they do not exist, they are symlinked into place.
1446
1447         A list of the symlinks that were created is returned (so they
1448         can be removed later).
1449         """
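             # Usage sketch (illustrative; mirrors the call in check_lintian below):
             #   symlinked = self.ensure_orig()
             #   try:
             #       ... run checks that need the orig tarballs on disk ...
             #   finally:
             #       for link in symlinked:
             #           os.unlink(link)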
1450
1451         symlinked = []
1452         cnf = Config()
1453
1454         for filename, entry in self.pkg.dsc_files.iteritems():
1455             if not re_is_orig_source.match(filename):
1456                 # File is not an orig; ignore
1457                 continue
1458
1459             if os.path.exists(filename):
1460                 # File exists, no need to continue
1461                 continue
1462
1463             def symlink_if_valid(path):
1464                 f = utils.open_file(path)
1465                 md5sum = apt_pkg.md5sum(f)
1466                 f.close()
1467
1468                 fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
1469                 expected = (int(entry['size']), entry['md5sum'])
1470
1471                 if fingerprint != expected:
1472                     return False
1473
1474                 dest = os.path.join(target_dir, filename)
1475
1476                 os.symlink(path, dest)
1477                 symlinked.append(dest)
1478
1479                 return True
1480
1481             session_ = session
1482             if session is None:
1483                 session_ = DBConn().session()
1484
1485             found = False
1486
1487             # Look in the pool
1488             for poolfile in get_poolfile_like_name(filename, session_):
1489                 poolfile_path = os.path.join(
1490                     poolfile.location.path, poolfile.filename
1491                 )
1492
1493                 if symlink_if_valid(poolfile_path):
1494                     found = True
1495                     break
1496
1497             if session is None:
1498                 session_.close()
1499
1500             if found:
1501                 continue
1502
1503             # Look in some other queues for the file
1504             queues = ('New', 'Byhand', 'ProposedUpdates',
1505                 'OldProposedUpdates', 'Embargoed', 'Unembargoed')
1506
1507             for queue in queues:
1508                 if not cnf.get('Dir::Queue::%s' % queue):
1509                     continue
1510
1511                 queuefile_path = os.path.join(
1512                     cnf['Dir::Queue::%s' % queue], filename
1513                 )
1514
1515                 if not os.path.exists(queuefile_path):
1516                     # Does not exist in this queue
1517                     continue
1518
1519                 if symlink_if_valid(queuefile_path):
1520                     break
1521
1522         return symlinked
1523
1524     ###########################################################################
1525
1526     def check_lintian(self):
1527         """
1528         Extends self.rejects by checking the output of lintian against tags
1529         specified in Dinstall::LintianTags.
1530         """
1531
1532         cnf = Config()
1533
1534         # Don't reject binary uploads
1535         if not self.pkg.changes['architecture'].has_key('source'):
1536             return
1537
1538         # Only check some distributions
1539         for dist in ('unstable', 'experimental'):
1540             if dist in self.pkg.changes['distribution']:
1541                 break
1542         else:
1543             return
1544
1545         # If we do not have a tagfile, don't do anything
1546         tagfile = cnf.get("Dinstall::LintianTags")
1547         if not tagfile:
1548             return
1549
1550         # Parse the yaml file
1551         sourcefile = file(tagfile, 'r')
1552         sourcecontent = sourcefile.read()
1553         sourcefile.close()
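             # The tag file is YAML with a top-level 'lintian' key mapping severity
             # names to lists of tags, roughly (illustrative):
             #   lintian:
             #     nonfatal:
             #       - some-tag
             #     fatal:
             #       - another-tag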
1554
1555         try:
1556             lintiantags = yaml.load(sourcecontent)['lintian']
1557         except yaml.YAMLError, msg:
1558             utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
1559             return
1560
1561         # Try and find all orig mentioned in the .dsc
1562         symlinked = self.ensure_orig()
1563
1564         # Setup the input file for lintian
1565         fd, temp_filename = utils.temp_filename()
1566         temptagfile = os.fdopen(fd, 'w')
1567         for tags in lintiantags.values():
1568             temptagfile.writelines(['%s\n' % x for x in tags])
1569         temptagfile.close()
1570
1571         try:
1572             cmd = "lintian --show-overrides --tags-from-file %s %s" % \
1573                 (temp_filename, self.pkg.changes_file)
1574
1575             result, output = commands.getstatusoutput(cmd)
1576         finally:
1577             # Remove our tempfile and any symlinks we created
1578             os.unlink(temp_filename)
1579
1580             for symlink in symlinked:
1581                 os.unlink(symlink)
1582
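             # lintian itself exits 2 on a run-time failure (as opposed to 0 for a
             # clean run and 1 for policy violations), so warn but still parse
             # whatever output we got.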
1583         if result == 2:
1584             utils.warn("lintian failed for %s [return code: %s]." % \
1585                 (self.pkg.changes_file, result))
1586             utils.warn(utils.prefix_multi_line_string(output, \
1587                 " [possible output:] "))
1588
1589         def log(*txt):
1590             if self.logger:
1591                 self.logger.log(
1592                     [self.pkg.changes_file, "check_lintian"] + list(txt)
1593                 )
1594
1595         # Generate messages
1596         parsed_tags = parse_lintian_output(output)
1597         self.rejects.extend(
1598             generate_reject_messages(parsed_tags, lintiantags, log=log)
1599         )
1600
1601     ###########################################################################
1602     def check_urgency(self):
1603         cnf = Config()
1604         if self.pkg.changes["architecture"].has_key("source"):
1605             if not self.pkg.changes.has_key("urgency"):
1606                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1607             self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1608             if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1609                 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1610                                      (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1611                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1612
1613     ###########################################################################
1614
1615     # Sanity check the time stamps of files inside debs.
1616     # [Files in the near future cause ugly warnings and extreme time
1617     #  travel can cause errors on extraction]
1618
1619     def check_timestamps(self):
1620         Cnf = Config()
1621
1622         future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1623         past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1624         tar = TarTime(future_cutoff, past_cutoff)
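             # Illustrative: with Dinstall::PastCutoffYear set to "1984", past_cutoff
             # is the epoch value for 1984-01-01, so anything older counts as ancient;
             # future_cutoff is "now" plus the configured grace period.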
1625
1626         for filename, entry in self.pkg.files.items():
1627             if entry["type"] == "deb":
1628                 tar.reset()
1629                 try:
1630                     deb_file = utils.open_file(filename)
1631                     apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
1632                     deb_file.seek(0)
1633                     try:
1634                         apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
1635                     except SystemError, e:
1636                         # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
1637                         if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
1638                             raise
1639                         deb_file.seek(0)
1640                         apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")
1641
1642                     deb_file.close()
1643
1644                     future_files = tar.future_files.keys()
1645                     if future_files:
1646                         num_future_files = len(future_files)
1647                         future_file = future_files[0]
1648                         future_date = tar.future_files[future_file]
1649                         self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1650                                % (filename, num_future_files, future_file, time.ctime(future_date)))
1651
1652                     ancient_files = tar.ancient_files.keys()
1653                     if ancient_files:
1654                         num_ancient_files = len(ancient_files)
1655                         ancient_file = ancient_files[0]
1656                         ancient_date = tar.ancient_files[ancient_file]
1657                         self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1658                                % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1659                 except Exception:
1660                     self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_info()[0], sys.exc_info()[1]))
1661
1662     def check_if_upload_is_sponsored(self, uid_email, uid_name):
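             # Decision sketch: an upload counts as unsponsored when the signing
             # key's uid email or name matches Maintainer: or Changed-By:; an empty
             # uid name matches nobody, so it is treated as sponsored.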
1663         if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1664             sponsored = False
1665         elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1666             sponsored = False
1667             if uid_name == "":
1668                 sponsored = True
1669         else:
1670             sponsored = True
1671             if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1672                 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1673                 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1674                     self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1675                         self.pkg.changes["sponsoremail"] = uid_email
1676
1677         return sponsored
1678
1679
1680     ###########################################################################
1681     # check_signed_by_key checks
1682     ###########################################################################
1683
1684     def check_signed_by_key(self):
1685         """Ensure the .changes is signed by an authorized uploader."""
1686         session = DBConn().session()
1687
1688         # First of all we check that the person has proper upload permissions
1689         # and that this upload isn't blocked
1690         fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1691
1692         if fpr is None:
1693             self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1694             return
1695
1696         # TODO: Check that import-keyring adds UIDs properly
1697         if not fpr.uid:
1698             self.rejects.append("Cannot find uid for fingerprint %s.  Please contact ftpmaster@debian.org" % fpr.fingerprint)
1699             return
1700
1701         # Check that the fingerprint which uploaded has permission to do so
1702         self.check_upload_permissions(fpr, session)
1703
1704         # Check that this package is not in a transition
1705         self.check_transition(session)
1706
1707         session.close()
1708
1709
1710     def check_upload_permissions(self, fpr, session):
1711         # Check any one-off upload blocks
1712         self.check_upload_blocks(fpr, session)
1713
1714         # Start with DM as a special case
1715         # DM is a special case unfortunately, so we check it first
1716         # (keys with no source access get more access than DMs in one
1717         #  way; DMs can only upload for their packages whether source
1718         #  or binary, whereas keys with no access might be able to
1719         #  upload some binaries)
1720         if fpr.source_acl.access_level == 'dm':
1721             self.check_dm_upload(fpr, session)
1722         else:
1723             # Check source-based permissions for other types
1724             if self.pkg.changes["architecture"].has_key("source") and \
1725                 fpr.source_acl.access_level is None:
1726                 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1727                 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1728                 self.rejects.append(rej)
1729                 return
1730             # If not a DM, we allow full upload rights
1731             uid_email = "%s@debian.org" % (fpr.uid.uid)
1732             self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1733
1734
1735         # Check binary upload permissions
1736         # By this point we know that DMs can't have got here unless they
1737         # are allowed to deal with the package concerned, so just apply
1738         # the normal checks
1739         if fpr.binary_acl.access_level == 'full':
1740             return
1741
1742         # Otherwise we're in the map case
1743         tmparches = self.pkg.changes["architecture"].copy()
1744         tmparches.pop('source', None)
1745
1746         for bam in fpr.binary_acl_map:
1747             tmparches.pop(bam.architecture.arch_string, None)
1748
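             # Illustrative: an upload for {source, amd64, i386} checked against a
             # binary ACL map containing only amd64 leaves {i386} behind, which
             # triggers the reject (or the unimplemented throw-away) below.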
1749         if len(tmparches.keys()) > 0:
1750             if fpr.binary_reject:
1751                 rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1752                 rej += "\narchitectures involved are: %s" % ",".join(tmparches.keys())
1753                 self.rejects.append(rej)
1754             else:
1755                 # TODO: This is where we'll implement reject vs throw away binaries later
1756                 rej = "Uhm.  I'm meant to throw away the binaries now but that's not implemented yet"
1757                 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1758                 rej += "\nFingerprint: %s" % (fpr.fingerprint)
1759                 self.rejects.append(rej)
1760
1761
1762     def check_upload_blocks(self, fpr, session):
1763         """Check whether any upload blocks apply to this source, source
1764            version, uid / fpr combination"""
1765
1766         def block_rej_template(fb):
1767             rej = 'Manual upload block in place for package %s' % fb.source
1768             if fb.version is not None:
1769                 rej += ', version %s' % fb.version
1770             return rej
1771
1772         for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1773             # version is None if the block applies to all versions
1774             if fb.version is None or fb.version == self.pkg.changes['version']:
1775                 # Check both fpr and uid - either is enough to cause a reject
1776                 if fb.fpr is not None:
1777                     if fb.fpr.fingerprint == fpr.fingerprint:
1778                         self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1779                 if fb.uid is not None:
1780                     if fb.uid == fpr.uid:
1781                         self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1782
1783
1784     def check_dm_upload(self, fpr, session):
1785         # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
1786         ## none of the uploaded packages are NEW
1787         rej = False
1788         for f in self.pkg.files.keys():
1789             if self.pkg.files[f].has_key("byhand"):
1790                 self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
1791                 rej = True
1792             if self.pkg.files[f].has_key("new"):
1793                 self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
1794                 rej = True
1795
1796         if rej:
1797             return
1798
1799         r = get_newest_source(self.pkg.changes["source"], session)
1800
1801         if r is None:
1802             rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
1803             self.rejects.append(rej)
1804             return
1805
1806         if not r.dm_upload_allowed:
1807             rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
1808             self.rejects.append(rej)
1809             return
1810
1811         ## the Maintainer: field of the uploaded .changes file corresponds with
1812         ## the owner of the key used (ie, non-developer maintainers may not sponsor
1813         ## uploads)
1814         if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
1815             self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
1816
1817         ## the most recent version of the package uploaded to unstable or
1818         ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
1819         ## non-developer maintainers cannot NMU or hijack packages)
1820
1821         # srcuploaders includes the maintainer
1822         accept = False
1823         for sup in r.srcuploaders:
1824             (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
1825             # Eww - I hope we never have two people with the same name in Debian
1826             if email == fpr.uid.uid or name == fpr.uid.name:
1827                 accept = True
1828                 break
1829
1830         if not accept:
1831             self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
1832             return
1833
1834         ## none of the packages are being taken over from other source packages
1835         for b in self.pkg.changes["binary"].keys():
1836             for suite in self.pkg.changes["distribution"].keys():
1837                 for s in get_source_by_package_and_suite(b, suite, session):
1838                     if s.source != self.pkg.changes["source"]:
1839                         self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s.source, suite))
1840
1841
1842
1843     def check_transition(self, session):
1844         cnf = Config()
1845
1846         sourcepkg = self.pkg.changes["source"]
1847
1848         # No sourceful upload -> no need to do anything else, direct return
1849         # We also only handle unstable uploads, not experimental ones or those
1850         # going to some proposed-updates queue
1851         if "source" not in self.pkg.changes["architecture"] or \
1852            "unstable" not in self.pkg.changes["distribution"]:
1853             return
1854
1855         # Also, only check if a transitions file is defined (and actually
1856         # exists).
1857         transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1858         if transpath == "" or not os.path.exists(transpath):
1859             return
1860
1861         # Parse the yaml file
1862         sourcefile = file(transpath, 'r')
1863         sourcecontent = sourcefile.read()
1864         try:
1865             transitions = yaml.load(sourcecontent)
1866         except yaml.YAMLError, msg:
1867             # This shouldn't happen: there is a wrapper to edit the file which
1868             # checks it, but we would rather be safe than end up rejecting
1869             # everything.
1870             utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1871             return
1872
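             # The transitions file maps a transition name to its details, roughly
             # (illustrative):
             #   ongoing-transition:
             #     reason: "library transition"
             #     source: libfoo
             #     new: 1.2-1
             #     rm: Some Releaseteam Member
             #     packages:
             #       - affected-package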
1873         # Now look through all defined transitions
1874         for trans in transitions:
1875             t = transitions[trans]
1876             source = t["source"]
1877             expected = t["new"]
1878
1879             # Will be None if nothing is in testing.
1880             current = get_source_in_suite(source, "testing", session)
1881             if current is not None:
1882                 compare = apt_pkg.VersionCompare(current.version, expected)
1883
1884             if current is None or compare < 0:
1885                 # This is still valid, the current version in testing is older than
1886                 # the new version we wait for, or there is none in testing yet
1887
1888                 # Check if the source we look at is affected by this.
1889                 if sourcepkg in t['packages']:
1890                     # The source is affected, let's reject it.
1891
1892                     rejectmsg = "%s: part of the %s transition.\n\n" % (
1893                         sourcepkg, trans)
1894
1895                     if current is not None:
1896                         currentlymsg = "at version %s" % (current.version)
1897                     else:
1898                         currentlymsg = "not present in testing"
1899
1900                     rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1901
1902                     rejectmsg += "\n".join(textwrap.wrap("""Your package
1903 is part of a testing transition designed to get %s migrated (it is
1904 currently %s, we need version %s).  This transition is managed by the
1905 Release Team, and %s is the Release-Team member responsible for it.
1906 Please mail debian-release@lists.debian.org or contact %s directly if you
1907 need further assistance.  You might want to upload to experimental until this
1908 transition is done."""
1909                             % (source, currentlymsg, expected, t["rm"], t["rm"])))
1910
1911                     self.rejects.append(rejectmsg)
1912                     return
1913
1914     ###########################################################################
1915     # End check_signed_by_key checks
1916     ###########################################################################
1917
1918     def build_summaries(self):
1919         """ Build a summary of changes the upload introduces. """
1920
1921         (byhand, new, summary, override_summary) = self.pkg.file_summary()
1922
1923         short_summary = summary
1924
1925         # This is for direport's benefit...
1926         f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1927
1928         if byhand or new:
1929             summary += "Changes: " + f
1930
1931         summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1932
1933         summary += self.announce(short_summary, 0)
1934
1935         return (summary, short_summary)
1936
1937     ###########################################################################
1938
1939     def close_bugs(self, summary, action):
1940         """
1941         Send mail to close bugs as instructed by the closes field in the changes file.
1942         Also add a line to summary if any work was done.
1943
1944         @type summary: string
1945         @param summary: summary text, as given by L{build_summaries}
1946
1947         @type action: bool
1948         @param action: If set to false, no real action will be done.
1949
1950         @rtype: string
1951         @return: summary. If action was taken, extended by the list of closed bugs.
1952
1953         """
1954
1955         template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1956
1957         bugs = self.pkg.changes["closes"].keys()
1958
1959         if not bugs:
1960             return summary
1961
1962         bugs.sort()
1963         summary += "Closing bugs: "
1964         for bug in bugs:
1965             summary += "%s " % (bug)
1966             if action:
1967                 self.update_subst()
1968                 self.Subst["__BUG_NUMBER__"] = bug
1969                 if self.pkg.changes["distribution"].has_key("stable"):
1970                     self.Subst["__STABLE_WARNING__"] = """
1971 Note that this package is not part of the released stable Debian
1972 distribution.  It may have dependencies on other unreleased software,
1973 or other instabilities.  Please take care if you wish to install it.
1974 The update will eventually make its way into the next released Debian
1975 distribution."""
1976                 else:
1977                     self.Subst["__STABLE_WARNING__"] = ""
1978                 mail_message = utils.TemplateSubst(self.Subst, template)
1979                 utils.send_mail(mail_message)
1980
1981                 # Clear up after ourselves
1982                 del self.Subst["__BUG_NUMBER__"]
1983                 del self.Subst["__STABLE_WARNING__"]
1984
1985         if action and self.logger:
1986             self.logger.log(["closing bugs"] + bugs)
1987
1988         summary += "\n"
1989
1990         return summary
1991
1992     ###########################################################################
1993
1994     def announce(self, short_summary, action):
1995         """
1996         Send an announce mail about a new upload.
1997
1998         @type short_summary: string
1999         @param short_summary: Short summary text to include in the mail
2000
2001         @type action: bool
2002         @param action: If set to false, no real action will be done.
2003
2004         @rtype: string
2005         @return: Text string describing the action taken.
2006
2007         """
2008
2009         cnf = Config()
2010         announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
2011
2012         # Only do announcements for source uploads with a recent dpkg-dev installed
2013         if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
2014            self.pkg.changes["architecture"].has_key("source"):
2015             return ""
2016
2017         lists_done = {}
2018         summary = ""
2019
2020         self.Subst["__SHORT_SUMMARY__"] = short_summary
2021
2022         for dist in self.pkg.changes["distribution"].keys():
2023             suite = get_suite(dist)
2024             if suite is None: continue
2025             announce_list = suite.announce
2026             if announce_list == "" or lists_done.has_key(announce_list):
2027                 continue
2028
2029             lists_done[announce_list] = 1
2030             summary += "Announcing to %s\n" % (announce_list)
2031
2032             if action:
2033                 self.update_subst()
2034                 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
2035                 if cnf.get("Dinstall::TrackingServer") and \
2036                    self.pkg.changes["architecture"].has_key("source"):
2037                     trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
2038                     self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
2039
2040                 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
2041                 utils.send_mail(mail_message)
2042
2043                 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
2044
2045         if cnf.FindB("Dinstall::CloseBugs"):
2046             summary = self.close_bugs(summary, action)
2047
2048         del self.Subst["__SHORT_SUMMARY__"]
2049
2050         return summary
2051
2052     ###########################################################################
2053     @session_wrapper
2054     def accept (self, summary, short_summary, session=None):
2055         """
2056         Accept an upload.
2057
2058         This moves all files referenced from the .changes into the pool,
2059         sends the accepted mail, announces to lists, closes bugs and
2060         also checks for override disparities. If enabled it will write out
2061         the version history for the BTS Version Tracking and will finally call
2062         L{queue_build}.
2063
2064         @type summary: string
2065         @param summary: Summary text
2066
2067         @type short_summary: string
2068         @param short_summary: Short summary
2069         """
2070
2071         cnf = Config()
2072         stats = SummaryStats()
2073
2074         print "Installing."
2075         self.logger.log(["installing changes", self.pkg.changes_file])
2076
2077         binaries = []
2078         poolfiles = []
2079
2080         # Add the .dsc file to the DB first
2081         for newfile, entry in self.pkg.files.items():
2082             if entry["type"] == "dsc":
2083                 source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
2084                 for j in pfs:
2085                     poolfiles.append(j)
2086
2087         # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
2088         for newfile, entry in self.pkg.files.items():
2089             if entry["type"] == "deb":
2090                 b, pf = add_deb_to_db(self, newfile, session)
2091                 binaries.append(b)
2092                 poolfiles.append(pf)
2093
2094         # If this is a sourceful diff-only upload that is moving
2095         # cross-component, we need to copy the .orig files into the new
2096         # component too, so the complete source is present in the pool there.
2097         # XXX: mhy: I think this should be in add_dsc_to_db
2098         if self.pkg.changes["architecture"].has_key("source"):
2099             for orig_file in self.pkg.orig_files.keys():
2100                 if not self.pkg.orig_files[orig_file].has_key("id"):
2101                     continue # Skip if it's not in the pool
2102                 orig_file_id = self.pkg.orig_files[orig_file]["id"]
2103                 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
2104                     continue # Skip if the location didn't change
2105
2106                 # Do the move
2107                 oldf = get_poolfile_by_id(orig_file_id, session)
2108                 old_filename = os.path.join(oldf.location.path, oldf.filename)
2109                 old_dat = {'size': oldf.filesize,   'md5sum': oldf.md5sum,
2110                            'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
2111
2112                 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
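                     # Illustrative: poolify maps a source/component pair to the pool
                     # subdirectory, something like "pool/main/f/foo/", so the orig
                     # tarball keeps its basename under the new component.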
2113
2114                 # TODO: Care about size/md5sum collisions etc
2115                 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
2116
2117                 # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
2118                 if newf is None:
2119                     utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
2120                     newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
2121
2122                     session.flush()
2123
2124                     # Don't reference the old file from this changes.
2125                     # Rebuild the list rather than removing entries while
2126                     # iterating over it.
2127                     poolfiles = [x for x in poolfiles if x.file_id != oldf.file_id]
2128
2129                     poolfiles.append(newf)
2130
2131                     # Fix up the DSC references
2132                     toremove = []
2133
2134                     for df in source.srcfiles:
2135                         if df.poolfile.file_id == oldf.file_id:
2136                             # Add a new DSC entry and mark the old one for deletion
2137                             # Don't do it in the loop so we don't change the thing we're iterating over
2138                             newdscf = DSCFile()
2139                             newdscf.source_id = source.source_id
2140                             newdscf.poolfile_id = newf.file_id
2141                             session.add(newdscf)
2142
2143                             toremove.append(df)
2144
2145                     for df in toremove:
2146                         session.delete(df)
2147
2148                     # Flush our changes
2149                     session.flush()
2150
2151                     # Make sure that our source object is up-to-date
2152                     session.expire(source)
2153
2154         # Add changelog information to the database
2155         self.store_changelog()
2156
2157         # Install the files into the pool
2158         for newfile, entry in self.pkg.files.items():
2159             destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
2160             utils.move(newfile, destination)
2161             self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
2162             stats.accept_bytes += float(entry["size"])
2163
2164         # Copy the .changes file across for suites which need it.
2165         copy_changes = dict([(x.copychanges, '')
2166                              for x in session.query(Suite).filter(Suite.suite_name.in_(self.pkg.changes["distribution"].keys())).all()
2167                              if x.copychanges is not None])
2168
2169         for dest in copy_changes.keys():
2170             utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
2171
2172         # We're done - commit the database changes
2173         session.commit()
2174         # Our SQL session will automatically start a new transaction after
2175         # the last commit
2176
2177         # Now ensure that the metadata has been added
2178         # This has to be done after we copy the files into the pool
2179         # For source if we have it:
2180         if self.pkg.changes["architecture"].has_key("source"):
2181             import_metadata_into_db(source, session)
2182
2183         # Now for any of our binaries
2184         for b in binaries:
2185             import_metadata_into_db(b, session)
2186
2187         session.commit()
2188
2189         # Move the .changes into the 'done' directory
2190         utils.move(self.pkg.changes_file,
2191                    os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))
2192
2193         if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
2194             UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
2195
2196         self.update_subst()
2197         self.Subst["__SUMMARY__"] = summary
2198         mail_message = utils.TemplateSubst(self.Subst,
2199                                            os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
2200         utils.send_mail(mail_message)
2201         self.announce(short_summary, 1)
2202
2203         ## Helper stuff for DebBugs Version Tracking
2204         if cnf.Find("Dir::Queue::BTSVersionTrack"):
2205             if self.pkg.changes["architecture"].has_key("source"):
2206                 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2207                 version_history = os.fdopen(fd, 'w')
2208                 version_history.write(self.pkg.dsc["bts changelog"])
2209                 version_history.close()
2210                 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2211                                       self.pkg.changes_file[:-8]+".versions")
2212                 os.rename(temp_filename, filename)
2213                 os.chmod(filename, 0644)
2214
2215             # Write out the binary -> source mapping.
2216             (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2217             debinfo = os.fdopen(fd, 'w')
2218             for name, entry in sorted(self.pkg.files.items()):
2219                 if entry["type"] == "deb":
2220                     line = " ".join([entry["package"], entry["version"],
2221                                      entry["architecture"], entry["source package"],
2222                                      entry["source version"]])
2223                     debinfo.write(line+"\n")
2224             debinfo.close()
2225             filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2226                                   self.pkg.changes_file[:-8]+".debinfo")
2227             os.rename(temp_filename, filename)
2228             os.chmod(filename, 0644)
2229
2230         session.commit()
2231
2232         # Set up our copy queues (e.g. buildd queues)
2233         for suite_name in self.pkg.changes["distribution"].keys():
2234             suite = get_suite(suite_name, session)
2235             for q in suite.copy_queues:
2236                 for f in poolfiles:
2237                     q.add_file_from_pool(f)
2238
2239         session.commit()
2240
2241         # Finally...
2242         stats.accept_count += 1
2243
2244     def check_override(self):
2245         """
2246         Checks override entries for validity. Mails "Override disparity" warnings,
2247         if that feature is enabled.
2248
2249         Abandons the check if
2250           - override disparity checks are disabled
2251           - mail sending is disabled
2252         """
2253
2254         cnf = Config()
2255
2256         # Abandon the check if override disparity checks have been disabled
2257         if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
2258             return
2259
2260         summary = self.pkg.check_override()
2261
2262         if summary == "":
2263             return
2264
2265         overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2266
2267         self.update_subst()
2268         self.Subst["__SUMMARY__"] = summary
2269         mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2270         utils.send_mail(mail_message)
2271         del self.Subst["__SUMMARY__"]
2272
2273     ###########################################################################
2274
2275     def remove(self, from_dir=None):
2276         """
2277         Used (for instance) in p-u to remove the package from unchecked
2278
2279         Also removes the package from holding area.
2280         """
2281         if from_dir is None:
2282             from_dir = self.pkg.directory
2283         h = Holding()
2284
2285         for f in self.pkg.files.keys():
2286             os.unlink(os.path.join(from_dir, f))
2287             if os.path.exists(os.path.join(h.holding_dir, f)):
2288                 os.unlink(os.path.join(h.holding_dir, f))
2289
2290         os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2291         if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2292             os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2293
2294     ###########################################################################
2295
2296     def move_to_queue (self, queue):
2297         """
2298         Move files to a destination queue using the permissions in the table
2299         """
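             # queue.perms / queue.change_perms come from the database as octal
             # strings (illustrative: "0664"); int(x, 8) turns them into the
             # numeric mode bits that utils.move applies.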
2300         h = Holding()
2301         utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2302                    queue.path, perms=int(queue.change_perms, 8))
2303         for f in self.pkg.files.keys():
2304             utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
2305
2306     ###########################################################################
2307
2308     def force_reject(self, reject_files):
2309         """
2310         Forcefully move files from the current directory to the
2311         reject directory.  If any file already exists in the reject
2312         directory it will be moved to the morgue to make way for
2313         the new file.
2314
2315         @type reject_files: list
2316         @param reject_files: list of filenames to move into the reject directory
2317
2318         """
2319
2320         cnf = Config()
2321
2322         for file_entry in reject_files:
2323             # Skip any files which don't exist or which we don't have permission to copy.
2324             if not os.access(file_entry, os.R_OK):
2325                 continue
2326
2327             dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
2328
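                 # O_CREAT|O_EXCL makes the claim atomic: if the destination already
                 # exists the open fails with EEXIST rather than clobbering it, and
                 # we fall through to picking a fresh name below.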
2329             try:
2330                 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
2331             except OSError, e:
2332                 # File exists?  Let's find a new name by adding a number
2333                 if e.errno == errno.EEXIST:
2334                     try:
2335                         dest_file = utils.find_next_free(dest_file, 255)
2336                     except NoFreeFilenameError:
2337                         # Something's either gone badly Pete Tong, or
2338                         # someone is trying to exploit us.
2339                         utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))
2340                         return
2341
2342                     # Make sure we really got it
2343                     try:
2344                         dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2345                     except OSError, e:
2346                         # Likewise
2347                         utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
2348                         return
2349                 else:
2350                     raise
2351             # If we got here, we own the destination file, so we can
2352             # safely overwrite it.
2353             utils.move(file_entry, dest_file, 1, perms=0660)
2354             os.close(dest_fd)
2355
2356     ###########################################################################
2357     def do_reject (self, manual=0, reject_message="", notes=""):
2358         """
2359         Reject an upload. If called without a reject message or C{manual} is
2360         true, spawn an editor so the user can write one.
2361
2362         @type manual: bool
2363         @param manual: manual or automated rejection
2364
2365         @type reject_message: string
2366         @param reject_message: A reject message
2367
2368         @return: 0 normally, or 1 if the rejection was abandoned by the user.
2369
2370         """
2371         # If we weren't given a manual rejection message, spawn an
2372         # editor so the user can add one in...
2373         if manual and not reject_message:
2374             (fd, temp_filename) = utils.temp_filename()
2375             temp_file = os.fdopen(fd, 'w')
2376             if len(notes) > 0:
2377                 for note in notes:
2378                     temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
2379                                     % (note.author, note.version, note.notedate, note.comment))
2380             temp_file.close()
2381             editor = os.environ.get("EDITOR","vi")
2382             answer = 'E'
2383             while answer == 'E':
2384                 os.system("%s %s" % (editor, temp_filename))
2385                 temp_fh = utils.open_file(temp_filename)
2386                 reject_message = "".join(temp_fh.readlines())
2387                 temp_fh.close()
2388                 print "Reject message:"
2389                 print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
2390                 prompt = "[R]eject, Edit, Abandon, Quit ?"
2391                 answer = "XXX"
2392                 while prompt.find(answer) == -1:
2393                     answer = utils.our_raw_input(prompt)
2394                     m = re_default_answer.search(prompt)
2395                     if answer == "":
2396                         answer = m.group(1)
2397                     answer = answer[:1].upper()
2398             os.unlink(temp_filename)
2399             if answer == 'A':
2400                 return 1
2401             elif answer == 'Q':
2402                 sys.exit(0)
2403
2404         print "Rejecting.\n"
2405
2406         cnf = Config()
2407
2408         reason_filename = self.pkg.changes_file[:-8] + ".reason"
2409         reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2410
2411         # Move all the files into the reject directory
2412         reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2413         self.force_reject(reject_files)
2414
2415         # If we fail here someone is probably trying to exploit the race
2416         # so let's just raise an exception ...
2417         if os.path.exists(reason_filename):
2418             os.unlink(reason_filename)
2419         reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2420
2421         rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
2422
2423         self.update_subst()
2424         if not manual:
2425             self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2426             self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2427             self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2428             os.write(reason_fd, reject_message)
2429             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2430         else:
2431             # Build up the rejection email
2432             user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2433             self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2434             self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2435             self.Subst["__REJECT_MESSAGE__"] = ""
2436             self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2437             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2438             # Write the rejection email out as the <foo>.reason file
2439             os.write(reason_fd, reject_mail_message)
2440
2441         del self.Subst["__REJECTOR_ADDRESS__"]
2442         del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2443         del self.Subst["__CC__"]
2444
2445         os.close(reason_fd)
2446
2447         # Send the rejection mail
2448         utils.send_mail(reject_mail_message)
2449
2450         if self.logger:
2451             self.logger.log(["rejected", self.pkg.changes_file])
2452
2453         return 0
2454
2455     ################################################################################
2456     def in_override_p(self, package, component, suite, binary_type, filename, session):
2457         """
2458         Check if a package already has override entries in the DB
2459
2460         @type package: string
2461         @param package: package name
2462
2463         @type component: string
2464         @param component: component name
2465
2466         @type suite: string
2467         @param suite: suite name
2468
2469         @type binary_type: string
2470         @param binary_type: type of the package
2471
2472         @type filename: string
2473         @param filename: filename we check
2474
2475         @return: the database result. But no one cares anyway.
2476
2477         """
2478
2479         cnf = Config()
2480
2481         if binary_type == "": # must be source
2482             file_type = "dsc"
2483         else:
2484             file_type = binary_type
2485
2486         # Override suite name; used for example with proposed-updates
2487         oldsuite = get_suite(suite, session)
2488         if oldsuite is not None and oldsuite.overridesuite:
2489             suite = oldsuite.overridesuite
2490
2491         result = get_override(package, suite, component, file_type, session)
2492
2493         # If checking for a source package, fall back on the binary override type
2494         if file_type == "dsc" and len(result) < 1:
2495             result = get_override(package, suite, component, ['deb', 'udeb'], session)
2496
2497         # Remember the section and priority so we can check them later if appropriate
2498         if len(result) > 0:
2499             result = result[0]
2500             self.pkg.files[filename]["override section"] = result.section.section
2501             self.pkg.files[filename]["override priority"] = result.priority.priority
2502             return result
2503
2504         return None
2505
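    # Illustrative use (mirroring accepted_checks() below); callers mostly
    # just test the result for truth:
    #
    #   if self.in_override_p(entry["package"], entry["component"], suite,
    #                         entry.get("dbtype", ""), checkfile, session):
    #       pass    # known package, not NEW
    #   else:
    #       pass    # no override entry, so it is NEW for this suite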
2506     ################################################################################
2507     def get_anyversion(self, sv_list, suite):
2508         """
2509         @type sv_list: list
2510         @param sv_list: list of (suite, version) tuples to check
2511
2512         @type suite: string
2513         @param suite: suite name
2514
2515         @return: highest version in sv_list for C{suite} or any suite it Enhances; None if nothing matches
2516         """
2517         Cnf = Config()
2518         anyversion = None
2519         anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
2520         for (s, v) in sv_list:
2521             if s in [ x.lower() for x in anysuite ]:
2522                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2523                     anyversion = v
2524
2525         return anyversion
2526
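    # Worked example (hypothetical data): given
    #   sv_list = [("unstable", "1.0-1"), ("experimental", "1.2-1")]
    # and, in the conf file (flattened key form),
    #   Suite::unstable::VersionChecks::Enhances { experimental; };
    # get_anyversion(sv_list, "unstable") looks at both suites and returns
    # "1.2-1", the highest version seen.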
2527     ################################################################################
2528
2529     def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2530         """
2531         @type sv_list: list
2532         @param sv_list: list of (suite, version) tuples to check
2533
2534         @type filename: string
2535         @param filename: name of the file being checked (used in messages)
2536
2537         @type new_version: string
2538         @param new_version: version of the incoming package
2539
2540         Ensure versions are newer than existing packages in target
2541         suites and that cross-suite version checking rules as
2542         set out in the conf file are satisfied.
2543         """
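        # The rules come from the conf file; a hypothetical fragment, shown
        # in flattened key form for illustration:
        #
        #   Suite::testing::VersionChecks::MustBeNewerThan { stable; };
        #   Suite::testing::VersionChecks::MustBeOlderThan { unstable; };
        #
        # i.e. an upload targeted at testing must be newer than what stable
        # carries and older than what unstable carries.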
2544
2545         cnf = Config()
2546
2547         # Check versions for each target suite
2548         for target_suite in self.pkg.changes["distribution"].keys():
2549             must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
2550             must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
2551
2552             # Enforce "must be newer than target suite" even if conffile omits it
2553             if target_suite not in must_be_newer_than:
2554                 must_be_newer_than.append(target_suite)
2555
2556             for (suite, existent_version) in sv_list:
2557                 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
2558
2559                 if suite in must_be_newer_than and sourceful and vercmp < 1:
2560                     self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2561
2562                 if suite in must_be_older_than and vercmp > -1:
2563                     cansave = 0
2564
2565                     if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2566                         # we really use the other suite, ignoring the conflicting one ...
2567                         addsuite = self.pkg.changes["distribution-version"][suite]
2568
2569                         add_version = self.get_anyversion(sv_list, addsuite)
2570                         target_version = self.get_anyversion(sv_list, target_suite)
2571
2572                         if not add_version:
2573                             # not add_version can only happen if we map to a suite
2574                             # that doesn't enhance the suite we're propup'ing from.
2575                             # so "propup-ver x a b c; map a d" is a problem only if
2576                             # d doesn't enhance a.
2577                             #
2578                             # i think we could always propagate in this case, rather
2579                             # than complaining. either way, this isn't a REJECT issue
2580                             #
2581                             # And - we really should complain to the dorks who configured dak
2582                             self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyway" % (suite, addsuite))
2583                             self.pkg.changes.setdefault("propdistribution", {})
2584                             self.pkg.changes["propdistribution"][addsuite] = 1
2585                             cansave = 1
2586                         elif not target_version:
2587                             # not target_version is true when the package is NEW
2588                             # we could just stick with the "...old version..." REJECT
2589                             # for this, I think.
2590                             self.rejects.append("Won't propagate NEW packages.")
2591                         elif apt_pkg.VersionCompare(new_version, add_version) < 0:
2592                             # propagation would be redundant. no need to reject though.
2593                             self.warnings.append("ignoring version conflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2594                             cansave = 1
2595                         elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2596                              apt_pkg.VersionCompare(add_version, target_version) >= 0:
2597                             # propagate!!
2598                             self.warnings.append("Propagating upload to %s" % (addsuite))
2599                             self.pkg.changes.setdefault("propdistribution", {})
2600                             self.pkg.changes["propdistribution"][addsuite] = 1
2601                             cansave = 1
2602
2603                     if not cansave:
2604                         self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2605
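    # Note: the "distribution-version" handling above is the "propup"
    # mechanism: instead of rejecting outright when a new version would
    # violate MustBeOlderThan for some suite, dak may also propagate the
    # upload to the suite that one maps to, keeping the cross-suite version
    # ordering intact.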
2606     ################################################################################
2607     def check_binary_against_db(self, filename, session):
2608         # Ensure version is sane
2609         self.cross_suite_version_check( \
2610             get_suite_version_by_package(self.pkg.files[filename]["package"], \
2611                 self.pkg.files[filename]["architecture"], session),
2612             filename, self.pkg.files[filename]["version"], sourceful=False)
2613
2614         # Check for any existing copies of the file
2615         q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2616         q = q.filter_by(version=self.pkg.files[filename]["version"])
2617         q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
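        # e.g. (hypothetical) re-uploading hello_2.8-1_amd64.deb when that
        # exact package/version/architecture combination is already in the
        # archive is refused below.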
2618
2619         if q.count() > 0:
2620             self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
2621
2622     ################################################################################
2623
2624     def check_source_against_db(self, filename, session):
2625         source = self.pkg.dsc.get("source")
2626         version = self.pkg.dsc.get("version")
2627
2628         # Ensure version is sane
2629         self.cross_suite_version_check( \
2630             get_suite_version_by_source(source, session), filename, version,
2631             sourceful=True)
2632
2633     ################################################################################
2634     def check_dsc_against_db(self, filename, session):
2635         """
2636
2637         @warning: NB: this function can remove entries from the 'files' index [if
2638          the orig tarball is a duplicate of the one in the archive]; if
2639          you're iterating over 'files' and call this function as part of
2640          the loop, be sure to add a check to the top of the loop to
2641          ensure you haven't just tried to dereference the deleted entry.
2642
2643         """
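        # A safe-iteration sketch (the same pattern recheck() below uses):
        #
        #   for f in self.pkg.files.keys():
        #       if not self.pkg.files.has_key(f):
        #           continue    # entry was removed by check_dsc_against_db()
        #       ...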
2644
2645         Cnf = Config()
2646         self.pkg.orig_files = {} # XXX: do we need to clear it?
2647         orig_files = self.pkg.orig_files
2648
2649         # Try and find all files mentioned in the .dsc.  This has
2650         # to work harder to cope with the multiple possible
2651         # locations of an .orig.tar.gz.
2652         # The ordering on the select is needed to pick the newest orig
2653         # when it exists in multiple places.
2654         for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2655             found = None
2656             if self.pkg.files.has_key(dsc_name):
2657                 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2658                 actual_size = int(self.pkg.files[dsc_name]["size"])
2659                 found = "%s in incoming" % (dsc_name)
2660
2661                 # Check the file does not already exist in the archive
2662                 ql = get_poolfile_like_name(dsc_name, session)
2663
2664                 # Keep only pool files whose name ends in dsc_name;
2665                 # removing entries from ql while iterating over it would
2666                 # skip elements, so build a filtered list instead.
2667                 ql = [i for i in ql if i.filename.endswith(dsc_name)]
2668
2669                 # "[dak] has not broken them.  [dak] has fixed a
2670                 # brokenness.  Your crappy hack exploited a bug in
2671                 # the old dinstall."
2672                 #
2673                 # "(Come on!  I thought it was always obvious that
2674                 # one just doesn't release different files with
2675                 # the same name and version.)"
2676                 #                        -- ajk@ on d-devel@l.d.o
2677
2678                 if len(ql) > 0:
2679                     # Ignore exact matches for .orig.tar.gz
2680                     match = 0
2681                     if re_is_orig_source.match(dsc_name):
2682                         for i in ql:
2683                             if self.pkg.files.has_key(dsc_name) and \
2684                                int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2685                                self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2686                                 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2687                                 # TODO: Don't delete the entry, just mark it as not needed
2688                                 # This would fix the stupidity of changing something we often iterate over
2689                                 # whilst we're doing it
2690                                 del self.pkg.files[dsc_name]
2691                                 dsc_entry["files id"] = i.file_id
2692                                 if not orig_files.has_key(dsc_name):
2693                                     orig_files[dsc_name] = {}
2694                                 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2695                                 match = 1
2696
2697                                 # Don't bitch that we couldn't find this file later
2698                                 try:
2699                                     self.later_check_files.remove(dsc_name)
2700                                 except ValueError:
2701                                     pass
2702
2703
2704                     if not match:
2705                         self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2706
2707             elif re_is_orig_source.match(dsc_name):
2708                 # Check in the pool
2709                 ql = get_poolfile_like_name(dsc_name, session)
2710
2711                 # Keep only pool files whose name ends in dsc_name
2712                 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2713                 # Removing entries from ql while iterating over it would
2714                 # skip elements, so build a filtered list instead.
2715                 ql = [i for i in ql if i.filename.endswith(dsc_name)]
2716
2717                 if len(ql) > 0:
2718                     # Unfortunately, we may get more than one match here if,
2719                     # for example, the package was in potato but had an -sa
2720                     # upload in woody.  So we need to choose the right one.
2721
2722                     # default to something sane in case we don't match any or have only one
2723                     x = ql[0]
2724
2725                     if len(ql) > 1:
2726                         for i in ql:
2727                             old_file = os.path.join(i.location.path, i.filename)
2728                             old_file_fh = utils.open_file(old_file)
2729                             actual_md5 = apt_pkg.md5sum(old_file_fh)
2730                             old_file_fh.close()
2731                             actual_size = os.stat(old_file)[stat.ST_SIZE]
2732                             if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2733                                 x = i
2734
2735                     old_file = os.path.join(x.location.path, x.filename)  # use the chosen match x, not the loop variable
2736                     old_file_fh = utils.open_file(old_file)
2737                     actual_md5 = apt_pkg.md5sum(old_file_fh)
2738                     old_file_fh.close()
2739                     actual_size = os.stat(old_file)[stat.ST_SIZE]
2740                     found = old_file
2741                     suite_type = x.location.archive_type
2742                     # need this for updating dsc_files in install()
2743                     dsc_entry["files id"] = x.file_id
2744                     # See install() in process-accepted...
2745                     if not orig_files.has_key(dsc_name):
2746                         orig_files[dsc_name] = {}
2747                     orig_files[dsc_name]["id"] = x.file_id
2748                     orig_files[dsc_name]["path"] = old_file
2749                     orig_files[dsc_name]["location"] = x.location.location_id
2750                 else:
2751                     # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2752                     # Not there? Check the queue directories...
2753                     for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2754                         if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2755                             continue
2756                         in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2757                         if os.path.exists(in_otherdir):
2758                             in_otherdir_fh = utils.open_file(in_otherdir)
2759                             actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2760                             in_otherdir_fh.close()
2761                             actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2762                             found = in_otherdir
2763                             if not orig_files.has_key(dsc_name):
2764                                 orig_files[dsc_name] = {}
2765                             orig_files[dsc_name]["path"] = in_otherdir
2766
2767                     if not found:
2768                         self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2769                         continue
2770             else:
2771                 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2772                 continue
2773             if actual_md5 != dsc_entry["md5sum"]:
2774                 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2775             if actual_size != int(dsc_entry["size"]):
2776                 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2777
2778     ################################################################################
2779     # This is used by process-new and process-holding to recheck a changes file
2780     # at the time we're running.  It mainly wraps various other internal functions
2781     # and is similar to accepted_checks - these should probably be tidied up
2782     # and combined
2783     def recheck(self, session):
2784         cnf = Config()
2785         for f in self.pkg.files.keys():
2786             # The .orig.tar.gz can disappear out from under us if it's a
2787             # duplicate of one in the archive.
2788             if not self.pkg.files.has_key(f):
2789                 continue
2790
2791             entry = self.pkg.files[f]
2792
2793             # Check that the source still exists
2794             if entry["type"] == "deb":
2795                 source_version = entry["source version"]
2796                 source_package = entry["source package"]
2797                 if not self.pkg.changes["architecture"].has_key("source") \
2798                    and not source_exists(source_package, source_version, \
2799                     suites = self.pkg.changes["distribution"].keys(), session = session):
2800                     source_epochless_version = re_no_epoch.sub('', source_version)
2801                     dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2802                     found = False
2803                     for q in ["Embargoed", "Unembargoed", "Newstage"]:
2804                         if cnf.has_key("Dir::Queue::%s" % (q)):
2805                             if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
2806                                 found = True
2807                     if not found:
2808                         self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2809
2810             # Version and file overwrite checks
2811             if entry["type"] == "deb":
2812                 self.check_binary_against_db(f, session)
2813             elif entry["type"] == "dsc":
2814                 self.check_source_against_db(f, session)
2815                 self.check_dsc_against_db(f, session)
2816
2817     ################################################################################
2818     def accepted_checks(self, overwrite_checks, session):
2819         # Recheck anything that relies on the database, since that's not
2820         # frozen between accept and our run time when called from p-a.
2821
2822         # overwrite_checks is set to False when installing to stable/oldstable
2823
2824         propagate = {}
2825         nopropagate = {}
2826
2827         # Find the .dsc (again)
2828         dsc_filename = None
2829         for f in self.pkg.files.keys():
2830             if self.pkg.files[f]["type"] == "dsc":
2831                 dsc_filename = f
2832
2833         for checkfile in self.pkg.files.keys():
2834             # The .orig.tar.gz can disappear out from under us if it's a
2835             # duplicate of one in the archive.
2836             if not self.pkg.files.has_key(checkfile):
2837                 continue
2838
2839             entry = self.pkg.files[checkfile]
2840
2841             # Check that the source still exists
2842             if entry["type"] == "deb":
2843                 source_version = entry["source version"]
2844                 source_package = entry["source package"]
2845                 if not self.pkg.changes["architecture"].has_key("source") \
2846                    and not source_exists(source_package, source_version, \
2847                     suites = self.pkg.changes["distribution"].keys(), \
2848                     session = session):
2849                     self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2850
2851             # Version and file overwrite checks
2852             if overwrite_checks:
2853                 if entry["type"] == "deb":
2854                     self.check_binary_against_db(checkfile, session)
2855                 elif entry["type"] == "dsc":
2856                     self.check_source_against_db(checkfile, session)
2857                     self.check_dsc_against_db(dsc_filename, session)
2858
2859             # propagate in the case it is in the override tables:
2860             for suite in self.pkg.changes.get("propdistribution", {}).keys():
2861                 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2862                     propagate[suite] = 1
2863                 else:
2864                     nopropagate[suite] = 1
2865
2866         for suite in propagate.keys():
2867             if suite in nopropagate:
2868                 continue
2869             self.pkg.changes["distribution"][suite] = 1
2870
2871         for checkfile in self.pkg.files.keys():
2872             entry = self.pkg.files[checkfile]  # check it is still in the override tables; 'entry' from the loop above would be stale here
2873             for suite in self.pkg.changes["distribution"].keys():
2874                 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2875                     self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2876
2877     ################################################################################
2878     # If any file of an upload has a recent mtime then chances are good
2879     # the file is still being uploaded.
2880
2881     def upload_too_new(self):
2882         cnf = Config()
2883         too_new = False
2884         # Move back to the original directory to get accurate time stamps
2885         cwd = os.getcwd()
2886         os.chdir(self.pkg.directory)
2887         file_list = self.pkg.files.keys()
2888         file_list.extend(self.pkg.dsc_files.keys())
2889         file_list.append(self.pkg.changes_file)
2890         for f in file_list:
2891             try:
2892                 last_modified = time.time()-os.path.getmtime(f)
2893                 if last_modified < int(cnf["Dinstall::SkipTime"]):
2894                     too_new = True
2895                     break
2896             except OSError:
2897                 pass  # file may have vanished meanwhile; treat it as not too new
2898
2899         os.chdir(cwd)
2900         return too_new
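    # Example (hypothetical setting): with "Dinstall::SkipTime 300;" in the
    # conf file, any file modified less than five minutes ago marks the
    # whole upload as too new to process yet.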
2901
2902     def store_changelog(self):
2903
2904         # Skip binary-only upload if it is not a bin-NMU
2905         if not self.pkg.changes['architecture'].has_key('source'):
2906             from daklib.regexes import re_bin_only_nmu
2907             if not re_bin_only_nmu.search(self.pkg.changes['version']):
2908                 return
2909
2910         session = DBConn().session()
2911
2912         # Check if upload already has a changelog entry
2913         query = """SELECT changelog_id FROM changes WHERE source = :source
2914                    AND version = :version AND architecture = :architecture AND changelog_id != 0"""
2915         if session.execute(query, {'source': self.pkg.changes['source'], \
2916                                    'version': self.pkg.changes['version'], \
2917                                    'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
2918             session.commit()
2919             return
2920
2921         # Add current changelog text into changelogs_text table, return created ID
2922         query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
2923         ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]
2924
2925         # Link ID to the upload available in changes table
2926         query = """UPDATE changes SET changelog_id = :id WHERE source = :source
2927                    AND version = :version AND architecture = :architecture"""
2928         session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
2929                                 'version': self.pkg.changes['version'], \
2930                                 'architecture': " ".join(self.pkg.changes['architecture'].keys())})
2931
2932         session.commit()
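    # Net effect (illustrative): the changes row for this upload now carries
    # a changelog_id pointing at the new changelogs_text row, and calling
    # store_changelog() again for the same source/version/architecture is a
    # no-op thanks to the check above.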