# Source: git.decadent.org.uk Git - dak.git - daklib/queue.py
# Commit: "Make use of Package-Set in determine_new"
1 #!/usr/bin/env python
2 # vim:set et sw=4:
3
4 """
5 Queue utility functions for dak
6
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009, 2010  Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
11 """
12
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
17
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
21 # GNU General Public License for more details.
22
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
26
27 ###############################################################################
28
29 import errno
30 import os
31 import stat
32 import sys
33 import time
34 import apt_inst
35 import apt_pkg
36 import utils
37 import commands
38 import shutil
39 import textwrap
40 from types import *
41 from sqlalchemy.sql.expression import desc
42 from sqlalchemy.orm.exc import NoResultFound
43
44 import yaml
45
46 from dak_exceptions import *
47 from changes import *
48 from regexes import *
49 from config import Config
50 from holding import Holding
51 from urgencylog import UrgencyLog
52 from dbconn import *
53 from summarystats import SummaryStats
54 from utils import parse_changes, check_dsc_files, build_package_set
55 from textutils import fix_maintainer
56 from lintian import parse_lintian_output, generate_reject_messages
57 from contents import UnpackedSource
58
59 # suppress some deprecation warnings in squeeze related to apt_pkg
60 # module
61 import warnings
62 warnings.filterwarnings('ignore', \
63     "apt_pkg.ParseSection\(\) is deprecated. Please see apt_pkg\.TagSection\(\) for the replacement\.", \
64     DeprecationWarning)
65 warnings.filterwarnings('ignore', \
66     "Attribute 'Find' of the 'apt_pkg\.TagSection' object is deprecated, use 'find' instead\.", \
67     DeprecationWarning)
68
69 ###############################################################################
70
def get_type(f, session):
    """
    Get the file type of C{f}

    @type f: dict
    @param f: file entry from Changes object

    @type session: SQLA Session
    @param session: SQL Alchemy session object

    @rtype: string
    @return: filetype

    """
    # Work out the type: prefer an explicit dbtype hint, then recognise
    # source packages by extension; anything else is fatal.
    if "dbtype" in f:
        file_type = f["dbtype"]
    elif re_source_ext.match(f["type"]):
        file_type = "dsc"
    else:
        file_type = f["type"]
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (file_type))

    # The type must also have a corresponding override-type row in the DB.
    if get_override_type(file_type, session) is None:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))

    return file_type
100
101 ################################################################################
102
103 # Determine what parts in a .changes are NEW
104
105 def determine_new(filename, changes, files, warn=1, session = None, dsc = None):
106     """
107     Determine what parts in a C{changes} file are NEW.
108
109     @type filename: str
110     @param filename: changes filename
111
112     @type changes: Upload.Pkg.changes dict
113     @param changes: Changes dictionary
114
115     @type files: Upload.Pkg.files dict
116     @param files: Files dictionary
117
118     @type warn: bool
119     @param warn: Warn if overrides are added for (old)stable
120
121     @type dsc: Upload.Pkg.dsc dict
122     @param dsc: (optional); Dsc dictionary
123
124     @rtype: dict
125     @return: dictionary of NEW components.
126
127     """
128     # TODO: This should all use the database instead of parsing the changes
129     # file again
130     new = {}
131     byhand = {}
132
133     dbchg = get_dbchange(filename, session)
134     if dbchg is None:
135         print "Warning: cannot find changes file in database; won't check byhand"
136
137     # Try to get the Package-Set field from an included .dsc file (if possible).
138     if dsc:
139         new = build_package_set(dsc, session)
140
141     # Build up a list of potentially new things
142     for name, f in files.items():
143         # Keep a record of byhand elements
144         if f["section"] == "byhand":
145             byhand[name] = 1
146             continue
147
148         pkg = f["package"]
149         priority = f["priority"]
150         section = f["section"]
151         file_type = get_type(f, session)
152         component = f["component"]
153
154         if file_type == "dsc":
155             priority = "source"
156
157         if not new.has_key(pkg):
158             new[pkg] = {}
159             new[pkg]["priority"] = priority
160             new[pkg]["section"] = section
161             new[pkg]["type"] = file_type
162             new[pkg]["component"] = component
163             new[pkg]["files"] = []
164         else:
165             old_type = new[pkg]["type"]
166             if old_type != file_type:
167                 # source gets trumped by deb or udeb
168                 if old_type == "dsc":
169                     new[pkg]["priority"] = priority
170                     new[pkg]["section"] = section
171                     new[pkg]["type"] = file_type
172                     new[pkg]["component"] = component
173
174         new[pkg]["files"].append(name)
175
176         if f.has_key("othercomponents"):
177             new[pkg]["othercomponents"] = f["othercomponents"]
178
179     # Fix up the list of target suites
180     cnf = Config()
181     for suite in changes["suite"].keys():
182         oldsuite = get_suite(suite, session)
183         if not oldsuite:
184             print "WARNING: Invalid suite %s found" % suite
185             continue
186
187         if oldsuite.overridesuite:
188             newsuite = get_suite(oldsuite.overridesuite, session)
189
190             if newsuite:
191                 print "INFORMATION: Using overrides from suite %s instead of suite %s" % (
192                     oldsuite.overridesuite, suite)
193                 del changes["suite"][suite]
194                 changes["suite"][oldsuite.overridesuite] = 1
195             else:
196                 print "WARNING: Told to use overridesuite %s for %s but it doesn't exist.  Bugger" % (
197                     oldsuite.overridesuite, suite)
198
199     # Check for unprocessed byhand files
200     if dbchg is not None:
201         for b in byhand.keys():
202             # Find the file entry in the database
203             found = False
204             for f in dbchg.files:
205                 if f.filename == b:
206                     found = True
207                     # If it's processed, we can ignore it
208                     if f.processed:
209                         del byhand[b]
210                     break
211
212             if not found:
213                 print "Warning: Couldn't find BYHAND item %s in the database; assuming unprocessed"
214
215     # Check for new stuff
216     for suite in changes["suite"].keys():
217         for pkg in new.keys():
218             ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
219             if len(ql) > 0:
220                 for file_entry in new[pkg]["files"]:
221                     if files[file_entry].has_key("new"):
222                         del files[file_entry]["new"]
223                 del new[pkg]
224
225     if warn:
226         for s in ['stable', 'oldstable']:
227             if changes["suite"].has_key(s):
228                 print "WARNING: overrides will be added for %s!" % s
229         for pkg in new.keys():
230             if new[pkg].has_key("othercomponents"):
231                 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
232
233     return new, byhand
234
235 ################################################################################
236
def check_valid(new, session = None):
    """
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages can not be udeb
      - source priority can only be assigned to dsc file types

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.

    """
    for pkg in new.keys():
        section_name = new[pkg]["section"]
        priority_name = new[pkg]["priority"]
        file_type = new[pkg]["type"]

        # Resolve the section name to its database id (-1 when unknown).
        section = get_section(section_name, session)
        if section is None:
            new[pkg]["section id"] = -1
        else:
            new[pkg]["section id"] = section.section_id

        # Resolve the priority name to its database id (-1 when unknown).
        priority = get_priority(priority_name, session)
        if priority is None:
            new[pkg]["priority id"] = -1
        else:
            new[pkg]["priority id"] = priority.priority_id

        # Sanity checks
        di = section_name.find("debian-installer") != -1

        # If d-i, we must be udeb and vice-versa
        if     (di and file_type not in ("udeb", "dsc")) or \
           (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1

        # If dsc we need to be source and vice-versa.  Compare the textual
        # priority name: 'priority' is a database object (or None) here and
        # can never equal the string "source", so the old comparison against
        # 'priority' misfired.
        if (priority_name == "source" and file_type != "dsc") or \
           (priority_name != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1
278
279 ###############################################################################
280
# Used by Upload.check_timestamps
class TarTime(object):
    """Records tar members whose mtime lies outside an allowed time window."""

    def __init__(self, future_cutoff, past_cutoff):
        self.reset()
        self.future_cutoff = future_cutoff
        self.past_cutoff = past_cutoff

    def reset(self):
        """Forget any previously recorded offending files."""
        self.future_files = {}
        self.ancient_files = {}

    def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
        """Record Name as future/ancient when MTime is beyond either cutoff."""
        too_new = MTime > self.future_cutoff
        too_old = MTime < self.past_cutoff
        if too_new:
            self.future_files[Name] = MTime
        if too_old:
            self.ancient_files[Name] = MTime
297
298 ###############################################################################
299
def prod_maintainer(notes, upload):
    """
    Interactively compose and send a "prod" mail about an upload.

    The existing queue notes seed an editor session; the operator can then
    (P)rod, (E)dit again, (A)bandon or (Q)uit.

    @type notes: list
    @param notes: comment objects whose .comment text pre-fills the editor

    @type upload: Upload
    @param upload: upload whose Subst map supplies the mail addressing
    """
    cnf = Config()

    # Here we prepare an editor and get them ready to prod...
    (fd, temp_filename) = utils.temp_filename()
    temp_file = os.fdopen(fd, 'w')
    for note in notes:
        temp_file.write(note.comment)
    temp_file.close()
    editor = os.environ.get("EDITOR","vi")
    answer = 'E'
    # Re-open the editor for as long as the operator keeps choosing (E)dit.
    while answer == 'E':
        os.system("%s %s" % (editor, temp_filename))
        temp_fh = utils.open_file(temp_filename)
        prod_message = "".join(temp_fh.readlines())
        temp_fh.close()
        print "Prod message:"
        print utils.prefix_multi_line_string(prod_message,"  ",include_blank_lines=1)
        prompt = "[P]rod, Edit, Abandon, Quit ?"
        answer = "XXX"
        # Keep asking until a letter from the prompt is given; empty input
        # selects the default (the bracketed letter).
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            m = re_default_answer.search(prompt)
            if answer == "":
                answer = m.group(1)
            answer = answer[:1].upper()
    os.unlink(temp_filename)
    if answer == 'A':
        return
    elif answer == 'Q':
        # NOTE(review): end() is not defined in this chunk — presumably
        # provided elsewhere in this module; confirm it is in scope.
        end()
        sys.exit(0)
    # Otherwise, do the proding...
    user_email_address = utils.whoami() + " <%s>" % (
        cnf["Dinstall::MyAdminAddress"])

    Subst = upload.Subst

    Subst["__FROM_ADDRESS__"] = user_email_address
    Subst["__PROD_MESSAGE__"] = prod_message
    Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]

    # Render the prod mail template with the substitution map.
    prod_mail_message = utils.TemplateSubst(
        Subst,cnf["Dir::Templates"]+"/process-new.prod")

    # Send the prod mail
    utils.send_mail(prod_mail_message)

    print "Sent prodding message"
349
350 ################################################################################
351
def edit_note(note, upload, session, trainee=False):
    """
    Interactively edit a note for C{upload} and commit it as a NewComment.

    The C{note} argument is not read by this function; the note text comes
    entirely from the editor session.

    @type upload: Upload
    @param upload: upload the note is attached to (source/version taken
                   from its changes dict)

    @type session: SQLA Session
    @param session: session the NewComment is added to and committed on

    @type trainee: bool
    @param trainee: recorded on the comment's trainee flag
    """
    # Write the current data to a temporary file
    (fd, temp_filename) = utils.temp_filename()
    editor = os.environ.get("EDITOR","vi")
    answer = 'E'
    # Re-open the editor for as long as the operator keeps choosing (E)dit.
    while answer == 'E':
        os.system("%s %s" % (editor, temp_filename))
        temp_file = utils.open_file(temp_filename)
        newnote = temp_file.read().rstrip()
        temp_file.close()
        print "New Note:"
        print utils.prefix_multi_line_string(newnote,"  ")
        prompt = "[D]one, Edit, Abandon, Quit ?"
        answer = "XXX"
        # Empty input selects the default (bracketed) answer from the prompt.
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            m = re_default_answer.search(prompt)
            if answer == "":
                answer = m.group(1)
            answer = answer[:1].upper()
    os.unlink(temp_filename)
    if answer == 'A':
        return
    elif answer == 'Q':
        # NOTE(review): end() is not defined in this chunk — presumably
        # provided elsewhere in this module; confirm it is in scope.
        end()
        sys.exit(0)

    # Persist the note against the upload's source package and version.
    comment = NewComment()
    comment.package = upload.pkg.changes["source"]
    comment.version = upload.pkg.changes["version"]
    comment.comment = newnote
    comment.author  = utils.whoami()
    comment.trainee = trainee
    session.add(comment)
    session.commit()
387
388 ###############################################################################
389
# suite names DMs can upload to
dm_suites = ['unstable', 'experimental']

def get_newest_source(source, session):
    'returns the newest DBSource object in dm_suites'
    ## the most recent version of the package uploaded to unstable or
    ## experimental includes the field "DM-Upload-Allowed: yes" in the source
    ## section of its control file
    query = session.query(DBSource)
    query = query.filter_by(source = source)
    query = query.filter(DBSource.suites.any(Suite.suite_name.in_(dm_suites)))
    query = query.order_by(desc('source.version'))
    return query.first()
402
def get_suite_version_by_source(source, session):
    'returns a list of tuples (suite_name, version) for source package'
    query = session.query(Suite.suite_name, DBSource.version)
    query = query.join(Suite.sources).filter_by(source = source)
    return query.all()
408
def get_source_by_package_and_suite(package, suite_name, session):
    '''
    returns a DBSource query filtered by DBBinary.package and this package's
    suite_name
    '''
    query = session.query(DBSource)
    query = query.join(DBSource.binaries).filter_by(package = package)
    return query.join(DBBinary.suites).filter_by(suite_name = suite_name)
417
def get_suite_version_by_package(package, arch_string, session):
    '''
    returns a list of tuples (suite_name, version) for binary package and
    arch_string
    '''
    # Arch "all" binaries satisfy every concrete architecture, so match both.
    query = session.query(Suite.suite_name, DBBinary.version)
    query = query.join(Suite.binaries).filter_by(package = package)
    query = query.join(DBBinary.architecture)
    return query.filter(Architecture.arch_string.in_([arch_string, 'all'])).all()
427
428 class Upload(object):
429     """
430     Everything that has to do with an upload processed.
431
432     """
    def __init__(self):
        """Create an empty Upload and initialise it via reset()."""
        # Optional logger; left unset here, assigned externally by callers.
        self.logger = None
        # Parsed changes/files/dsc state for the upload being processed.
        self.pkg = Changes()
        self.reset()
437
438     ###########################################################################
439
440     def reset (self):
441         """ Reset a number of internal variables."""
442
443         # Initialize the substitution template map
444         cnf = Config()
445         self.Subst = {}
446         self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
447         self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
448         self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
449         self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
450
451         self.rejects = []
452         self.warnings = []
453         self.notes = []
454
455         self.later_check_files = []
456
457         self.pkg.reset()
458
459     def package_info(self):
460         """
461         Format various messages from this Upload to send to the maintainer.
462         """
463
464         msgs = (
465             ('Reject Reasons', self.rejects),
466             ('Warnings', self.warnings),
467             ('Notes', self.notes),
468         )
469
470         msg = ''
471         for title, messages in msgs:
472             if messages:
473                 msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
474         msg += '\n\n'
475
476         return msg
477
478     ###########################################################################
    def update_subst(self):
        """ Set up the per-package template substitution mappings """

        cnf = Config()

        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not self.pkg.changes.has_key("architecture") or not \
           isinstance(self.pkg.changes["architecture"], dict):
            self.pkg.changes["architecture"] = { "Unknown" : "" }

        # and maintainer2047 may not exist.
        if not self.pkg.changes.has_key("maintainer2047"):
            self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]

        self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
        self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if self.pkg.changes["architecture"].has_key("source") and \
           self.pkg.changes["changedby822"] != "" and \
           (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):

            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
        else:
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")

        # Process policy doesn't set the fingerprint field and I don't want to make it
        # do it for now as I don't want to have to deal with the case where we accepted
        # the package into PU-NEW, but the fingerprint has gone away from the keyring in
        # the meantime so the package will be remarked as rejectable.  Urgh.
        # TODO: Fix this properly
        if self.pkg.changes.has_key('fingerprint'):
            # Short-lived session just to look up the signer's fingerprint.
            session = DBConn().session()
            fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
            # For sponsored uploads, also address the sponsor when an email
            # was recorded on the changes.
            if fpr and self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
                if self.pkg.changes.has_key("sponsoremail"):
                    self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
            session.close()

        # Bcc the package-tracking address (<source>@<tracking-server>) if configured.
        if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
            self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if cnf.get("Dinstall::OverrideMaintainer"):
            self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
            self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]

        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
        self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
        self.Subst["__SUITE__"] = ", ".join(self.pkg.changes["distribution"])
535
536     ###########################################################################
537     def load_changes(self, filename):
538         """
539         Load a changes file and setup a dictionary around it. Also checks for mandantory
540         fields  within.
541
542         @type filename: string
543         @param filename: Changes filename, full path.
544
545         @rtype: boolean
546         @return: whether the changes file was valid or not.  We may want to
547                  reject even if this is True (see what gets put in self.rejects).
548                  This is simply to prevent us even trying things later which will
549                  fail because we couldn't properly parse the file.
550         """
551         Cnf = Config()
552         self.pkg.changes_file = filename
553
554         # Parse the .changes field into a dictionary
555         try:
556             self.pkg.changes.update(parse_changes(filename))
557         except CantOpenError:
558             self.rejects.append("%s: can't read file." % (filename))
559             return False
560         except ParseChangesError, line:
561             self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
562             return False
563         except ChangesUnicodeError:
564             self.rejects.append("%s: changes file not proper utf-8" % (filename))
565             return False
566
567         # Parse the Files field from the .changes into another dictionary
568         try:
569             self.pkg.files.update(utils.build_file_list(self.pkg.changes))
570         except ParseChangesError, line:
571             self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
572             return False
573         except UnknownFormatError, format:
574             self.rejects.append("%s: unknown format '%s'." % (filename, format))
575             return False
576
577         # Check for mandatory fields
578         for i in ("distribution", "source", "binary", "architecture",
579                   "version", "maintainer", "files", "changes", "description"):
580             if not self.pkg.changes.has_key(i):
581                 # Avoid undefined errors later
582                 self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
583                 return False
584
585         # Strip a source version in brackets from the source field
586         if re_strip_srcver.search(self.pkg.changes["source"]):
587             self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])
588
589         # Ensure the source field is a valid package name.
590         if not re_valid_pkg_name.match(self.pkg.changes["source"]):
591             self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))
592
593         # Split multi-value fields into a lower-level dictionary
594         for i in ("architecture", "distribution", "binary", "closes"):
595             o = self.pkg.changes.get(i, "")
596             if o != "":
597                 del self.pkg.changes[i]
598
599             self.pkg.changes[i] = {}
600
601             for j in o.split():
602                 self.pkg.changes[i][j] = 1
603
604         # Fix the Maintainer: field to be RFC822/2047 compatible
605         try:
606             (self.pkg.changes["maintainer822"],
607              self.pkg.changes["maintainer2047"],
608              self.pkg.changes["maintainername"],
609              self.pkg.changes["maintaineremail"]) = \
610                    fix_maintainer (self.pkg.changes["maintainer"])
611         except ParseMaintError, msg:
612             self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
613                    % (filename, self.pkg.changes["maintainer"], msg))
614
615         # ...likewise for the Changed-By: field if it exists.
616         try:
617             (self.pkg.changes["changedby822"],
618              self.pkg.changes["changedby2047"],
619              self.pkg.changes["changedbyname"],
620              self.pkg.changes["changedbyemail"]) = \
621                    fix_maintainer (self.pkg.changes.get("changed-by", ""))
622         except ParseMaintError, msg:
623             self.pkg.changes["changedby822"] = ""
624             self.pkg.changes["changedby2047"] = ""
625             self.pkg.changes["changedbyname"] = ""
626             self.pkg.changes["changedbyemail"] = ""
627
628             self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
629                    % (filename, self.pkg.changes["changed-by"], msg))
630
631         # Ensure all the values in Closes: are numbers
632         if self.pkg.changes.has_key("closes"):
633             for i in self.pkg.changes["closes"].keys():
634                 if re_isanum.match (i) == None:
635                     self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))
636
637         # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
638         self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
639         self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])
640
641         # Check the .changes is non-empty
642         if not self.pkg.files:
643             self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
644             return False
645
646         # Changes was syntactically valid even if we'll reject
647         return True
648
649     ###########################################################################
650
    def check_distributions(self):
        "Check and map the Distribution field"

        Cnf = Config()

        # Handle suite mappings
        for m in Cnf.ValueList("SuiteMappings"):
            args = m.split()
            mtype = args[0]
            if mtype == "map" or mtype == "silent-map":
                # Unconditionally rename target suite 'source' to 'dest';
                # "silent-map" does the same without recording a note.
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    del self.pkg.changes["distribution"][source]
                    self.pkg.changes["distribution"][dest] = 1
                    if mtype != "silent-map":
                        self.notes.append("Mapping %s to %s." % (source, dest))
                if self.pkg.changes.has_key("distribution-version"):
                    if self.pkg.changes["distribution-version"].has_key(source):
                        self.pkg.changes["distribution-version"][source]=dest
            elif mtype == "map-unreleased":
                # Remap only when the upload carries an architecture the
                # source suite does not have.
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    for arch in self.pkg.changes["architecture"].keys():
                        if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
                            self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
                            del self.pkg.changes["distribution"][source]
                            self.pkg.changes["distribution"][dest] = 1
                            break
            elif mtype == "ignore":
                # Drop the suite from the target list, warning about it.
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    del self.pkg.changes["distribution"][suite]
                    self.warnings.append("Ignoring %s as a target suite." % (suite))
            elif mtype == "reject":
                # Refuse uploads targeted at this suite outright.
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    self.rejects.append("Uploads to %s are not accepted." % (suite))
            elif mtype == "propup-version":
                # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
                #
                # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
                if self.pkg.changes["distribution"].has_key(args[1]):
                    self.pkg.changes.setdefault("distribution-version", {})
                    for suite in args[2:]:
                        self.pkg.changes["distribution-version"][suite] = suite

        # Ensure there is (still) a target distribution
        if len(self.pkg.changes["distribution"].keys()) < 1:
            self.rejects.append("No valid distribution remaining.")

        # Ensure target distributions exist
        for suite in self.pkg.changes["distribution"].keys():
            if not Cnf.has_key("Suite::%s" % (suite)):
                self.rejects.append("Unknown distribution `%s'." % (suite))
705
706     ###########################################################################
707
708     def binary_file_checks(self, f, session):
709         cnf = Config()
710         entry = self.pkg.files[f]
711
712         # Extract package control information
713         deb_file = utils.open_file(f)
714         try:
715             control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
716         except:
717             self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
718             deb_file.close()
719             # Can't continue, none of the checks on control would work.
720             return
721
722         # Check for mandantory "Description:"
723         deb_file.seek(0)
724         try:
725             apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
726         except:
727             self.rejects.append("%s: Missing Description in binary package" % (f))
728             return
729
730         deb_file.close()
731
732         # Check for mandatory fields
733         for field in [ "Package", "Architecture", "Version" ]:
734             if control.Find(field) == None:
735                 # Can't continue
736                 self.rejects.append("%s: No %s field in control." % (f, field))
737                 return
738
739         # Ensure the package name matches the one give in the .changes
740         if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
741             self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
742
743         # Validate the package field
744         package = control.Find("Package")
745         if not re_valid_pkg_name.match(package):
746             self.rejects.append("%s: invalid package name '%s'." % (f, package))
747
748         # Validate the version field
749         version = control.Find("Version")
750         if not re_valid_version.match(version):
751             self.rejects.append("%s: invalid version number '%s'." % (f, version))
752
753         # Ensure the architecture of the .deb is one we know about.
754         default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
755         architecture = control.Find("Architecture")
756         upload_suite = self.pkg.changes["distribution"].keys()[0]
757
758         if      architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session = session)] \
759             and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session = session)]:
760             self.rejects.append("Unknown architecture '%s'." % (architecture))
761
762         # Ensure the architecture of the .deb is one of the ones
763         # listed in the .changes.
764         if not self.pkg.changes["architecture"].has_key(architecture):
765             self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
766
767         # Sanity-check the Depends field
768         depends = control.Find("Depends")
769         if depends == '':
770             self.rejects.append("%s: Depends field is empty." % (f))
771
772         # Sanity-check the Provides field
773         provides = control.Find("Provides")
774         if provides:
775             provide = re_spacestrip.sub('', provides)
776             if provide == '':
777                 self.rejects.append("%s: Provides field is empty." % (f))
778             prov_list = provide.split(",")
779             for prov in prov_list:
780                 if not re_valid_pkg_name.match(prov):
781                     self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
782
783         # If there is a Built-Using field, we need to check we can find the
784         # exact source version
785         built_using = control.Find("Built-Using")
786         if built_using:
787             try:
788                 entry["built-using"] = []
789                 for dep in apt_pkg.parse_depends(built_using):
790                     bu_s, bu_v, bu_e = dep[0]
791                     # Check that it's an exact match dependency and we have
792                     # some form of version
793                     if bu_e != "=" or len(bu_v) < 1:
794                         self.rejects.append("%s: Built-Using contains non strict dependency (%s %s %s)" % (f, bu_s, bu_e, bu_v))
795                     else:
796                         # Find the source id for this version
797                         bu_so = get_sources_from_name(bu_s, version=bu_v, session = session)
798                         if len(bu_so) != 1:
799                             self.rejects.append("%s: Built-Using (%s = %s): Cannot find source package" % (f, bu_s, bu_v))
800                         else:
801                             entry["built-using"].append( (bu_so[0].source, bu_so[0].version, ) )
802
803             except ValueError, e:
804                 self.rejects.append("%s: Cannot parse Built-Using field: %s" % (f, str(e)))
805
806
807         # Check the section & priority match those given in the .changes (non-fatal)
808         if     control.Find("Section") and entry["section"] != "" \
809            and entry["section"] != control.Find("Section"):
810             self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
811                                 (f, control.Find("Section", ""), entry["section"]))
812         if control.Find("Priority") and entry["priority"] != "" \
813            and entry["priority"] != control.Find("Priority"):
814             self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
815                                 (f, control.Find("Priority", ""), entry["priority"]))
816
817         entry["package"] = package
818         entry["architecture"] = architecture
819         entry["version"] = version
820         entry["maintainer"] = control.Find("Maintainer", "")
821
822         if f.endswith(".udeb"):
823             self.pkg.files[f]["dbtype"] = "udeb"
824         elif f.endswith(".deb"):
825             self.pkg.files[f]["dbtype"] = "deb"
826         else:
827             self.rejects.append("%s is neither a .deb or a .udeb." % (f))
828
829         entry["source"] = control.Find("Source", entry["package"])
830
831         # Get the source version
832         source = entry["source"]
833         source_version = ""
834
835         if source.find("(") != -1:
836             m = re_extract_src_version.match(source)
837             source = m.group(1)
838             source_version = m.group(2)
839
840         if not source_version:
841             source_version = self.pkg.files[f]["version"]
842
843         entry["source package"] = source
844         entry["source version"] = source_version
845
846         # Ensure the filename matches the contents of the .deb
847         m = re_isadeb.match(f)
848
849         #  package name
850         file_package = m.group(1)
851         if entry["package"] != file_package:
852             self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
853                                 (f, file_package, entry["dbtype"], entry["package"]))
854         epochless_version = re_no_epoch.sub('', control.Find("Version"))
855
856         #  version
857         file_version = m.group(2)
858         if epochless_version != file_version:
859             self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
860                                 (f, file_version, entry["dbtype"], epochless_version))
861
862         #  architecture
863         file_architecture = m.group(3)
864         if entry["architecture"] != file_architecture:
865             self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
866                                 (f, file_architecture, entry["dbtype"], entry["architecture"]))
867
868         # Check for existent source
869         source_version = entry["source version"]
870         source_package = entry["source package"]
871         if self.pkg.changes["architecture"].has_key("source"):
872             if source_version != self.pkg.changes["version"]:
873                 self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
874                                     (source_version, f, self.pkg.changes["version"]))
875         else:
876             # Check in the SQL database
877             if not source_exists(source_package, source_version, suites = \
878                 self.pkg.changes["distribution"].keys(), session = session):
879                 # Check in one of the other directories
880                 source_epochless_version = re_no_epoch.sub('', source_version)
881                 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
882                 if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
883                     entry["byhand"] = 1
884                 elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
885                     entry["new"] = 1
886                 else:
887                     dsc_file_exists = False
888                     for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
889                         if cnf.has_key("Dir::Queue::%s" % (myq)):
890                             if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
891                                 dsc_file_exists = True
892                                 break
893
894                     if not dsc_file_exists:
895                         self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
896
897         # Check the version and for file overwrites
898         self.check_binary_against_db(f, session)
899
900     def source_file_checks(self, f, session):
901         entry = self.pkg.files[f]
902
903         m = re_issource.match(f)
904         if not m:
905             return
906
907         entry["package"] = m.group(1)
908         entry["version"] = m.group(2)
909         entry["type"] = m.group(3)
910
911         # Ensure the source package name matches the Source filed in the .changes
912         if self.pkg.changes["source"] != entry["package"]:
913             self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
914
915         # Ensure the source version matches the version in the .changes file
916         if re_is_orig_source.match(f):
917             changes_version = self.pkg.changes["chopversion2"]
918         else:
919             changes_version = self.pkg.changes["chopversion"]
920
921         if changes_version != entry["version"]:
922             self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))
923
924         # Ensure the .changes lists source in the Architecture field
925         if not self.pkg.changes["architecture"].has_key("source"):
926             self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))
927
928         # Check the signature of a .dsc file
929         if entry["type"] == "dsc":
930             # check_signature returns either:
931             #  (None, [list, of, rejects]) or (signature, [])
932             (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
933             for j in rejects:
934                 self.rejects.append(j)
935
936         entry["architecture"] = "source"
937
938     def per_suite_file_checks(self, f, suite, session):
939         cnf = Config()
940         entry = self.pkg.files[f]
941
942         # Skip byhand
943         if entry.has_key("byhand"):
944             return
945
946         # Check we have fields we need to do these checks
947         oktogo = True
948         for m in ['component', 'package', 'priority', 'size', 'md5sum']:
949             if not entry.has_key(m):
950                 self.rejects.append("file '%s' does not have field %s set" % (f, m))
951                 oktogo = False
952
953         if not oktogo:
954             return
955
956         # Handle component mappings
957         for m in cnf.ValueList("ComponentMappings"):
958             (source, dest) = m.split()
959             if entry["component"] == source:
960                 entry["original component"] = source
961                 entry["component"] = dest
962
963         # Ensure the component is valid for the target suite
964         if cnf.has_key("Suite:%s::Components" % (suite)) and \
965            entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
966             self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
967             return
968
969         # Validate the component
970         if not get_component(entry["component"], session):
971             self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
972             return
973
974         # See if the package is NEW
975         if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
976             entry["new"] = 1
977
978         # Validate the priority
979         if entry["priority"].find('/') != -1:
980             self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))
981
982         # Determine the location
983         location = cnf["Dir::Pool"]
984         l = get_location(location, entry["component"], session=session)
985         if l is None:
986             self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
987             entry["location id"] = -1
988         else:
989             entry["location id"] = l.location_id
990
991         # Check the md5sum & size against existing files (if any)
992         entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])
993
994         found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
995                                          entry["size"], entry["md5sum"], entry["location id"])
996
997         if found is None:
998             self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
999         elif found is False and poolfile is not None:
1000             self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
1001         else:
1002             if poolfile is None:
1003                 entry["files id"] = None
1004             else:
1005                 entry["files id"] = poolfile.file_id
1006
1007         # Check for packages that have moved from one component to another
1008         entry['suite'] = suite
1009         arch_list = [entry["architecture"], 'all']
1010         component = get_component_by_package_suite(self.pkg.files[f]['package'], \
1011             [suite], arch_list = arch_list, session = session)
1012         if component is not None:
1013             entry["othercomponents"] = component
1014
    def check_files(self, action=True):
        """
        Check every file listed in the .changes: optionally copy each into
        the holding area, reject a .changes already known to dak, then
        dispatch per-type checks (binary/source/byhand) and per-suite
        checks for each file.  Problems accumulate in self.rejects and
        self.warnings.

        @type action: boolean
        @param action: when True, files are copied into the holding area;
                       when False (dry-run), only checks are performed
        """
        file_keys = self.pkg.files.keys()
        holding = Holding()
        cnf = Config()

        if action:
            cwd = os.getcwd()
            os.chdir(self.pkg.directory)
            for f in file_keys:
                # Best-effort copy: a failure is only a warning because the
                # file may still be located in the DB/pool further down.
                ret = holding.copy_to_holding(f)
                if ret is not None:
                    self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)

            os.chdir(cwd)

        # check we already know the changes file
        # [NB: this check must be done post-suite mapping]
        base_filename = os.path.basename(self.pkg.changes_file)

        session = DBConn().session()

        try:
            dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
            # if in the pool or in a queue other than unchecked, reject
            if (dbc.in_queue is None) \
                   or (dbc.in_queue is not None
                       and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
                self.rejects.append("%s file already known to dak" % base_filename)
        except NoResultFound, e:
            # not known, good
            pass

        has_binaries = False
        has_source = False

        for f, entry in self.pkg.files.items():
            # Ensure the file does not already exist in one of the accepted directories
            for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                if not cnf.has_key("Dir::Queue::%s" % (d)): continue
                if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
                    self.rejects.append("%s file already exists in the %s directory." % (f, d))

            if not re_taint_free.match(f):
                self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))

            # Check the file is readable
            if os.access(f, os.R_OK) == 0:
                # When running in -n, copy_to_holding() won't have
                # generated the reject_message, so we need to.
                if action:
                    if os.path.exists(f):
                        self.rejects.append("Can't read `%s'. [permission denied]" % (f))
                    else:
                        # Don't directly reject, mark to check later to deal with orig's
                        # we can find in the pool
                        self.later_check_files.append(f)
                entry["type"] = "unreadable"
                continue

            # If it's byhand skip remaining checks
            if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Checks for a binary package...
            elif re_isadeb.match(f):
                has_binaries = True
                entry["type"] = "deb"

                # This routine appends to self.rejects/warnings as appropriate
                self.binary_file_checks(f, session)

            # Checks for a source package...
            elif re_issource.match(f):
                has_source = True

                # This routine appends to self.rejects/warnings as appropriate
                self.source_file_checks(f, session)

            # Not a binary or source package?  Assume byhand...
            else:
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Per-suite file checks
            entry["oldfiles"] = {}
            for suite in self.pkg.changes["distribution"].keys():
                self.per_suite_file_checks(f, suite, session)

        session.close()

        # If the .changes file says it has source, it must have source.
        if self.pkg.changes["architecture"].has_key("source"):
            if not has_source:
                self.rejects.append("no source found and Architecture line in changes mention source.")

            if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
                self.rejects.append("source only uploads are not supported.")
1113
1114     ###########################################################################
1115
1116     def __dsc_filename(self):
1117         """
1118         Returns: (Status, Dsc_Filename)
1119         where
1120           Status: Boolean; True when there was no error, False otherwise
1121           Dsc_Filename: String; name of the dsc file if Status is True, reason for the error otherwise
1122         """
1123         dsc_filename = None
1124
1125         # find the dsc
1126         for name, entry in self.pkg.files.items():
1127             if entry.has_key("type") and entry["type"] == "dsc":
1128                 if dsc_filename:
1129                     return False, "cannot process a .changes file with multiple .dsc's."
1130                 else:
1131                     dsc_filename = name
1132
1133         if not dsc_filename:
1134             return False, "source uploads must contain a dsc file"
1135
1136         return True, dsc_filename
1137
1138     def load_dsc(self, action=True, signing_rules=1):
1139         """
1140         Find and load the dsc from self.pkg.files into self.dsc
1141
1142         Returns: (Status, Reason)
1143         where
1144           Status: Boolean; True when there was no error, False otherwise
1145           Reason: String; When Status is False this describes the error
1146         """
1147
1148         # find the dsc
1149         (status, dsc_filename) = self.__dsc_filename()
1150         if not status:
1151             # If status is false, dsc_filename has the reason
1152             return False, dsc_filename
1153
1154         try:
1155             self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=signing_rules, dsc_file=1))
1156         except CantOpenError:
1157             if not action:
1158                 return False, "%s: can't read file." % (dsc_filename)
1159         except ParseChangesError, line:
1160             return False, "%s: parse error, can't grok: %s." % (dsc_filename, line)
1161         except InvalidDscError, line:
1162             return False, "%s: syntax error on line %s." % (dsc_filename, line)
1163         except ChangesUnicodeError:
1164             return False, "%s: dsc file not proper utf-8." % (dsc_filename)
1165
1166         return True, None
1167
1168     ###########################################################################
1169
1170     def check_dsc(self, action=True, session=None):
1171         """Returns bool indicating whether or not the source changes are valid"""
1172         # Ensure there is source to check
1173         if not self.pkg.changes["architecture"].has_key("source"):
1174             return True
1175
1176         (status, reason) = self.load_dsc(action=action)
1177         if not status:
1178             self.rejects.append(reason)
1179             return False
1180         (status, dsc_filename) = self.__dsc_filename()
1181         if not status:
1182             # If status is false, dsc_filename has the reason
1183             self.rejects.append(dsc_filename)
1184             return False
1185
1186         # Build up the file list of files mentioned by the .dsc
1187         try:
1188             self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
1189         except NoFilesFieldError:
1190             self.rejects.append("%s: no Files: field." % (dsc_filename))
1191             return False
1192         except UnknownFormatError, format:
1193             self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
1194             return False
1195         except ParseChangesError, line:
1196             self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
1197             return False
1198
1199         # Enforce mandatory fields
1200         for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
1201             if not self.pkg.dsc.has_key(i):
1202                 self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
1203                 return False
1204
1205         # Validate the source and version fields
1206         if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
1207             self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
1208         if not re_valid_version.match(self.pkg.dsc["version"]):
1209             self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))
1210
1211         # Only a limited list of source formats are allowed in each suite
1212         for dist in self.pkg.changes["distribution"].keys():
1213             allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
1214             if self.pkg.dsc["format"] not in allowed:
1215                 self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))
1216
1217         # Validate the Maintainer field
1218         try:
1219             # We ignore the return value
1220             fix_maintainer(self.pkg.dsc["maintainer"])
1221         except ParseMaintError, msg:
1222             self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
1223                                  % (dsc_filename, self.pkg.dsc["maintainer"], msg))
1224
1225         # Validate the build-depends field(s)
1226         for field_name in [ "build-depends", "build-depends-indep" ]:
1227             field = self.pkg.dsc.get(field_name)
1228             if field:
1229                 # Have apt try to parse them...
1230                 try:
1231                     apt_pkg.ParseSrcDepends(field)
1232                 except:
1233                     self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))
1234
1235         # Ensure the version number in the .dsc matches the version number in the .changes
1236         epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1237         changes_version = self.pkg.files[dsc_filename]["version"]
1238
1239         if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
1240             self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
1241
1242         # Ensure the Files field contain only what's expected
1243         self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))
1244
1245         # Ensure source is newer than existing source in target suites
1246         session = DBConn().session()
1247         self.check_source_against_db(dsc_filename, session)
1248         self.check_dsc_against_db(dsc_filename, session)
1249
1250         dbchg = get_dbchange(self.pkg.changes_file, session)
1251
1252         # Finally, check if we're missing any files
1253         for f in self.later_check_files:
1254             print 'XXX: %s' % f
1255             # Check if we've already processed this file if we have a dbchg object
1256             ok = False
1257             if dbchg:
1258                 for pf in dbchg.files:
1259                     if pf.filename == f and pf.processed:
1260                         self.notes.append('%s was already processed so we can go ahead' % f)
1261                         ok = True
1262                         del self.pkg.files[f]
1263             if not ok:
1264                 self.rejects.append("Could not find file %s references in changes" % f)
1265
1266         session.close()
1267
1268         return True
1269
1270     ###########################################################################
1271
    def get_changelog_versions(self, source_dir):
        """Extracts the source package and (optionally) grabs the
        version history out of debian/changelog for the BTS.

        Symlinks the upload's source files (and any pool-resident orig
        tarballs) into the current working directory, unpacks the source,
        and stores the matching changelog entries in
        self.pkg.dsc["bts changelog"].  Problems are appended to
        self.rejects."""

        cnf = Config()

        # Find the .dsc (again)
        dsc_filename = None
        for f in self.pkg.files.keys():
            if self.pkg.files[f]["type"] == "dsc":
                dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            return

        # Create a symlink mirror of the source files in our temporary directory
        for f in self.pkg.files.keys():
            m = re_issource.match(f)
            if m:
                src = os.path.join(source_dir, f)
                # If a file is missing for whatever reason, give up.
                if not os.path.exists(src):
                    return
                ftype = m.group(3)
                # Skip orig tarballs that live in the pool; they are
                # symlinked from their pool path in the loop below.
                if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
                   self.pkg.orig_files[f].has_key("path"):
                    continue
                dest = os.path.join(os.getcwd(), f)
                os.symlink(src, dest)

        # If the orig files are not a part of the upload, create symlinks to the
        # existing copies.
        for orig_file in self.pkg.orig_files.keys():
            if not self.pkg.orig_files[orig_file].has_key("path"):
                continue
            dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
            os.symlink(self.pkg.orig_files[orig_file]["path"], dest)

        # Extract the source
        # NOTE(review): bare except swallows all extraction errors and reports
        # them uniformly as a dpkg-source failure — confirm this is intended.
        try:
            unpacked = UnpackedSource(dsc_filename)
        except:
            self.rejects.append("'dpkg-source -x' failed for %s." % dsc_filename)
            return

        # BTS version tracking is optional; without a target dir we are done.
        if not cnf.Find("Dir::Queue::BTSVersionTrack"):
            return

        # Get the upstream version
        upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        if re_strip_revision.search(upstr_version):
            upstr_version = re_strip_revision.sub('', upstr_version)

        # Ensure the changelog file exists
        changelog_file = unpacked.get_changelog_file()
        if changelog_file is None:
            self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
            return

        # Parse the changelog
        self.pkg.dsc["bts changelog"] = ""
        for line in changelog_file.readlines():
            m = re_changelog_versions.match(line)
            if m:
                self.pkg.dsc["bts changelog"] += line
        changelog_file.close()
        unpacked.cleanup()

        # Check we found at least one revision in the changelog
        if not self.pkg.dsc["bts changelog"]:
            self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1344
1345     def check_source(self):
1346         # Bail out if:
1347         #    a) there's no source
1348         if not self.pkg.changes["architecture"].has_key("source"):
1349             return
1350
1351         tmpdir = utils.temp_dirname()
1352
1353         # Move into the temporary directory
1354         cwd = os.getcwd()
1355         os.chdir(tmpdir)
1356
1357         # Get the changelog version history
1358         self.get_changelog_versions(cwd)
1359
1360         # Move back and cleanup the temporary tree
1361         os.chdir(cwd)
1362
1363         try:
1364             shutil.rmtree(tmpdir)
1365         except OSError, e:
1366             if e.errno != errno.EACCES:
1367                 print "foobar"
1368                 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1369
1370             self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1371             # We probably have u-r or u-w directories so chmod everything
1372             # and try again.
1373             cmd = "chmod -R u+rwx %s" % (tmpdir)
1374             result = os.system(cmd)
1375             if result != 0:
1376                 utils.fubar("'%s' failed with result %s." % (cmd, result))
1377             shutil.rmtree(tmpdir)
1378         except Exception, e:
1379             print "foobar2 (%s)" % e
1380             utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1381
1382     ###########################################################################
1383     def ensure_hashes(self):
1384         # Make sure we recognise the format of the Files: field in the .changes
1385         format = self.pkg.changes.get("format", "0.0").split(".", 1)
1386         if len(format) == 2:
1387             format = int(format[0]), int(format[1])
1388         else:
1389             format = int(float(format[0])), 0
1390
1391         # We need to deal with the original changes blob, as the fields we need
1392         # might not be in the changes dict serialised into the .dak anymore.
1393         orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1394
1395         # Copy the checksums over to the current changes dict.  This will keep
1396         # the existing modifications to it intact.
1397         for field in orig_changes:
1398             if field.startswith('checksums-'):
1399                 self.pkg.changes[field] = orig_changes[field]
1400
1401         # Check for unsupported hashes
1402         for j in utils.check_hash_fields(".changes", self.pkg.changes):
1403             self.rejects.append(j)
1404
1405         for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1406             self.rejects.append(j)
1407
1408         # We have to calculate the hash if we have an earlier changes version than
1409         # the hash appears in rather than require it exist in the changes file
1410         for hashname, hashfunc, version in utils.known_hashes:
1411             # TODO: Move _ensure_changes_hash into this class
1412             for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1413                 self.rejects.append(j)
1414             if "source" in self.pkg.changes["architecture"]:
1415                 # TODO: Move _ensure_dsc_hash into this class
1416                 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1417                     self.rejects.append(j)
1418
1419     def check_hashes(self):
1420         for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1421             self.rejects.append(m)
1422
1423         for m in utils.check_size(".changes", self.pkg.files):
1424             self.rejects.append(m)
1425
1426         for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1427             self.rejects.append(m)
1428
1429         for m in utils.check_size(".dsc", self.pkg.dsc_files):
1430             self.rejects.append(m)
1431
1432         self.ensure_hashes()
1433
1434     ###########################################################################
1435
    def ensure_orig(self, target_dir='.', session=None):
        """
        Ensures that all orig files mentioned in the changes file are present
        in target_dir. If they do not exist, they are symlinked into place.

        A list containing the symlinks that were created is returned (so they
        can be removed).

        @type target_dir: string
        @param target_dir: directory the orig files must end up in

        @type session: SQLAlchemy session or None
        @param session: existing session to use for pool lookups; when None a
            temporary one is opened and closed per file
        """

        symlinked = []
        cnf = Config()

        for filename, entry in self.pkg.dsc_files.iteritems():
            if not re_is_orig_source.match(filename):
                # File is not an orig; ignore
                continue

            if os.path.exists(filename):
                # File exists, no need to continue
                continue

            def symlink_if_valid(path):
                # Symlink 'path' into target_dir if and only if its size and
                # md5sum match what the .dsc declares for this orig file.
                # Closes over 'entry' and 'symlinked' from the loop above.
                f = utils.open_file(path)
                md5sum = apt_pkg.md5sum(f)
                f.close()

                fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
                expected = (int(entry['size']), entry['md5sum'])

                if fingerprint != expected:
                    return False

                dest = os.path.join(target_dir, filename)

                os.symlink(path, dest)
                symlinked.append(dest)

                return True

            # Use the caller's session if given; otherwise open one of our
            # own and close it again right after the pool lookup.
            session_ = session
            if session is None:
                session_ = DBConn().session()

            found = False

            # Look in the pool
            for poolfile in get_poolfile_like_name('%s' % filename, session_):
                poolfile_path = os.path.join(
                    poolfile.location.path, poolfile.filename
                )

                if symlink_if_valid(poolfile_path):
                    found = True
                    break

            if session is None:
                session_.close()

            if found:
                continue

            # Look in some other queues for the file
            queues = ('New', 'Byhand', 'ProposedUpdates',
                'OldProposedUpdates', 'Embargoed', 'Unembargoed')

            for queue in queues:
                # Skip queues not configured on this installation.
                if not cnf.get('Dir::Queue::%s' % queue):
                    continue

                queuefile_path = os.path.join(
                    cnf['Dir::Queue::%s' % queue], filename
                )

                if not os.path.exists(queuefile_path):
                    # Does not exist in this queue
                    continue

                if symlink_if_valid(queuefile_path):
                    break

        return symlinked
1517
1518     ###########################################################################
1519
    def check_lintian(self):
        """
        Extends self.rejects by checking the output of lintian against tags
        specified in Dinstall::LintianTags.

        Only applies to sourceful uploads targeting unstable or experimental;
        other uploads return without running lintian at all.
        """

        cnf = Config()

        # Don't reject binary uploads
        if not self.pkg.changes['architecture'].has_key('source'):
            return

        # Only check some distributions
        for dist in ('unstable', 'experimental'):
            if dist in self.pkg.changes['distribution']:
                break
        else:
            return

        # If we do not have a tagfile, don't do anything
        tagfile = cnf.get("Dinstall::LintianTags")
        if not tagfile:
            return

        # Parse the yaml file
        sourcefile = file(tagfile, 'r')
        sourcecontent = sourcefile.read()
        sourcefile.close()

        try:
            # NOTE(review): yaml.load without an explicit Loader; only
            # acceptable because the tag file is ftpmaster-maintained, not
            # untrusted input.
            lintiantags = yaml.load(sourcecontent)['lintian']
        except yaml.YAMLError, msg:
            utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
            return

        # Try and find all orig mentioned in the .dsc
        symlinked = self.ensure_orig()

        # Setup the input file for lintian: one tag name per line.
        fd, temp_filename = utils.temp_filename()
        temptagfile = os.fdopen(fd, 'w')
        for tags in lintiantags.values():
            temptagfile.writelines(['%s\n' % x for x in tags])
        temptagfile.close()

        try:
            cmd = "lintian --show-overrides --tags-from-file %s %s" % \
                (temp_filename, self.pkg.changes_file)

            result, output = commands.getstatusoutput(cmd)
        finally:
            # Remove our tempfile and any symlinks we created
            os.unlink(temp_filename)

            for symlink in symlinked:
                os.unlink(symlink)

        # Exit status 2 indicates lintian itself failed; warn but still try
        # to parse whatever output was produced.
        if result == 2:
            utils.warn("lintian failed for %s [return code: %s]." % \
                (self.pkg.changes_file, result))
            utils.warn(utils.prefix_multi_line_string(output, \
                " [possible output:] "))

        def log(*txt):
            # Forward per-tag details to the upload logger, if one is attached.
            if self.logger:
                self.logger.log(
                    [self.pkg.changes_file, "check_lintian"] + list(txt)
                )

        # Generate messages
        parsed_tags = parse_lintian_output(output)
        self.rejects.extend(
            generate_reject_messages(parsed_tags, lintiantags, log=log)
        )
1594
1595     ###########################################################################
1596     def check_urgency(self):
1597         cnf = Config()
1598         if self.pkg.changes["architecture"].has_key("source"):
1599             if not self.pkg.changes.has_key("urgency"):
1600                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1601             self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1602             if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1603                 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1604                                      (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1605                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1606
1607     ###########################################################################
1608
1609     # Sanity check the time stamps of files inside debs.
1610     # [Files in the near future cause ugly warnings and extreme time
1611     #  travel can cause errors on extraction]
1612
1613     def check_timestamps(self):
1614         Cnf = Config()
1615
1616         future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1617         past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1618         tar = TarTime(future_cutoff, past_cutoff)
1619
1620         for filename, entry in self.pkg.files.items():
1621             if entry["type"] == "deb":
1622                 tar.reset()
1623                 try:
1624                     deb_file = utils.open_file(filename)
1625                     apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
1626                     deb_file.seek(0)
1627                     try:
1628                         apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
1629                     except SystemError, e:
1630                         # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
1631                         if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
1632                             raise
1633                         deb_file.seek(0)
1634                         apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")
1635
1636                     deb_file.close()
1637
1638                     future_files = tar.future_files.keys()
1639                     if future_files:
1640                         num_future_files = len(future_files)
1641                         future_file = future_files[0]
1642                         future_date = tar.future_files[future_file]
1643                         self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1644                                % (filename, num_future_files, future_file, time.ctime(future_date)))
1645
1646                     ancient_files = tar.ancient_files.keys()
1647                     if ancient_files:
1648                         num_ancient_files = len(ancient_files)
1649                         ancient_file = ancient_files[0]
1650                         ancient_date = tar.ancient_files[ancient_file]
1651                         self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1652                                % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1653                 except:
1654                     self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1655
1656     def check_if_upload_is_sponsored(self, uid_email, uid_name):
1657         if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1658             sponsored = False
1659         elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1660             sponsored = False
1661             if uid_name == "":
1662                 sponsored = True
1663         else:
1664             sponsored = True
1665             if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1666                 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1667                 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1668                     self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1669                         self.pkg.changes["sponsoremail"] = uid_email
1670
1671         return sponsored
1672
1673
1674     ###########################################################################
1675     # check_signed_by_key checks
1676     ###########################################################################
1677
1678     def check_signed_by_key(self):
1679         """Ensure the .changes is signed by an authorized uploader."""
1680         session = DBConn().session()
1681
1682         # First of all we check that the person has proper upload permissions
1683         # and that this upload isn't blocked
1684         fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1685
1686         if fpr is None:
1687             self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1688             return
1689
1690         # TODO: Check that import-keyring adds UIDs properly
1691         if not fpr.uid:
1692             self.rejects.append("Cannot find uid for fingerprint %s.  Please contact ftpmaster@debian.org" % fpr.fingerprint)
1693             return
1694
1695         # Check that the fingerprint which uploaded has permission to do so
1696         self.check_upload_permissions(fpr, session)
1697
1698         # Check that this package is not in a transition
1699         self.check_transition(session)
1700
1701         session.close()
1702
1703
1704     def check_upload_permissions(self, fpr, session):
1705         # Check any one-off upload blocks
1706         self.check_upload_blocks(fpr, session)
1707
1708         # Start with DM as a special case
1709         # DM is a special case unfortunately, so we check it first
1710         # (keys with no source access get more access than DMs in one
1711         #  way; DMs can only upload for their packages whether source
1712         #  or binary, whereas keys with no access might be able to
1713         #  upload some binaries)
1714         if fpr.source_acl.access_level == 'dm':
1715             self.check_dm_upload(fpr, session)
1716         else:
1717             # Check source-based permissions for other types
1718             if self.pkg.changes["architecture"].has_key("source") and \
1719                 fpr.source_acl.access_level is None:
1720                 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1721                 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1722                 self.rejects.append(rej)
1723                 return
1724             # If not a DM, we allow full upload rights
1725             uid_email = "%s@debian.org" % (fpr.uid.uid)
1726             self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1727
1728
1729         # Check binary upload permissions
1730         # By this point we know that DMs can't have got here unless they
1731         # are allowed to deal with the package concerned so just apply
1732         # normal checks
1733         if fpr.binary_acl.access_level == 'full':
1734             return
1735
1736         # Otherwise we're in the map case
1737         tmparches = self.pkg.changes["architecture"].copy()
1738         tmparches.pop('source', None)
1739
1740         for bam in fpr.binary_acl_map:
1741             tmparches.pop(bam.architecture.arch_string, None)
1742
1743         if len(tmparches.keys()) > 0:
1744             if fpr.binary_reject:
1745                 rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1746                 rej += "\narchitectures involved are: ", ",".join(tmparches.keys())
1747                 self.rejects.append(rej)
1748             else:
1749                 # TODO: This is where we'll implement reject vs throw away binaries later
1750                 rej = "Uhm.  I'm meant to throw away the binaries now but that's not implemented yet"
1751                 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1752                 rej += "\nFingerprint: %s", (fpr.fingerprint)
1753                 self.rejects.append(rej)
1754
1755
1756     def check_upload_blocks(self, fpr, session):
1757         """Check whether any upload blocks apply to this source, source
1758            version, uid / fpr combination"""
1759
1760         def block_rej_template(fb):
1761             rej = 'Manual upload block in place for package %s' % fb.source
1762             if fb.version is not None:
1763                 rej += ', version %s' % fb.version
1764             return rej
1765
1766         for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1767             # version is None if the block applies to all versions
1768             if fb.version is None or fb.version == self.pkg.changes['version']:
1769                 # Check both fpr and uid - either is enough to cause a reject
1770                 if fb.fpr is not None:
1771                     if fb.fpr.fingerprint == fpr.fingerprint:
1772                         self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1773                 if fb.uid is not None:
1774                     if fb.uid == fpr.uid:
1775                         self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1776
1777
    def check_dm_upload(self, fpr, session):
        """
        Enforce the Debian Maintainer upload restrictions for keys whose
        source ACL access level is 'dm', appending to self.rejects on any
        violation.
        """
        # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
        ## none of the uploaded packages are NEW
        rej = False
        for f in self.pkg.files.keys():
            if self.pkg.files[f].has_key("byhand"):
                self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
                rej = True
            if self.pkg.files[f].has_key("new"):
                self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
                rej = True

        if rej:
            return

        # Latest version in unstable or experimental, per get_newest_source.
        r = get_newest_source(self.pkg.changes["source"], session)

        if r is None:
            rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
            self.rejects.append(rej)
            return

        if not r.dm_upload_allowed:
            rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
            self.rejects.append(rej)
            return

        ## the Maintainer: field of the uploaded .changes file corresponds with
        ## the owner of the key used (ie, non-developer maintainers may not sponsor
        ## uploads)
        if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
            self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))

        ## the most recent version of the package uploaded to unstable or
        ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
        ## non-developer maintainers cannot NMU or hijack packages)

        # srcuploaders includes the maintainer
        accept = False
        for sup in r.srcuploaders:
            (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
            # Eww - I hope we never have two people with the same name in Debian
            if email == fpr.uid.uid or name == fpr.uid.name:
                accept = True
                break

        if not accept:
            self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
            return

        ## none of the packages are being taken over from other source packages
        # NOTE(review): 's' below is the source DB object, not its name; the
        # message relies on its string representation — confirm that's intended.
        for b in self.pkg.changes["binary"].keys():
            for suite in self.pkg.changes["distribution"].keys():
                for s in get_source_by_package_and_suite(b, suite, session):
                    if s.source != self.pkg.changes["source"]:
                        self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s, suite))
1834
1835
1836
1837     def check_transition(self, session):
1838         cnf = Config()
1839
1840         sourcepkg = self.pkg.changes["source"]
1841
1842         # No sourceful upload -> no need to do anything else, direct return
1843         # We also work with unstable uploads, not experimental or those going to some
1844         # proposed-updates queue
1845         if "source" not in self.pkg.changes["architecture"] or \
1846            "unstable" not in self.pkg.changes["distribution"]:
1847             return
1848
1849         # Also only check if there is a file defined (and existant) with
1850         # checks.
1851         transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1852         if transpath == "" or not os.path.exists(transpath):
1853             return
1854
1855         # Parse the yaml file
1856         sourcefile = file(transpath, 'r')
1857         sourcecontent = sourcefile.read()
1858         try:
1859             transitions = yaml.load(sourcecontent)
1860         except yaml.YAMLError, msg:
1861             # This shouldn't happen, there is a wrapper to edit the file which
1862             # checks it, but we prefer to be safe than ending up rejecting
1863             # everything.
1864             utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1865             return
1866
1867         # Now look through all defined transitions
1868         for trans in transitions:
1869             t = transitions[trans]
1870             source = t["source"]
1871             expected = t["new"]
1872
1873             # Will be None if nothing is in testing.
1874             current = get_source_in_suite(source, "testing", session)
1875             if current is not None:
1876                 compare = apt_pkg.VersionCompare(current.version, expected)
1877
1878             if current is None or compare < 0:
1879                 # This is still valid, the current version in testing is older than
1880                 # the new version we wait for, or there is none in testing yet
1881
1882                 # Check if the source we look at is affected by this.
1883                 if sourcepkg in t['packages']:
1884                     # The source is affected, lets reject it.
1885
1886                     rejectmsg = "%s: part of the %s transition.\n\n" % (
1887                         sourcepkg, trans)
1888
1889                     if current is not None:
1890                         currentlymsg = "at version %s" % (current.version)
1891                     else:
1892                         currentlymsg = "not present in testing"
1893
1894                     rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1895
1896                     rejectmsg += "\n".join(textwrap.wrap("""Your package
1897 is part of a testing transition designed to get %s migrated (it is
1898 currently %s, we need version %s).  This transition is managed by the
1899 Release Team, and %s is the Release-Team member responsible for it.
1900 Please mail debian-release@lists.debian.org or contact %s directly if you
1901 need further assistance.  You might want to upload to experimental until this
1902 transition is done."""
1903                             % (source, currentlymsg, expected,t["rm"], t["rm"])))
1904
1905                     self.rejects.append(rejectmsg)
1906                     return
1907
1908     ###########################################################################
1909     # End check_signed_by_key checks
1910     ###########################################################################
1911
1912     def build_summaries(self):
1913         """ Build a summary of changes the upload introduces. """
1914
1915         (byhand, new, summary, override_summary) = self.pkg.file_summary()
1916
1917         short_summary = summary
1918
1919         # This is for direport's benefit...
1920         f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1921
1922         if byhand or new:
1923             summary += "Changes: " + f
1924
1925         summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1926
1927         summary += self.announce(short_summary, 0)
1928
1929         return (summary, short_summary)
1930
1931     ###########################################################################
1932
1933     def close_bugs(self, summary, action):
1934         """
1935         Send mail to close bugs as instructed by the closes field in the changes file.
1936         Also add a line to summary if any work was done.
1937
1938         @type summary: string
1939         @param summary: summary text, as given by L{build_summaries}
1940
1941         @type action: bool
1942         @param action: Set to false no real action will be done.
1943
1944         @rtype: string
1945         @return: summary. If action was taken, extended by the list of closed bugs.
1946
1947         """
1948
1949         template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1950
1951         bugs = self.pkg.changes["closes"].keys()
1952
1953         if not bugs:
1954             return summary
1955
1956         bugs.sort()
1957         summary += "Closing bugs: "
1958         for bug in bugs:
1959             summary += "%s " % (bug)
1960             if action:
1961                 self.update_subst()
1962                 self.Subst["__BUG_NUMBER__"] = bug
1963                 if self.pkg.changes["distribution"].has_key("stable"):
1964                     self.Subst["__STABLE_WARNING__"] = """
1965 Note that this package is not part of the released stable Debian
1966 distribution.  It may have dependencies on other unreleased software,
1967 or other instabilities.  Please take care if you wish to install it.
1968 The update will eventually make its way into the next released Debian
1969 distribution."""
1970                 else:
1971                     self.Subst["__STABLE_WARNING__"] = ""
1972                 mail_message = utils.TemplateSubst(self.Subst, template)
1973                 utils.send_mail(mail_message)
1974
1975                 # Clear up after ourselves
1976                 del self.Subst["__BUG_NUMBER__"]
1977                 del self.Subst["__STABLE_WARNING__"]
1978
1979         if action and self.logger:
1980             self.logger.log(["closing bugs"] + bugs)
1981
1982         summary += "\n"
1983
1984         return summary
1985
1986     ###########################################################################
1987
    def announce(self, short_summary, action):
        """
        Send an announce mail about a new upload.

        @type short_summary: string
        @param short_summary: Short summary text to include in the mail

        @type action: bool
        @param action: Set to false no real action will be done.

        @rtype: string
        @return: Textstring about action taken.

        """

        cnf = Config()
        announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')

        # Only do announcements for source uploads with a recent dpkg-dev installed
        if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
           self.pkg.changes["architecture"].has_key("source"):
            return ""

        # Track announce lists already handled so each list gets at most one
        # mail even when several target suites share an address.
        lists_done = {}
        summary = ""

        self.Subst["__SHORT_SUMMARY__"] = short_summary

        for dist in self.pkg.changes["distribution"].keys():
            suite = get_suite(dist)
            if suite is None: continue
            announce_list = suite.announce
            if announce_list == "" or lists_done.has_key(announce_list):
                continue

            lists_done[announce_list] = 1
            summary += "Announcing to %s\n" % (announce_list)

            if action:
                self.update_subst()
                self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
                # Bcc the tracking server address for sourceful uploads when
                # one is configured.
                if cnf.get("Dinstall::TrackingServer") and \
                   self.pkg.changes["architecture"].has_key("source"):
                    trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
                    self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto

                mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
                utils.send_mail(mail_message)

                del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]

        # Optionally close the bugs referenced by this upload too.
        if cnf.FindB("Dinstall::CloseBugs"):
            summary = self.close_bugs(summary, action)

        del self.Subst["__SHORT_SUMMARY__"]

        return summary
2045
2046     ###########################################################################
2047     @session_wrapper
2048     def accept (self, summary, short_summary, session=None):
2049         """
2050         Accept an upload.
2051
2052         This moves all files referenced from the .changes into the pool,
2053         sends the accepted mail, announces to lists, closes bugs and
2054         also checks for override disparities. If enabled it will write out
2055         the version history for the BTS Version Tracking and will finally call
2056         L{queue_build}.
2057
2058         @type summary: string
2059         @param summary: Summary text
2060
2061         @type short_summary: string
2062         @param short_summary: Short summary
2063         """
2064
2065         cnf = Config()
2066         stats = SummaryStats()
2067
2068         print "Installing."
2069         self.logger.log(["installing changes", self.pkg.changes_file])
2070
2071         poolfiles = []
2072
2073         # Add the .dsc file to the DB first
2074         for newfile, entry in self.pkg.files.items():
2075             if entry["type"] == "dsc":
2076                 source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
2077                 for j in pfs:
2078                     poolfiles.append(j)
2079
2080         # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
2081         for newfile, entry in self.pkg.files.items():
2082             if entry["type"] == "deb":
2083                 poolfiles.append(add_deb_to_db(self, newfile, session))
2084
2085         # If this is a sourceful diff only upload that is moving
2086         # cross-component we need to copy the .orig files into the new
2087         # component too for the same reasons as above.
2088         # XXX: mhy: I think this should be in add_dsc_to_db
2089         if self.pkg.changes["architecture"].has_key("source"):
2090             for orig_file in self.pkg.orig_files.keys():
2091                 if not self.pkg.orig_files[orig_file].has_key("id"):
2092                     continue # Skip if it's not in the pool
2093                 orig_file_id = self.pkg.orig_files[orig_file]["id"]
2094                 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
2095                     continue # Skip if the location didn't change
2096
2097                 # Do the move
2098                 oldf = get_poolfile_by_id(orig_file_id, session)
2099                 old_filename = os.path.join(oldf.location.path, oldf.filename)
2100                 old_dat = {'size': oldf.filesize,   'md5sum': oldf.md5sum,
2101                            'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
2102
2103                 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
2104
2105                 # TODO: Care about size/md5sum collisions etc
2106                 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
2107
2108                 # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
2109                 if newf is None:
2110                     utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
2111                     newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
2112
2113                     session.flush()
2114
2115                     # Don't reference the old file from this changes
2116                     for p in poolfiles:
2117                         if p.file_id == oldf.file_id:
2118                             poolfiles.remove(p)
2119
2120                     poolfiles.append(newf)
2121
2122                     # Fix up the DSC references
2123                     toremove = []
2124
2125                     for df in source.srcfiles:
2126                         if df.poolfile.file_id == oldf.file_id:
2127                             # Add a new DSC entry and mark the old one for deletion
2128                             # Don't do it in the loop so we don't change the thing we're iterating over
2129                             newdscf = DSCFile()
2130                             newdscf.source_id = source.source_id
2131                             newdscf.poolfile_id = newf.file_id
2132                             session.add(newdscf)
2133
2134                             toremove.append(df)
2135
2136                     for df in toremove:
2137                         session.delete(df)
2138
2139                     # Flush our changes
2140                     session.flush()
2141
2142                     # Make sure that our source object is up-to-date
2143                     session.expire(source)
2144
2145         # Add changelog information to the database
2146         self.store_changelog()
2147
2148         # Install the files into the pool
2149         for newfile, entry in self.pkg.files.items():
2150             destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
2151             utils.move(newfile, destination)
2152             self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
2153             stats.accept_bytes += float(entry["size"])
2154
2155         # Copy the .changes file across for suite which need it.
2156         copy_changes = dict([(x.copychanges, '')
2157                              for x in session.query(Suite).filter(Suite.suite_name.in_(self.pkg.changes["distribution"].keys())).all()
2158                              if x.copychanges is not None])
2159
2160         for dest in copy_changes.keys():
2161             utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
2162
2163         # We're done - commit the database changes
2164         session.commit()
2165         # Our SQL session will automatically start a new transaction after
2166         # the last commit
2167
2168         # Move the .changes into the 'done' directory
2169         utils.move(self.pkg.changes_file,
2170                    os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))
2171
2172         if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
2173             UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
2174
2175         self.update_subst()
2176         self.Subst["__SUMMARY__"] = summary
2177         mail_message = utils.TemplateSubst(self.Subst,
2178                                            os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
2179         utils.send_mail(mail_message)
2180         self.announce(short_summary, 1)
2181
2182         ## Helper stuff for DebBugs Version Tracking
2183         if cnf.Find("Dir::Queue::BTSVersionTrack"):
2184             if self.pkg.changes["architecture"].has_key("source"):
2185                 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2186                 version_history = os.fdopen(fd, 'w')
2187                 version_history.write(self.pkg.dsc["bts changelog"])
2188                 version_history.close()
2189                 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2190                                       self.pkg.changes_file[:-8]+".versions")
2191                 os.rename(temp_filename, filename)
2192                 os.chmod(filename, 0644)
2193
2194             # Write out the binary -> source mapping.
2195             (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2196             debinfo = os.fdopen(fd, 'w')
2197             for name, entry in sorted(self.pkg.files.items()):
2198                 if entry["type"] == "deb":
2199                     line = " ".join([entry["package"], entry["version"],
2200                                      entry["architecture"], entry["source package"],
2201                                      entry["source version"]])
2202                     debinfo.write(line+"\n")
2203             debinfo.close()
2204             filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2205                                   self.pkg.changes_file[:-8]+".debinfo")
2206             os.rename(temp_filename, filename)
2207             os.chmod(filename, 0644)
2208
2209         session.commit()
2210
2211         # Set up our copy queues (e.g. buildd queues)
2212         for suite_name in self.pkg.changes["distribution"].keys():
2213             suite = get_suite(suite_name, session)
2214             for q in suite.copy_queues:
2215                 for f in poolfiles:
2216                     q.add_file_from_pool(f)
2217
2218         session.commit()
2219
2220         # Finally...
2221         stats.accept_count += 1
2222
2223     def check_override(self):
2224         """
2225         Checks override entries for validity. Mails "Override disparity" warnings,
2226         if that feature is enabled.
2227
2228         Abandons the check if
2229           - override disparity checks are disabled
2230           - mail sending is disabled
2231         """
2232
2233         cnf = Config()
2234
2235         # Abandon the check if override disparity checks have been disabled
2236         if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
2237             return
2238
2239         summary = self.pkg.check_override()
2240
2241         if summary == "":
2242             return
2243
2244         overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2245
2246         self.update_subst()
2247         self.Subst["__SUMMARY__"] = summary
2248         mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2249         utils.send_mail(mail_message)
2250         del self.Subst["__SUMMARY__"]
2251
2252     ###########################################################################
2253
2254     def remove(self, from_dir=None):
2255         """
2256         Used (for instance) in p-u to remove the package from unchecked
2257
2258         Also removes the package from holding area.
2259         """
2260         if from_dir is None:
2261             from_dir = self.pkg.directory
2262         h = Holding()
2263
2264         for f in self.pkg.files.keys():
2265             os.unlink(os.path.join(from_dir, f))
2266             if os.path.exists(os.path.join(h.holding_dir, f)):
2267                 os.unlink(os.path.join(h.holding_dir, f))
2268
2269         os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2270         if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2271             os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2272
2273     ###########################################################################
2274
2275     def move_to_queue (self, queue):
2276         """
2277         Move files to a destination queue using the permissions in the table
2278         """
2279         h = Holding()
2280         utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2281                    queue.path, perms=int(queue.change_perms, 8))
2282         for f in self.pkg.files.keys():
2283             utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
2284
2285     ###########################################################################
2286
    def force_reject(self, reject_files):
        """
        Forcefully move files from the current directory to the
        reject directory.  If any file already exists in the reject
        directory it will be moved to the morgue to make way for
        the new file.

        @type reject_files: dict
        @param reject_files: file dictionary

        """

        cnf = Config()

        for file_entry in reject_files:
            # Skip any files which don't exist or which we don't have permission to copy.
            if os.access(file_entry, os.R_OK) == 0:
                continue

            dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)

            try:
                # O_EXCL claims the destination atomically, so two dak
                # processes can never both write the same reject file.
                dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
            except OSError, e:
                # File exists?  Let's find a new name by adding a number
                if e.errno == errno.EEXIST:
                    try:
                        dest_file = utils.find_next_free(dest_file, 255)
                    except NoFreeFilenameError:
                        # Something's either gone badly Pete Tong, or
                        # someone is trying to exploit us.
                        utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))
                        return

                    # Make sure we really got it
                    try:
                        dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
                    except OSError, e:
                        # Likewise
                        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
                        return
                else:
                    # Any other OSError (permissions, missing dir, ...) is fatal.
                    raise
            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file_entry, dest_file, 1, perms=0660)
            os.close(dest_fd)
2334
2335     ###########################################################################
    def do_reject (self, manual=0, reject_message="", notes=""):
        """
        Reject an upload. If called without a reject message or C{manual} is
        true, spawn an editor so the user can write one.

        @type manual: bool
        @param manual: manual or automated rejection

        @type reject_message: string
        @param reject_message: A reject message

        @type notes: list
        @param notes: note objects pre-filled into the editor template;
         assumed to carry author/version/notedate/comment attributes
         (TODO confirm against callers)

        @return: 0 on success; 1 if the user abandoned a manual rejection
         (may also exit the process if the user chooses Quit)

        """
        # If we weren't given a manual rejection message, spawn an
        # editor so the user can add one in...
        if manual and not reject_message:
            (fd, temp_filename) = utils.temp_filename()
            temp_file = os.fdopen(fd, 'w')
            if len(notes) > 0:
                for note in notes:
                    temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
                                    % (note.author, note.version, note.notedate, note.comment))
            temp_file.close()
            editor = os.environ.get("EDITOR","vi")
            answer = 'E'
            # Keep re-opening the editor until the user picks something
            # other than (E)dit.
            while answer == 'E':
                os.system("%s %s" % (editor, temp_filename))
                temp_fh = utils.open_file(temp_filename)
                reject_message = "".join(temp_fh.readlines())
                temp_fh.close()
                print "Reject message:"
                print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
                prompt = "[R]eject, Edit, Abandon, Quit ?"
                answer = "XXX"
                # Loop until the reply is one of the letters in the prompt;
                # an empty reply takes the default extracted from the prompt.
                while prompt.find(answer) == -1:
                    answer = utils.our_raw_input(prompt)
                    m = re_default_answer.search(prompt)
                    if answer == "":
                        answer = m.group(1)
                    answer = answer[:1].upper()
            os.unlink(temp_filename)
            if answer == 'A':
                return 1
            elif answer == 'Q':
                sys.exit(0)

        print "Rejecting.\n"

        cnf = Config()

        # The .reason file sits next to the rejected upload, named after
        # the .changes file ("foo.changes" -> "foo.reason").
        reason_filename = self.pkg.changes_file[:-8] + ".reason"
        reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)

        # Move all the files into the reject directory
        reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
        self.force_reject(reject_files)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reason_filename):
            os.unlink(reason_filename)
        reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)

        rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")

        self.update_subst()
        if not manual:
            # Automated rejection: mail from the archive address, tagged
            # with a marker header instead of a Cc.
            self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
            self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
            self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
            os.write(reason_fd, reject_message)
            reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
        else:
            # Build up the rejection email
            user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
            self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
            self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
            self.Subst["__REJECT_MESSAGE__"] = ""
            self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
            reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
            # Write the rejection email out as the <foo>.reason file
            os.write(reason_fd, reject_mail_message)

        # Clean these out of the substitution dict so later mails built
        # from self.Subst don't inherit them.
        del self.Subst["__REJECTOR_ADDRESS__"]
        del self.Subst["__MANUAL_REJECT_MESSAGE__"]
        del self.Subst["__CC__"]

        os.close(reason_fd)

        # Send the rejection mail
        utils.send_mail(reject_mail_message)

        if self.logger:
            self.logger.log(["rejected", self.pkg.changes_file])

        return 0
2433
2434     ################################################################################
2435     def in_override_p(self, package, component, suite, binary_type, filename, session):
2436         """
2437         Check if a package already has override entries in the DB
2438
2439         @type package: string
2440         @param package: package name
2441
2442         @type component: string
2443         @param component: database id of the component
2444
2445         @type suite: int
2446         @param suite: database id of the suite
2447
2448         @type binary_type: string
2449         @param binary_type: type of the package
2450
2451         @type filename: string
2452         @param filename: filename we check
2453
2454         @return: the database result. But noone cares anyway.
2455
2456         """
2457
2458         cnf = Config()
2459
2460         if binary_type == "": # must be source
2461             file_type = "dsc"
2462         else:
2463             file_type = binary_type
2464
2465         # Override suite name; used for example with proposed-updates
2466         oldsuite = get_suite(suite, session)
2467         if (not oldsuite is None) and oldsuite.overridesuite:
2468             suite = oldsuite.overridesuite
2469
2470         result = get_override(package, suite, component, file_type, session)
2471
2472         # If checking for a source package fall back on the binary override type
2473         if file_type == "dsc" and len(result) < 1:
2474             result = get_override(package, suite, component, ['deb', 'udeb'], session)
2475
2476         # Remember the section and priority so we can check them later if appropriate
2477         if len(result) > 0:
2478             result = result[0]
2479             self.pkg.files[filename]["override section"] = result.section.section
2480             self.pkg.files[filename]["override priority"] = result.priority.priority
2481             return result
2482
2483         return None
2484
2485     ################################################################################
2486     def get_anyversion(self, sv_list, suite):
2487         """
2488         @type sv_list: list
2489         @param sv_list: list of (suite, version) tuples to check
2490
2491         @type suite: string
2492         @param suite: suite name
2493
2494         Description: TODO
2495         """
2496         Cnf = Config()
2497         anyversion = None
2498         anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
2499         for (s, v) in sv_list:
2500             if s in [ x.lower() for x in anysuite ]:
2501                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2502                     anyversion = v
2503
2504         return anyversion
2505
2506     ################################################################################
2507
    def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
        """
        @type sv_list: list
        @param sv_list: list of (suite, version) tuples to check

        @type filename: string
        @param filename: filename of the upload, used only in the
         reject/warning messages

        @type new_version: string
        @param new_version: version of the upload being checked

        @type sourceful: bool
        @param sourceful: whether the upload includes source

        Ensure versions are newer than existing packages in target
        suites and that cross-suite version checking rules as
        set out in the conf file are satisfied.

        Side effects: appends to self.rejects / self.warnings and may
        record propagation targets in
        self.pkg.changes["propdistribution"].
        """

        cnf = Config()

        # Check versions for each target suite
        for target_suite in self.pkg.changes["distribution"].keys():
            must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
            must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]

            # Enforce "must be newer than target suite" even if conffile omits it
            if target_suite not in must_be_newer_than:
                must_be_newer_than.append(target_suite)

            for (suite, existent_version) in sv_list:
                vercmp = apt_pkg.VersionCompare(new_version, existent_version)

                # Sourceful uploads must strictly increase the version
                # relative to every must-be-newer-than suite.
                if suite in must_be_newer_than and sourceful and vercmp < 1:
                    self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))

                # Conflict with a must-be-older-than suite: see if a
                # configured distribution-version mapping lets us save it
                # by propagating the upload to another suite instead.
                if suite in must_be_older_than and vercmp > -1:
                    cansave = 0

                    if self.pkg.changes.get('distribution-version', {}).has_key(suite):
                        # we really use the other suite, ignoring the conflicting one ...
                        addsuite = self.pkg.changes["distribution-version"][suite]

                        add_version = self.get_anyversion(sv_list, addsuite)
                        target_version = self.get_anyversion(sv_list, target_suite)

                        if not add_version:
                            # not add_version can only happen if we map to a suite
                            # that doesn't enhance the suite we're propup'ing from.
                            # so "propup-ver x a b c; map a d" is a problem only if
                            # d doesn't enhance a.
                            #
                            # i think we could always propagate in this case, rather
                            # than complaining. either way, this isn't a REJECT issue
                            #
                            # And - we really should complain to the dorks who configured dak
                            self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite))
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            cansave = 1
                        elif not target_version:
                            # not targets_version is true when the package is NEW
                            # we could just stick with the "...old version..." REJECT
                            # for this, I think.
                            self.rejects.append("Won't propogate NEW packages.")
                        elif apt_pkg.VersionCompare(new_version, add_version) < 0:
                            # propogation would be redundant. no need to reject though.
                            self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
                            cansave = 1
                        elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
                             apt_pkg.VersionCompare(add_version, target_version) >= 0:
                            # propogate!!
                            self.warnings.append("Propogating upload to %s" % (addsuite))
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            cansave = 1

                    # No propagation route found: reject outright.
                    if not cansave:
                        self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2584
2585     ################################################################################
2586     def check_binary_against_db(self, filename, session):
2587         # Ensure version is sane
2588         self.cross_suite_version_check( \
2589             get_suite_version_by_package(self.pkg.files[filename]["package"], \
2590                 self.pkg.files[filename]["architecture"], session),
2591             filename, self.pkg.files[filename]["version"], sourceful=False)
2592
2593         # Check for any existing copies of the file
2594         q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2595         q = q.filter_by(version=self.pkg.files[filename]["version"])
2596         q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2597
2598         if q.count() > 0:
2599             self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
2600
2601     ################################################################################
2602
2603     def check_source_against_db(self, filename, session):
2604         source = self.pkg.dsc.get("source")
2605         version = self.pkg.dsc.get("version")
2606
2607         # Ensure version is sane
2608         self.cross_suite_version_check( \
2609             get_suite_version_by_source(source, session), filename, version,
2610             sourceful=True)
2611
2612     ################################################################################
2613     def check_dsc_against_db(self, filename, session):
2614         """
2615
2616         @warning: NB: this function can remove entries from the 'files' index [if
2617          the orig tarball is a duplicate of the one in the archive]; if
2618          you're iterating over 'files' and call this function as part of
2619          the loop, be sure to add a check to the top of the loop to
2620          ensure you haven't just tried to dereference the deleted entry.
2621
2622         """
2623
2624         Cnf = Config()
2625         self.pkg.orig_files = {} # XXX: do we need to clear it?
2626         orig_files = self.pkg.orig_files
2627
2628         # Try and find all files mentioned in the .dsc.  This has
2629         # to work harder to cope with the multiple possible
2630         # locations of an .orig.tar.gz.
2631         # The ordering on the select is needed to pick the newest orig
2632         # when it exists in multiple places.
2633         for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2634             found = None
2635             if self.pkg.files.has_key(dsc_name):
2636                 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2637                 actual_size = int(self.pkg.files[dsc_name]["size"])
2638                 found = "%s in incoming" % (dsc_name)
2639
2640                 # Check the file does not already exist in the archive
2641                 ql = get_poolfile_like_name(dsc_name, session)
2642
2643                 # Strip out anything that isn't '%s' or '/%s$'
2644                 for i in ql:
2645                     if not i.filename.endswith(dsc_name):
2646                         ql.remove(i)
2647
2648                 # "[dak] has not broken them.  [dak] has fixed a
2649                 # brokenness.  Your crappy hack exploited a bug in
2650                 # the old dinstall.
2651                 #
2652                 # "(Come on!  I thought it was always obvious that
2653                 # one just doesn't release different files with
2654                 # the same name and version.)"
2655                 #                        -- ajk@ on d-devel@l.d.o
2656
2657                 if len(ql) > 0:
2658                     # Ignore exact matches for .orig.tar.gz
2659                     match = 0
2660                     if re_is_orig_source.match(dsc_name):
2661                         for i in ql:
2662                             if self.pkg.files.has_key(dsc_name) and \
2663                                int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2664                                self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2665                                 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2666                                 # TODO: Don't delete the entry, just mark it as not needed
2667                                 # This would fix the stupidity of changing something we often iterate over
2668                                 # whilst we're doing it
2669                                 del self.pkg.files[dsc_name]
2670                                 dsc_entry["files id"] = i.file_id
2671                                 if not orig_files.has_key(dsc_name):
2672                                     orig_files[dsc_name] = {}
2673                                 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2674                                 match = 1
2675
2676                                 # Don't bitch that we couldn't find this file later
2677                                 try:
2678                                     self.later_check_files.remove(dsc_name)
2679                                 except ValueError:
2680                                     pass
2681
2682
2683                     if not match:
2684                         self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2685
2686             elif re_is_orig_source.match(dsc_name):
2687                 # Check in the pool
2688                 ql = get_poolfile_like_name(dsc_name, session)
2689
2690                 # Strip out anything that isn't '%s' or '/%s$'
2691                 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2692                 for i in ql:
2693                     if not i.filename.endswith(dsc_name):
2694                         ql.remove(i)
2695
2696                 if len(ql) > 0:
2697                     # Unfortunately, we may get more than one match here if,
2698                     # for example, the package was in potato but had an -sa
2699                     # upload in woody.  So we need to choose the right one.
2700
2701                     # default to something sane in case we don't match any or have only one
2702                     x = ql[0]
2703
2704                     if len(ql) > 1:
2705                         for i in ql:
2706                             old_file = os.path.join(i.location.path, i.filename)
2707                             old_file_fh = utils.open_file(old_file)
2708                             actual_md5 = apt_pkg.md5sum(old_file_fh)
2709                             old_file_fh.close()
2710                             actual_size = os.stat(old_file)[stat.ST_SIZE]
2711                             if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2712                                 x = i
2713
2714                     old_file = os.path.join(i.location.path, i.filename)
2715                     old_file_fh = utils.open_file(old_file)
2716                     actual_md5 = apt_pkg.md5sum(old_file_fh)
2717                     old_file_fh.close()
2718                     actual_size = os.stat(old_file)[stat.ST_SIZE]
2719                     found = old_file
2720                     suite_type = x.location.archive_type
2721                     # need this for updating dsc_files in install()
2722                     dsc_entry["files id"] = x.file_id
2723                     # See install() in process-accepted...
2724                     if not orig_files.has_key(dsc_name):
2725                         orig_files[dsc_name] = {}
2726                     orig_files[dsc_name]["id"] = x.file_id
2727                     orig_files[dsc_name]["path"] = old_file
2728                     orig_files[dsc_name]["location"] = x.location.location_id
2729                 else:
2730                     # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2731                     # Not there? Check the queue directories...
2732                     for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2733                         if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2734                             continue
2735                         in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2736                         if os.path.exists(in_otherdir):
2737                             in_otherdir_fh = utils.open_file(in_otherdir)
2738                             actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2739                             in_otherdir_fh.close()
2740                             actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2741                             found = in_otherdir
2742                             if not orig_files.has_key(dsc_name):
2743                                 orig_files[dsc_name] = {}
2744                             orig_files[dsc_name]["path"] = in_otherdir
2745
2746                     if not found:
2747                         self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2748                         continue
2749             else:
2750                 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2751                 continue
2752             if actual_md5 != dsc_entry["md5sum"]:
2753                 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2754             if actual_size != int(dsc_entry["size"]):
2755                 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2756
2757     ################################################################################
2758     # This is used by process-new and process-holding to recheck a changes file
2759     # at the time we're running.  It mainly wraps various other internal functions
2760     # and is similar to accepted_checks - these should probably be tidied up
2761     # and combined
2762     def recheck(self, session):
2763         cnf = Config()
2764         for f in self.pkg.files.keys():
2765             # The .orig.tar.gz can disappear out from under us is it's a
2766             # duplicate of one in the archive.
2767             if not self.pkg.files.has_key(f):
2768                 continue
2769
2770             entry = self.pkg.files[f]
2771
2772             # Check that the source still exists
2773             if entry["type"] == "deb":
2774                 source_version = entry["source version"]
2775                 source_package = entry["source package"]
2776                 if not self.pkg.changes["architecture"].has_key("source") \
2777                    and not source_exists(source_package, source_version, \
2778                     suites = self.pkg.changes["distribution"].keys(), session = session):
2779                     source_epochless_version = re_no_epoch.sub('', source_version)
2780                     dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2781                     found = False
2782                     for q in ["Embargoed", "Unembargoed", "Newstage"]:
2783                         if cnf.has_key("Dir::Queue::%s" % (q)):
2784                             if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
2785                                 found = True
2786                     if not found:
2787                         self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2788
2789             # Version and file overwrite checks
2790             if entry["type"] == "deb":
2791                 self.check_binary_against_db(f, session)
2792             elif entry["type"] == "dsc":
2793                 self.check_source_against_db(f, session)
2794                 self.check_dsc_against_db(f, session)
2795
2796     ################################################################################
2797     def accepted_checks(self, overwrite_checks, session):
2798         # Recheck anything that relies on the database; since that's not
2799         # frozen between accept and our run time when called from p-a.
2800
2801         # overwrite_checks is set to False when installing to stable/oldstable
2802
2803         propogate={}
2804         nopropogate={}
2805
2806         # Find the .dsc (again)
2807         dsc_filename = None
2808         for f in self.pkg.files.keys():
2809             if self.pkg.files[f]["type"] == "dsc":
2810                 dsc_filename = f
2811
2812         for checkfile in self.pkg.files.keys():
2813             # The .orig.tar.gz can disappear out from under us is it's a
2814             # duplicate of one in the archive.
2815             if not self.pkg.files.has_key(checkfile):
2816                 continue
2817
2818             entry = self.pkg.files[checkfile]
2819
2820             # Check that the source still exists
2821             if entry["type"] == "deb":
2822                 source_version = entry["source version"]
2823                 source_package = entry["source package"]
2824                 if not self.pkg.changes["architecture"].has_key("source") \
2825                    and not source_exists(source_package, source_version, \
2826                     suites = self.pkg.changes["distribution"].keys(), \
2827                     session = session):
2828                     self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2829
2830             # Version and file overwrite checks
2831             if overwrite_checks:
2832                 if entry["type"] == "deb":
2833                     self.check_binary_against_db(checkfile, session)
2834                 elif entry["type"] == "dsc":
2835                     self.check_source_against_db(checkfile, session)
2836                     self.check_dsc_against_db(dsc_filename, session)
2837
2838             # propogate in the case it is in the override tables:
2839             for suite in self.pkg.changes.get("propdistribution", {}).keys():
2840                 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2841                     propogate[suite] = 1
2842                 else:
2843                     nopropogate[suite] = 1
2844
2845         for suite in propogate.keys():
2846             if suite in nopropogate:
2847                 continue
2848             self.pkg.changes["distribution"][suite] = 1
2849
2850         for checkfile in self.pkg.files.keys():
2851             # Check the package is still in the override tables
2852             for suite in self.pkg.changes["distribution"].keys():
2853                 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2854                     self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2855
2856     ################################################################################
2857     # If any file of an upload has a recent mtime then chances are good
2858     # the file is still being uploaded.
2859
2860     def upload_too_new(self):
2861         cnf = Config()
2862         too_new = False
2863         # Move back to the original directory to get accurate time stamps
2864         cwd = os.getcwd()
2865         os.chdir(self.pkg.directory)
2866         file_list = self.pkg.files.keys()
2867         file_list.extend(self.pkg.dsc_files.keys())
2868         file_list.append(self.pkg.changes_file)
2869         for f in file_list:
2870             try:
2871                 last_modified = time.time()-os.path.getmtime(f)
2872                 if last_modified < int(cnf["Dinstall::SkipTime"]):
2873                     too_new = True
2874                     break
2875             except:
2876                 pass
2877
2878         os.chdir(cwd)
2879         return too_new
2880
2881     def store_changelog(self):
2882
2883         # Skip binary-only upload if it is not a bin-NMU
2884         if not self.pkg.changes['architecture'].has_key('source'):
2885             from daklib.regexes import re_bin_only_nmu
2886             if not re_bin_only_nmu.search(self.pkg.changes['version']):
2887                 return
2888
2889         session = DBConn().session()
2890
2891         # Check if upload already has a changelog entry
2892         query = """SELECT changelog_id FROM changes WHERE source = :source
2893                    AND version = :version AND architecture = :architecture AND changelog_id != 0"""
2894         if session.execute(query, {'source': self.pkg.changes['source'], \
2895                                    'version': self.pkg.changes['version'], \
2896                                    'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
2897             session.commit()
2898             return
2899
2900         # Add current changelog text into changelogs_text table, return created ID
2901         query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
2902         ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]
2903
2904         # Link ID to the upload available in changes table
2905         query = """UPDATE changes SET changelog_id = :id WHERE source = :source
2906                    AND version = :version AND architecture = :architecture"""
2907         session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
2908                                 'version': self.pkg.changes['version'], \
2909                                 'architecture': " ".join(self.pkg.changes['architecture'].keys())})
2910
2911         session.commit()