# Scraped gitweb header (not part of the original source file):
# git.decadent.org.uk Git - dak.git - daklib/queue.py
# Commit subject: "Mark dsc as invalid if we find any reject reasons"
1 #!/usr/bin/env python
2 # vim:set et sw=4:
3
4 """
5 Queue utility functions for dak
6
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009, 2010  Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
11 """
12
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
17
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
21 # GNU General Public License for more details.
22
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
26
27 ###############################################################################
28
29 import errno
30 import os
31 import stat
32 import sys
33 import time
34 import apt_inst
35 import apt_pkg
36 import utils
37 import commands
38 import shutil
39 import textwrap
40 from types import *
41 from sqlalchemy.sql.expression import desc
42 from sqlalchemy.orm.exc import NoResultFound
43
44 import yaml
45
46 from dak_exceptions import *
47 from changes import *
48 from regexes import *
49 from config import Config
50 from holding import Holding
51 from urgencylog import UrgencyLog
52 from dbconn import *
53 from summarystats import SummaryStats
54 from utils import parse_changes, check_dsc_files, build_package_set
55 from textutils import fix_maintainer
56 from lintian import parse_lintian_output, generate_reject_messages
57 from contents import UnpackedSource
58
59 ###############################################################################
60
def get_type(f, session):
    """
    Get the file type of C{f}

    @type f: dict
    @param f: file entry from Changes object

    @type session: SQLA Session
    @param session: SQL Alchemy session object

    @rtype: string
    @return: filetype

    """
    # Work out the type, preferring an explicit database type when present.
    if "dbtype" in f:
        file_type = f["dbtype"]
    elif re_source_ext.match(f["type"]):
        file_type = "dsc"
    elif f["architecture"] == "source" and f["type"] == "unreadable":
        utils.warn('unreadable source file (will continue and hope for the best)')
        return f["type"]
    else:
        file_type = f["type"]
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (file_type))

    # The type is only acceptable if it has a matching override type row.
    if get_override_type(file_type, session) is None:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))

    return file_type
93
94 ################################################################################
95
96 # Determine what parts in a .changes are NEW
97
98 def determine_new(filename, changes, files, warn=1, session = None, dsc = None, new = None):
99     """
100     Determine what parts in a C{changes} file are NEW.
101
102     @type filename: str
103     @param filename: changes filename
104
105     @type changes: Upload.Pkg.changes dict
106     @param changes: Changes dictionary
107
108     @type files: Upload.Pkg.files dict
109     @param files: Files dictionary
110
111     @type warn: bool
112     @param warn: Warn if overrides are added for (old)stable
113
114     @type dsc: Upload.Pkg.dsc dict
115     @param dsc: (optional); Dsc dictionary
116
117     @type new: dict
118     @param new: new packages as returned by a previous call to this function, but override information may have changed
119
120     @rtype: dict
121     @return: dictionary of NEW components.
122
123     """
124     # TODO: This should all use the database instead of parsing the changes
125     # file again
126     byhand = {}
127     if new is None:
128         new = {}
129
130     dbchg = get_dbchange(filename, session)
131     if dbchg is None:
132         print "Warning: cannot find changes file in database; won't check byhand"
133
134     # Try to get the Package-Set field from an included .dsc file (if possible).
135     if dsc:
136         for package, entry in build_package_set(dsc, session).items():
137             if not new.has_key(package):
138                 new[package] = entry
139
140     # Build up a list of potentially new things
141     for name, f in files.items():
142         # Keep a record of byhand elements
143         if f["section"] == "byhand":
144             byhand[name] = 1
145             continue
146
147         pkg = f["package"]
148         priority = f["priority"]
149         section = f["section"]
150         file_type = get_type(f, session)
151         component = f["component"]
152
153         if file_type == "dsc":
154             priority = "source"
155
156         if not new.has_key(pkg):
157             new[pkg] = {}
158             new[pkg]["priority"] = priority
159             new[pkg]["section"] = section
160             new[pkg]["type"] = file_type
161             new[pkg]["component"] = component
162             new[pkg]["files"] = []
163         else:
164             old_type = new[pkg]["type"]
165             if old_type != file_type:
166                 # source gets trumped by deb or udeb
167                 if old_type == "dsc":
168                     new[pkg]["priority"] = priority
169                     new[pkg]["section"] = section
170                     new[pkg]["type"] = file_type
171                     new[pkg]["component"] = component
172
173         new[pkg]["files"].append(name)
174
175         if f.has_key("othercomponents"):
176             new[pkg]["othercomponents"] = f["othercomponents"]
177
178     # Fix up the list of target suites
179     cnf = Config()
180     for suite in changes["suite"].keys():
181         oldsuite = get_suite(suite, session)
182         if not oldsuite:
183             print "WARNING: Invalid suite %s found" % suite
184             continue
185
186         if oldsuite.overridesuite:
187             newsuite = get_suite(oldsuite.overridesuite, session)
188
189             if newsuite:
190                 print "INFORMATION: Using overrides from suite %s instead of suite %s" % (
191                     oldsuite.overridesuite, suite)
192                 del changes["suite"][suite]
193                 changes["suite"][oldsuite.overridesuite] = 1
194             else:
195                 print "WARNING: Told to use overridesuite %s for %s but it doesn't exist.  Bugger" % (
196                     oldsuite.overridesuite, suite)
197
198     # Check for unprocessed byhand files
199     if dbchg is not None:
200         for b in byhand.keys():
201             # Find the file entry in the database
202             found = False
203             for f in dbchg.files:
204                 if f.filename == b:
205                     found = True
206                     # If it's processed, we can ignore it
207                     if f.processed:
208                         del byhand[b]
209                     break
210
211             if not found:
212                 print "Warning: Couldn't find BYHAND item %s in the database; assuming unprocessed"
213
214     # Check for new stuff
215     for suite in changes["suite"].keys():
216         for pkg in new.keys():
217             ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
218             if len(ql) > 0:
219                 for file_entry in new[pkg]["files"]:
220                     if files[file_entry].has_key("new"):
221                         del files[file_entry]["new"]
222                 del new[pkg]
223
224     if warn:
225         for s in ['stable', 'oldstable']:
226             if changes["suite"].has_key(s):
227                 print "WARNING: overrides will be added for %s!" % s
228         for pkg in new.keys():
229             if new[pkg].has_key("othercomponents"):
230                 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
231
232     return new, byhand
233
234 ################################################################################
235
def check_valid(new, session = None):
    """
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages can not be udeb
      - source priority can only be assigned to dsc file types

    An invalid section or priority is marked by setting the corresponding
    "section id" / "priority id" entry to -1.

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.

    """
    for pkg in new.keys():
        section_name = new[pkg]["section"]
        priority_name = new[pkg]["priority"]
        file_type = new[pkg]["type"]

        # Resolve the section name to its database id (-1 if unknown).
        section = get_section(section_name, session)
        if section is None:
            new[pkg]["section id"] = -1
        else:
            new[pkg]["section id"] = section.section_id

        # Resolve the priority name to its database id (-1 if unknown).
        priority = get_priority(priority_name, session)
        if priority is None:
            new[pkg]["priority id"] = -1
        else:
            new[pkg]["priority id"] = priority.priority_id

        # Sanity checks
        di = section_name.find("debian-installer") != -1

        # If d-i, we must be udeb and vice-versa
        if     (di and file_type not in ("udeb", "dsc")) or \
           (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1

        # If dsc we need to be source and vice-versa.
        # Bug fix: compare the priority *name*, not the ORM object returned by
        # get_priority() -- the object never equals the string "source", so
        # this check previously never fired.
        if (priority_name == "source" and file_type != "dsc") or \
           (priority_name != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1
277
278 ###############################################################################
279
# Used by Upload.check_timestamps
class TarTime(object):
    """Records tar members whose mtimes fall outside a sane window.

    Members newer than future_cutoff land in future_files; members older
    than past_cutoff land in ancient_files (both map name -> mtime).
    """

    def __init__(self, future_cutoff, past_cutoff):
        self.future_cutoff = future_cutoff
        self.past_cutoff = past_cutoff
        self.reset()

    def reset(self):
        """Forget any previously recorded offenders."""
        self.future_files = {}
        self.ancient_files = {}

    def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
        """Per-member tar callback; only Name and MTime are inspected."""
        if self.future_cutoff < MTime:
            self.future_files[Name] = MTime
        if self.past_cutoff > MTime:
            self.ancient_files[Name] = MTime
296
297 ###############################################################################
298
def prod_maintainer(notes, upload):
    """
    Interactively compose and send a "prod" mail about an upload.

    Seeds a temporary file with the comment text of each entry in C{notes}
    (objects with a C{comment} attribute), opens $EDITOR on it, and loops
    until the user picks [P]rod, [A]bandon or [Q]uit.  On Prod the message is
    substituted into the process-new.prod template and mailed.

    @param notes: sequence of objects whose .comment text seeds the editor
    @param upload: Upload whose Subst map supplies the template values
    """
    cnf = Config()

    # Here we prepare an editor and get them ready to prod...
    (fd, temp_filename) = utils.temp_filename()
    temp_file = os.fdopen(fd, 'w')
    for note in notes:
        temp_file.write(note.comment)
    temp_file.close()
    editor = os.environ.get("EDITOR","vi")
    answer = 'E'
    # Keep re-opening the editor until the user picks something other than Edit.
    while answer == 'E':
        os.system("%s %s" % (editor, temp_filename))
        temp_fh = utils.open_file(temp_filename)
        prod_message = "".join(temp_fh.readlines())
        temp_fh.close()
        print "Prod message:"
        print utils.prefix_multi_line_string(prod_message,"  ",include_blank_lines=1)
        prompt = "[P]rod, Edit, Abandon, Quit ?"
        answer = "XXX"
        # An empty answer selects the default (the bracketed letter in the prompt).
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            m = re_default_answer.search(prompt)
            if answer == "":
                answer = m.group(1)
            answer = answer[:1].upper()
    os.unlink(temp_filename)
    if answer == 'A':
        return
    elif answer == 'Q':
        end()
        sys.exit(0)
    # Otherwise, do the proding...
    user_email_address = utils.whoami() + " <%s>" % (
        cnf["Dinstall::MyAdminAddress"])

    Subst = upload.Subst

    Subst["__FROM_ADDRESS__"] = user_email_address
    Subst["__PROD_MESSAGE__"] = prod_message
    Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]

    prod_mail_message = utils.TemplateSubst(
        Subst,cnf["Dir::Templates"]+"/process-new.prod")

    # Send the prod mail
    utils.send_mail(prod_mail_message)

    print "Sent prodding message"
348
349 ################################################################################
350
def edit_note(note, upload, session, trainee=False):
    """
    Interactively create a NEW-queue note for an upload.

    Opens $EDITOR on an empty temporary file and loops until the user picks
    [D]one, [A]bandon or [Q]uit.  On Done, the text is stored as a NewComment
    row keyed on the upload's source and version.

    @param note: NOTE(review): accepted but never read in this function --
        the editor always starts from an empty file; confirm callers
    @param upload: Upload whose changes["source"]/["version"] key the comment
    @param session: SQLAlchemy session used to persist the comment
    @param trainee: recorded on the comment as the author's trainee flag
    """
    # Write the current data to a temporary file
    (fd, temp_filename) = utils.temp_filename()
    editor = os.environ.get("EDITOR","vi")
    answer = 'E'
    # Keep re-opening the editor until the user picks something other than Edit.
    while answer == 'E':
        os.system("%s %s" % (editor, temp_filename))
        temp_file = utils.open_file(temp_filename)
        newnote = temp_file.read().rstrip()
        temp_file.close()
        print "New Note:"
        print utils.prefix_multi_line_string(newnote,"  ")
        prompt = "[D]one, Edit, Abandon, Quit ?"
        answer = "XXX"
        # An empty answer selects the default (the bracketed letter in the prompt).
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            m = re_default_answer.search(prompt)
            if answer == "":
                answer = m.group(1)
            answer = answer[:1].upper()
    os.unlink(temp_filename)
    if answer == 'A':
        return
    elif answer == 'Q':
        end()
        sys.exit(0)

    comment = NewComment()
    comment.package = upload.pkg.changes["source"]
    comment.version = upload.pkg.changes["version"]
    comment.comment = newnote
    comment.author  = utils.whoami()
    comment.trainee = trainee
    session.add(comment)
    session.commit()
386
387 ###############################################################################
388
# Suite names that Debian Maintainers (DMs) are allowed to upload to.
dm_suites = ['unstable', 'experimental']
391
def get_newest_source(source, session):
    """Return the newest DBSource for *source* in dm_suites (or None)."""
    ## the most recent version of the package uploaded to unstable or
    ## experimental includes the field "DM-Upload-Allowed: yes" in the source
    ## section of its control file
    query = session.query(DBSource)
    query = query.filter_by(source = source)
    query = query.filter(DBSource.suites.any(Suite.suite_name.in_(dm_suites)))
    return query.order_by(desc('source.version')).first()
401
def get_suite_version_by_source(source, session):
    """Return a list of (suite_name, version) tuples for a source package."""
    rows = session.query(Suite.suite_name, DBSource.version)
    rows = rows.join(Suite.sources).filter_by(source = source)
    return rows.all()
407
def get_source_by_package_and_suite(package, suite_name, session):
    '''
    returns a DBSource query filtered by DBBinary.package and this package's
    suite_name
    '''
    q = session.query(DBSource)
    q = q.join(DBSource.binaries).filter_by(package = package)
    q = q.join(DBBinary.suites).filter_by(suite_name = suite_name)
    return q
416
def get_suite_version_by_package(package, arch_string, session):
    '''
    returns a list of tuples (suite_name, version) for binary package and
    arch_string
    '''
    # Architecture-independent packages are published as 'all'.
    arches = [arch_string, 'all']
    q = session.query(Suite.suite_name, DBBinary.version)
    q = q.join(Suite.binaries).filter_by(package = package)
    q = q.join(DBBinary.architecture)
    return q.filter(Architecture.arch_string.in_(arches)).all()
426
427 class Upload(object):
428     """
429     Everything that has to do with an upload processed.
430
431     """
432     def __init__(self):
433         self.logger = None
434         self.pkg = Changes()
435         self.reset()
436
437     ###########################################################################
438
439     def reset (self):
440         """ Reset a number of internal variables."""
441
442         # Initialize the substitution template map
443         cnf = Config()
444         self.Subst = {}
445         self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
446         self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
447         self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
448         self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
449
450         self.rejects = []
451         self.warnings = []
452         self.notes = []
453
454         self.later_check_files = []
455
456         self.pkg.reset()
457
458     def package_info(self):
459         """
460         Format various messages from this Upload to send to the maintainer.
461         """
462
463         msgs = (
464             ('Reject Reasons', self.rejects),
465             ('Warnings', self.warnings),
466             ('Notes', self.notes),
467         )
468
469         msg = ''
470         for title, messages in msgs:
471             if messages:
472                 msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
473         msg += '\n\n'
474
475         return msg
476
477     ###########################################################################
    def update_subst(self):
        """ Set up the per-package template substitution mappings.

        Fills self.Subst with architecture, filename, maintainer, suite and
        reject-message values derived from self.pkg.changes, applying the
        Changed-By-wins rule for source uploads, sponsor CC, tracking-server
        Bcc and any global maintainer override from the configuration.
        """

        cnf = Config()

        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not self.pkg.changes.has_key("architecture") or not \
           isinstance(self.pkg.changes["architecture"], dict):
            self.pkg.changes["architecture"] = { "Unknown" : "" }

        # and maintainer2047 may not exist.
        if not self.pkg.changes.has_key("maintainer2047"):
            self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]

        self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
        self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if self.pkg.changes["architecture"].has_key("source") and \
           self.pkg.changes["changedby822"] != "" and \
           (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):

            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
        else:
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")

        # Process policy doesn't set the fingerprint field and I don't want to make it
        # do it for now as I don't want to have to deal with the case where we accepted
        # the package into PU-NEW, but the fingerprint has gone away from the keyring in
        # the meantime so the package will be remarked as rejectable.  Urgh.
        # TODO: Fix this properly
        if self.pkg.changes.has_key('fingerprint'):
            session = DBConn().session()
            fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
            # Sponsored uploads also get the sponsor's address added to the
            # recipient list when a sponsoremail field is present.
            if fpr and self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
                if self.pkg.changes.has_key("sponsoremail"):
                    self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
            session.close()

        # Bcc the package-tracking address when a tracking server is configured.
        if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
            self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if cnf.get("Dinstall::OverrideMaintainer"):
            self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
            self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]

        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
        self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
        self.Subst["__SUITE__"] = ", ".join(self.pkg.changes["distribution"])
534
535     ###########################################################################
    def load_changes(self, filename):
        """
        Load a changes file and setup a dictionary around it. Also checks for
        mandatory fields within.

        @type filename: string
        @param filename: Changes filename, full path.

        @rtype: boolean
        @return: whether the changes file was valid or not.  We may want to
                 reject even if this is True (see what gets put in self.rejects).
                 This is simply to prevent us even trying things later which will
                 fail because we couldn't properly parse the file.
        """
        Cnf = Config()
        self.pkg.changes_file = filename

        # Parse the .changes field into a dictionary
        try:
            self.pkg.changes.update(parse_changes(filename))
        except CantOpenError:
            self.rejects.append("%s: can't read file." % (filename))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except ChangesUnicodeError:
            self.rejects.append("%s: changes file not proper utf-8" % (filename))
            return False

        # Parse the Files field from the .changes into another dictionary
        try:
            self.pkg.files.update(utils.build_file_list(self.pkg.changes))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (filename, format))
            return False

        # Check for mandatory fields; a missing one means we cannot proceed at all.
        for i in ("distribution", "source", "binary", "architecture",
                  "version", "maintainer", "files", "changes", "description"):
            if not self.pkg.changes.has_key(i):
                # Avoid undefined errors later
                self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
                return False

        # Strip a source version in brackets from the source field
        if re_strip_srcver.search(self.pkg.changes["source"]):
            self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])

        # Ensure the source field is a valid package name.
        if not re_valid_pkg_name.match(self.pkg.changes["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))

        # Split multi-value fields into a lower-level dictionary
        # (each whitespace-separated word becomes a key mapped to 1).
        for i in ("architecture", "distribution", "binary", "closes"):
            o = self.pkg.changes.get(i, "")
            if o != "":
                del self.pkg.changes[i]

            self.pkg.changes[i] = {}

            for j in o.split():
                self.pkg.changes[i][j] = 1

        # Fix the Maintainer: field to be RFC822/2047 compatible
        try:
            (self.pkg.changes["maintainer822"],
             self.pkg.changes["maintainer2047"],
             self.pkg.changes["maintainername"],
             self.pkg.changes["maintaineremail"]) = \
                   fix_maintainer (self.pkg.changes["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["maintainer"], msg))

        # ...likewise for the Changed-By: field if it exists.
        try:
            (self.pkg.changes["changedby822"],
             self.pkg.changes["changedby2047"],
             self.pkg.changes["changedbyname"],
             self.pkg.changes["changedbyemail"]) = \
                   fix_maintainer (self.pkg.changes.get("changed-by", ""))
        except ParseMaintError, msg:
            # Blank the derived fields so later code can rely on them existing.
            self.pkg.changes["changedby822"] = ""
            self.pkg.changes["changedby2047"] = ""
            self.pkg.changes["changedbyname"] = ""
            self.pkg.changes["changedbyemail"] = ""

            # NOTE(review): indexes changes["changed-by"] directly while the
            # try-block used .get(..., "") -- if the field is absent this would
            # KeyError; confirm fix_maintainer("") cannot raise.
            self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["changed-by"], msg))

        # Ensure all the values in Closes: are numbers
        if self.pkg.changes.has_key("closes"):
            for i in self.pkg.changes["closes"].keys():
                if re_isanum.match (i) == None:
                    self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))

        # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
        self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
        self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])

        # Check the .changes is non-empty
        if not self.pkg.files:
            self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
            return False

        # Changes was syntactically valid even if we'll reject
        return True
647
648     ###########################################################################
649
    def check_distributions(self):
        """Check and map the Distribution field.

        Applies each configured "SuiteMappings" rule in order (map,
        silent-map, map-unreleased, ignore, reject, propup-version), then
        verifies that at least one known target suite remains.  Problems are
        recorded in self.rejects / self.warnings / self.notes.
        """

        Cnf = Config()

        # Handle suite mappings
        for m in Cnf.ValueList("SuiteMappings"):
            args = m.split()
            mtype = args[0]
            if mtype == "map" or mtype == "silent-map":
                # Unconditionally rename target suite `source' to `dest'.
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    del self.pkg.changes["distribution"][source]
                    self.pkg.changes["distribution"][dest] = 1
                    if mtype != "silent-map":
                        self.notes.append("Mapping %s to %s." % (source, dest))
                # Keep any distribution-version entry in step with the rename.
                if self.pkg.changes.has_key("distribution-version"):
                    if self.pkg.changes["distribution-version"].has_key(source):
                        self.pkg.changes["distribution-version"][source]=dest
            elif mtype == "map-unreleased":
                # Rename only if the upload carries an architecture the
                # source suite does not have.
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    for arch in self.pkg.changes["architecture"].keys():
                        if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
                            self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
                            del self.pkg.changes["distribution"][source]
                            self.pkg.changes["distribution"][dest] = 1
                            break
            elif mtype == "ignore":
                # Drop the suite from the target list with a warning.
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    del self.pkg.changes["distribution"][suite]
                    self.warnings.append("Ignoring %s as a target suite." % (suite))
            elif mtype == "reject":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    self.rejects.append("Uploads to %s are not accepted." % (suite))
            elif mtype == "propup-version":
                # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
                #
                # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
                if self.pkg.changes["distribution"].has_key(args[1]):
                    self.pkg.changes.setdefault("distribution-version", {})
                    for suite in args[2:]:
                        self.pkg.changes["distribution-version"][suite] = suite

        # Ensure there is (still) a target distribution
        if len(self.pkg.changes["distribution"].keys()) < 1:
            self.rejects.append("No valid distribution remaining.")

        # Ensure target distributions exist
        for suite in self.pkg.changes["distribution"].keys():
            if not Cnf.has_key("Suite::%s" % (suite)):
                self.rejects.append("Unknown distribution `%s'." % (suite))
704
705     ###########################################################################
706
707     def binary_file_checks(self, f, session):
708         cnf = Config()
709         entry = self.pkg.files[f]
710
711         # Extract package control information
712         deb_file = utils.open_file(f)
713         try:
714             control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
715         except:
716             self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
717             deb_file.close()
718             # Can't continue, none of the checks on control would work.
719             return
720
721         # Check for mandantory "Description:"
722         deb_file.seek(0)
723         try:
724             apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
725         except:
726             self.rejects.append("%s: Missing Description in binary package" % (f))
727             return
728
729         deb_file.close()
730
731         # Check for mandatory fields
732         for field in [ "Package", "Architecture", "Version" ]:
733             if control.Find(field) == None:
734                 # Can't continue
735                 self.rejects.append("%s: No %s field in control." % (f, field))
736                 return
737
738         # Ensure the package name matches the one give in the .changes
739         if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
740             self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
741
742         # Validate the package field
743         package = control.Find("Package")
744         if not re_valid_pkg_name.match(package):
745             self.rejects.append("%s: invalid package name '%s'." % (f, package))
746
747         # Validate the version field
748         version = control.Find("Version")
749         if not re_valid_version.match(version):
750             self.rejects.append("%s: invalid version number '%s'." % (f, version))
751
752         # Ensure the architecture of the .deb is one we know about.
753         default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
754         architecture = control.Find("Architecture")
755         upload_suite = self.pkg.changes["distribution"].keys()[0]
756
757         if      architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session = session)] \
758             and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session = session)]:
759             self.rejects.append("Unknown architecture '%s'." % (architecture))
760
761         # Ensure the architecture of the .deb is one of the ones
762         # listed in the .changes.
763         if not self.pkg.changes["architecture"].has_key(architecture):
764             self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
765
766         # Sanity-check the Depends field
767         depends = control.Find("Depends")
768         if depends == '':
769             self.rejects.append("%s: Depends field is empty." % (f))
770
771         # Sanity-check the Provides field
772         provides = control.Find("Provides")
773         if provides:
774             provide = re_spacestrip.sub('', provides)
775             if provide == '':
776                 self.rejects.append("%s: Provides field is empty." % (f))
777             prov_list = provide.split(",")
778             for prov in prov_list:
779                 if not re_valid_pkg_name.match(prov):
780                     self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
781
782         # If there is a Built-Using field, we need to check we can find the
783         # exact source version
784         built_using = control.Find("Built-Using")
785         if built_using:
786             try:
787                 entry["built-using"] = []
788                 for dep in apt_pkg.parse_depends(built_using):
789                     bu_s, bu_v, bu_e = dep[0]
790                     # Check that it's an exact match dependency and we have
791                     # some form of version
792                     if bu_e != "=" or len(bu_v) < 1:
793                         self.rejects.append("%s: Built-Using contains non strict dependency (%s %s %s)" % (f, bu_s, bu_e, bu_v))
794                     else:
795                         # Find the source id for this version
796                         bu_so = get_sources_from_name(bu_s, version=bu_v, session = session)
797                         if len(bu_so) != 1:
798                             self.rejects.append("%s: Built-Using (%s = %s): Cannot find source package" % (f, bu_s, bu_v))
799                         else:
800                             entry["built-using"].append( (bu_so[0].source, bu_so[0].version, ) )
801
802             except ValueError, e:
803                 self.rejects.append("%s: Cannot parse Built-Using field: %s" % (f, str(e)))
804
805
806         # Check the section & priority match those given in the .changes (non-fatal)
807         if     control.Find("Section") and entry["section"] != "" \
808            and entry["section"] != control.Find("Section"):
809             self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
810                                 (f, control.Find("Section", ""), entry["section"]))
811         if control.Find("Priority") and entry["priority"] != "" \
812            and entry["priority"] != control.Find("Priority"):
813             self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
814                                 (f, control.Find("Priority", ""), entry["priority"]))
815
816         entry["package"] = package
817         entry["architecture"] = architecture
818         entry["version"] = version
819         entry["maintainer"] = control.Find("Maintainer", "")
820
821         if f.endswith(".udeb"):
822             self.pkg.files[f]["dbtype"] = "udeb"
823         elif f.endswith(".deb"):
824             self.pkg.files[f]["dbtype"] = "deb"
825         else:
826             self.rejects.append("%s is neither a .deb or a .udeb." % (f))
827
828         entry["source"] = control.Find("Source", entry["package"])
829
830         # Get the source version
831         source = entry["source"]
832         source_version = ""
833
834         if source.find("(") != -1:
835             m = re_extract_src_version.match(source)
836             source = m.group(1)
837             source_version = m.group(2)
838
839         if not source_version:
840             source_version = self.pkg.files[f]["version"]
841
842         entry["source package"] = source
843         entry["source version"] = source_version
844
845         # Ensure the filename matches the contents of the .deb
846         m = re_isadeb.match(f)
847
848         #  package name
849         file_package = m.group(1)
850         if entry["package"] != file_package:
851             self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
852                                 (f, file_package, entry["dbtype"], entry["package"]))
853         epochless_version = re_no_epoch.sub('', control.Find("Version"))
854
855         #  version
856         file_version = m.group(2)
857         if epochless_version != file_version:
858             self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
859                                 (f, file_version, entry["dbtype"], epochless_version))
860
861         #  architecture
862         file_architecture = m.group(3)
863         if entry["architecture"] != file_architecture:
864             self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
865                                 (f, file_architecture, entry["dbtype"], entry["architecture"]))
866
867         # Check for existent source
868         source_version = entry["source version"]
869         source_package = entry["source package"]
870         if self.pkg.changes["architecture"].has_key("source"):
871             if source_version != self.pkg.changes["version"]:
872                 self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
873                                     (source_version, f, self.pkg.changes["version"]))
874         else:
875             # Check in the SQL database
876             if not source_exists(source_package, source_version, suites = \
877                 self.pkg.changes["distribution"].keys(), session = session):
878                 # Check in one of the other directories
879                 source_epochless_version = re_no_epoch.sub('', source_version)
880                 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
881                 if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
882                     entry["byhand"] = 1
883                 elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
884                     entry["new"] = 1
885                 else:
886                     dsc_file_exists = False
887                     for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
888                         if cnf.has_key("Dir::Queue::%s" % (myq)):
889                             if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
890                                 dsc_file_exists = True
891                                 break
892
893                     if not dsc_file_exists:
894                         self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
895
896         # Check the version and for file overwrites
897         self.check_binary_against_db(f, session)
898
899     def source_file_checks(self, f, session):
900         entry = self.pkg.files[f]
901
902         m = re_issource.match(f)
903         if not m:
904             return
905
906         entry["package"] = m.group(1)
907         entry["version"] = m.group(2)
908         entry["type"] = m.group(3)
909
910         # Ensure the source package name matches the Source filed in the .changes
911         if self.pkg.changes["source"] != entry["package"]:
912             self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
913
914         # Ensure the source version matches the version in the .changes file
915         if re_is_orig_source.match(f):
916             changes_version = self.pkg.changes["chopversion2"]
917         else:
918             changes_version = self.pkg.changes["chopversion"]
919
920         if changes_version != entry["version"]:
921             self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))
922
923         # Ensure the .changes lists source in the Architecture field
924         if not self.pkg.changes["architecture"].has_key("source"):
925             self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))
926
927         # Check the signature of a .dsc file
928         if entry["type"] == "dsc":
929             # check_signature returns either:
930             #  (None, [list, of, rejects]) or (signature, [])
931             (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
932             for j in rejects:
933                 self.rejects.append(j)
934
935         entry["architecture"] = "source"
936
937     def per_suite_file_checks(self, f, suite, session):
938         cnf = Config()
939         entry = self.pkg.files[f]
940
941         # Skip byhand
942         if entry.has_key("byhand"):
943             return
944
945         # Check we have fields we need to do these checks
946         oktogo = True
947         for m in ['component', 'package', 'priority', 'size', 'md5sum']:
948             if not entry.has_key(m):
949                 self.rejects.append("file '%s' does not have field %s set" % (f, m))
950                 oktogo = False
951
952         if not oktogo:
953             return
954
955         # Handle component mappings
956         for m in cnf.ValueList("ComponentMappings"):
957             (source, dest) = m.split()
958             if entry["component"] == source:
959                 entry["original component"] = source
960                 entry["component"] = dest
961
962         # Ensure the component is valid for the target suite
963         if cnf.has_key("Suite:%s::Components" % (suite)) and \
964            entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
965             self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
966             return
967
968         # Validate the component
969         if not get_component(entry["component"], session):
970             self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
971             return
972
973         # See if the package is NEW
974         if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
975             entry["new"] = 1
976
977         # Validate the priority
978         if entry["priority"].find('/') != -1:
979             self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))
980
981         # Determine the location
982         location = cnf["Dir::Pool"]
983         l = get_location(location, entry["component"], session=session)
984         if l is None:
985             self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
986             entry["location id"] = -1
987         else:
988             entry["location id"] = l.location_id
989
990         # Check the md5sum & size against existing files (if any)
991         entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])
992
993         found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
994                                          entry["size"], entry["md5sum"], entry["location id"])
995
996         if found is None:
997             self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
998         elif found is False and poolfile is not None:
999             self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
1000         else:
1001             if poolfile is None:
1002                 entry["files id"] = None
1003             else:
1004                 entry["files id"] = poolfile.file_id
1005
1006         # Check for packages that have moved from one component to another
1007         entry['suite'] = suite
1008         arch_list = [entry["architecture"], 'all']
1009         component = get_component_by_package_suite(self.pkg.files[f]['package'], \
1010             [suite], arch_list = arch_list, session = session)
1011         if component is not None:
1012             entry["othercomponents"] = component
1013
    def check_files(self, action=True):
        """
        Check every file listed in the upload.

        When action is True, first copies the files into the holding area.
        Rejects uploads whose .changes is already known to dak, classifies
        each file as binary / source / byhand and dispatches to the
        corresponding *_file_checks() routine, then runs per-suite checks.
        Problems are appended to self.rejects / self.warnings.

        @param action: when True, copy files to holding (and reject unreadable
                       files immediately); when False (-n mode), unreadable
                       files are deferred to later_check_files instead
        """
        file_keys = self.pkg.files.keys()
        holding = Holding()
        cnf = Config()

        if action:
            # Copy each file into the holding area; a failure is only a
            # warning here as the file may be found in the DB/pool later.
            cwd = os.getcwd()
            os.chdir(self.pkg.directory)
            for f in file_keys:
                ret = holding.copy_to_holding(f)
                if ret is not None:
                    self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)

            os.chdir(cwd)

        # check we already know the changes file
        # [NB: this check must be done post-suite mapping]
        base_filename = os.path.basename(self.pkg.changes_file)

        session = DBConn().session()

        try:
            dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
            # if in the pool or in a queue other than unchecked, reject
            if (dbc.in_queue is None) \
                   or (dbc.in_queue is not None
                       and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
                self.rejects.append("%s file already known to dak" % base_filename)
        except NoResultFound, e:
            # not known, good
            pass

        has_binaries = False
        has_source = False

        for f, entry in self.pkg.files.items():
            # Ensure the file does not already exist in one of the accepted directories
            for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                if not cnf.has_key("Dir::Queue::%s" % (d)): continue
                if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
                    self.rejects.append("%s file already exists in the %s directory." % (f, d))

            if not re_taint_free.match(f):
                self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))

            # Check the file is readable
            if os.access(f, os.R_OK) == 0:
                # When running in -n, copy_to_holding() won't have
                # generated the reject_message, so we need to.
                if action:
                    if os.path.exists(f):
                        self.rejects.append("Can't read `%s'. [permission denied]" % (f))
                    else:
                        # Don't directly reject, mark to check later to deal with orig's
                        # we can find in the pool
                        self.later_check_files.append(f)
                entry["type"] = "unreadable"
                continue

            # If it's byhand skip remaining checks
            if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Checks for a binary package...
            elif re_isadeb.match(f):
                has_binaries = True
                entry["type"] = "deb"

                # This routine appends to self.rejects/warnings as appropriate
                self.binary_file_checks(f, session)

            # Checks for a source package...
            elif re_issource.match(f):
                has_source = True

                # This routine appends to self.rejects/warnings as appropriate
                self.source_file_checks(f, session)

            # Not a binary or source package?  Assume byhand...
            else:
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Per-suite file checks
            entry["oldfiles"] = {}
            for suite in self.pkg.changes["distribution"].keys():
                self.per_suite_file_checks(f, suite, session)

        session.close()

        # If the .changes file says it has source, it must have source.
        if self.pkg.changes["architecture"].has_key("source"):
            if not has_source:
                self.rejects.append("no source found and Architecture line in changes mention source.")

            if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
                self.rejects.append("source only uploads are not supported.")
1112
1113     ###########################################################################
1114
1115     def __dsc_filename(self):
1116         """
1117         Returns: (Status, Dsc_Filename)
1118         where
1119           Status: Boolean; True when there was no error, False otherwise
1120           Dsc_Filename: String; name of the dsc file if Status is True, reason for the error otherwise
1121         """
1122         dsc_filename = None
1123
1124         # find the dsc
1125         for name, entry in self.pkg.files.items():
1126             if entry.has_key("type") and entry["type"] == "dsc":
1127                 if dsc_filename:
1128                     return False, "cannot process a .changes file with multiple .dsc's."
1129                 else:
1130                     dsc_filename = name
1131
1132         if not dsc_filename:
1133             return False, "source uploads must contain a dsc file"
1134
1135         return True, dsc_filename
1136
1137     def load_dsc(self, action=True, signing_rules=1):
1138         """
1139         Find and load the dsc from self.pkg.files into self.dsc
1140
1141         Returns: (Status, Reason)
1142         where
1143           Status: Boolean; True when there was no error, False otherwise
1144           Reason: String; When Status is False this describes the error
1145         """
1146
1147         # find the dsc
1148         (status, dsc_filename) = self.__dsc_filename()
1149         if not status:
1150             # If status is false, dsc_filename has the reason
1151             return False, dsc_filename
1152
1153         try:
1154             self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=signing_rules, dsc_file=1))
1155         except CantOpenError:
1156             if not action:
1157                 return False, "%s: can't read file." % (dsc_filename)
1158         except ParseChangesError, line:
1159             return False, "%s: parse error, can't grok: %s." % (dsc_filename, line)
1160         except InvalidDscError, line:
1161             return False, "%s: syntax error on line %s." % (dsc_filename, line)
1162         except ChangesUnicodeError:
1163             return False, "%s: dsc file not proper utf-8." % (dsc_filename)
1164
1165         return True, None
1166
1167     ###########################################################################
1168
1169     def check_dsc(self, action=True, session=None):
1170         """Returns bool indicating whether or not the source changes are valid"""
1171         # Ensure there is source to check
1172         if not self.pkg.changes["architecture"].has_key("source"):
1173             return True
1174
1175         (status, reason) = self.load_dsc(action=action)
1176         if not status:
1177             self.rejects.append(reason)
1178             return False
1179         (status, dsc_filename) = self.__dsc_filename()
1180         if not status:
1181             # If status is false, dsc_filename has the reason
1182             self.rejects.append(dsc_filename)
1183             return False
1184
1185         # Build up the file list of files mentioned by the .dsc
1186         try:
1187             self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
1188         except NoFilesFieldError:
1189             self.rejects.append("%s: no Files: field." % (dsc_filename))
1190             return False
1191         except UnknownFormatError, format:
1192             self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
1193             return False
1194         except ParseChangesError, line:
1195             self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
1196             return False
1197
1198         # Enforce mandatory fields
1199         for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
1200             if not self.pkg.dsc.has_key(i):
1201                 self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
1202                 return False
1203
1204         # Validate the source and version fields
1205         if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
1206             self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
1207         if not re_valid_version.match(self.pkg.dsc["version"]):
1208             self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))
1209
1210         # Only a limited list of source formats are allowed in each suite
1211         for dist in self.pkg.changes["distribution"].keys():
1212             allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
1213             if self.pkg.dsc["format"] not in allowed:
1214                 self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))
1215
1216         # Validate the Maintainer field
1217         try:
1218             # We ignore the return value
1219             fix_maintainer(self.pkg.dsc["maintainer"])
1220         except ParseMaintError, msg:
1221             self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
1222                                  % (dsc_filename, self.pkg.dsc["maintainer"], msg))
1223
1224         # Validate the build-depends field(s)
1225         for field_name in [ "build-depends", "build-depends-indep" ]:
1226             field = self.pkg.dsc.get(field_name)
1227             if field:
1228                 # Have apt try to parse them...
1229                 try:
1230                     apt_pkg.ParseSrcDepends(field)
1231                 except:
1232                     self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))
1233
1234         # Ensure the version number in the .dsc matches the version number in the .changes
1235         epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1236         changes_version = self.pkg.files[dsc_filename]["version"]
1237
1238         if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
1239             self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
1240
1241         # Ensure the Files field contain only what's expected
1242         self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))
1243
1244         # Ensure source is newer than existing source in target suites
1245         session = DBConn().session()
1246         self.check_source_against_db(dsc_filename, session)
1247         self.check_dsc_against_db(dsc_filename, session)
1248
1249         dbchg = get_dbchange(self.pkg.changes_file, session)
1250
1251         # Finally, check if we're missing any files
1252         for f in self.later_check_files:
1253             print 'XXX: %s' % f
1254             # Check if we've already processed this file if we have a dbchg object
1255             ok = False
1256             if dbchg:
1257                 for pf in dbchg.files:
1258                     if pf.filename == f and pf.processed:
1259                         self.notes.append('%s was already processed so we can go ahead' % f)
1260                         ok = True
1261                         del self.pkg.files[f]
1262             if not ok:
1263                 self.rejects.append("Could not find file %s references in changes" % f)
1264
1265         session.close()
1266
1267         return (len(rejects) == 0)
1268
1269     ###########################################################################
1270
    def get_changelog_versions(self, source_dir):
        """Extracts the source package and (optionally) grabs the
        version history out of debian/changelog for the BTS.

        Symlinks the upload's source files (and any pool-resident orig
        tarballs) into the current working directory, unpacks the source
        with dpkg-source, and stores the matching changelog version lines
        in self.pkg.dsc["bts changelog"].  Failures are recorded in
        self.rejects.

        @param source_dir: directory holding the uploaded source files
        """

        cnf = Config()

        # Find the .dsc (again)
        dsc_filename = None
        for f in self.pkg.files.keys():
            if self.pkg.files[f]["type"] == "dsc":
                dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            return

        # Create a symlink mirror of the source files in our temporary directory
        for f in self.pkg.files.keys():
            m = re_issource.match(f)
            if m:
                src = os.path.join(source_dir, f)
                # If a file is missing for whatever reason, give up.
                if not os.path.exists(src):
                    return
                # NOTE(review): ftype is assigned but never used below.
                ftype = m.group(3)
                # Orig tarballs already present in the pool are linked in
                # the next loop instead.
                if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
                   self.pkg.orig_files[f].has_key("path"):
                    continue
                dest = os.path.join(os.getcwd(), f)
                os.symlink(src, dest)

        # If the orig files are not a part of the upload, create symlinks to the
        # existing copies.
        for orig_file in self.pkg.orig_files.keys():
            if not self.pkg.orig_files[orig_file].has_key("path"):
                continue
            dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
            os.symlink(self.pkg.orig_files[orig_file]["path"], dest)

        # Extract the source
        try:
            unpacked = UnpackedSource(dsc_filename)
        except:
            self.rejects.append("'dpkg-source -x' failed for %s." % dsc_filename)
            return

        # Version tracking is optional; without the config key we are done.
        if not cnf.Find("Dir::Queue::BTSVersionTrack"):
            return

        # Get the upstream version
        upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        if re_strip_revision.search(upstr_version):
            upstr_version = re_strip_revision.sub('', upstr_version)

        # Ensure the changelog file exists
        changelog_file = unpacked.get_changelog_file()
        if changelog_file is None:
            self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
            return

        # Parse the changelog
        self.pkg.dsc["bts changelog"] = ""
        for line in changelog_file.readlines():
            m = re_changelog_versions.match(line)
            if m:
                self.pkg.dsc["bts changelog"] += line
        changelog_file.close()
        unpacked.cleanup()

        # Check we found at least one revision in the changelog
        if not self.pkg.dsc["bts changelog"]:
            self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1343
1344     def check_source(self):
1345         # Bail out if:
1346         #    a) there's no source
1347         if not self.pkg.changes["architecture"].has_key("source"):
1348             return
1349
1350         tmpdir = utils.temp_dirname()
1351
1352         # Move into the temporary directory
1353         cwd = os.getcwd()
1354         os.chdir(tmpdir)
1355
1356         # Get the changelog version history
1357         self.get_changelog_versions(cwd)
1358
1359         # Move back and cleanup the temporary tree
1360         os.chdir(cwd)
1361
1362         try:
1363             shutil.rmtree(tmpdir)
1364         except OSError, e:
1365             if e.errno != errno.EACCES:
1366                 print "foobar"
1367                 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1368
1369             self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1370             # We probably have u-r or u-w directories so chmod everything
1371             # and try again.
1372             cmd = "chmod -R u+rwx %s" % (tmpdir)
1373             result = os.system(cmd)
1374             if result != 0:
1375                 utils.fubar("'%s' failed with result %s." % (cmd, result))
1376             shutil.rmtree(tmpdir)
1377         except Exception, e:
1378             print "foobar2 (%s)" % e
1379             utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1380
1381     ###########################################################################
1382     def ensure_hashes(self):
1383         # Make sure we recognise the format of the Files: field in the .changes
1384         format = self.pkg.changes.get("format", "0.0").split(".", 1)
1385         if len(format) == 2:
1386             format = int(format[0]), int(format[1])
1387         else:
1388             format = int(float(format[0])), 0
1389
1390         # We need to deal with the original changes blob, as the fields we need
1391         # might not be in the changes dict serialised into the .dak anymore.
1392         orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1393
1394         # Copy the checksums over to the current changes dict.  This will keep
1395         # the existing modifications to it intact.
1396         for field in orig_changes:
1397             if field.startswith('checksums-'):
1398                 self.pkg.changes[field] = orig_changes[field]
1399
1400         # Check for unsupported hashes
1401         for j in utils.check_hash_fields(".changes", self.pkg.changes):
1402             self.rejects.append(j)
1403
1404         for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1405             self.rejects.append(j)
1406
1407         # We have to calculate the hash if we have an earlier changes version than
1408         # the hash appears in rather than require it exist in the changes file
1409         for hashname, hashfunc, version in utils.known_hashes:
1410             # TODO: Move _ensure_changes_hash into this class
1411             for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1412                 self.rejects.append(j)
1413             if "source" in self.pkg.changes["architecture"]:
1414                 # TODO: Move _ensure_dsc_hash into this class
1415                 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1416                     self.rejects.append(j)
1417
1418     def check_hashes(self):
1419         for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1420             self.rejects.append(m)
1421
1422         for m in utils.check_size(".changes", self.pkg.files):
1423             self.rejects.append(m)
1424
1425         for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1426             self.rejects.append(m)
1427
1428         for m in utils.check_size(".dsc", self.pkg.dsc_files):
1429             self.rejects.append(m)
1430
1431         self.ensure_hashes()
1432
1433     ###########################################################################
1434
    def ensure_orig(self, target_dir='.', session=None):
        """
        Ensures that all orig files mentioned in the changes file are present
        in target_dir. If they do not exist, they are symlinked into place.

        A list containing the symlinks that were created is returned (so they
        can be removed).

        @type target_dir: string
        @param target_dir: directory the orig files should end up in

        @type session: SQLAlchemy session or None
        @param session: optional session reused for pool lookups; when None a
            private session is opened and closed around each lookup
        """

        symlinked = []
        cnf = Config()

        for filename, entry in self.pkg.dsc_files.iteritems():
            if not re_is_orig_source.match(filename):
                # File is not an orig; ignore
                continue

            if os.path.exists(filename):
                # File exists, no need to continue
                continue

            # Symlink 'path' into target_dir if its size and md5sum match the
            # .dsc entry for the current filename; records the created link in
            # 'symlinked' and returns True on success, False on mismatch.
            def symlink_if_valid(path):
                f = utils.open_file(path)
                md5sum = apt_pkg.md5sum(f)
                f.close()

                fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
                expected = (int(entry['size']), entry['md5sum'])

                if fingerprint != expected:
                    return False

                dest = os.path.join(target_dir, filename)

                os.symlink(path, dest)
                symlinked.append(dest)

                return True

            # Use the caller-supplied session if we got one, otherwise open
            # (and below, close) our own for this lookup.
            session_ = session
            if session is None:
                session_ = DBConn().session()

            found = False

            # Look in the pool
            for poolfile in get_poolfile_like_name('%s' % filename, session_):
                poolfile_path = os.path.join(
                    poolfile.location.path, poolfile.filename
                )

                if symlink_if_valid(poolfile_path):
                    found = True
                    break

            if session is None:
                session_.close()

            if found:
                continue

            # Look in some other queues for the file
            queues = ('New', 'Byhand', 'ProposedUpdates',
                'OldProposedUpdates', 'Embargoed', 'Unembargoed')

            for queue in queues:
                if not cnf.get('Dir::Queue::%s' % queue):
                    continue

                queuefile_path = os.path.join(
                    cnf['Dir::Queue::%s' % queue], filename
                )

                if not os.path.exists(queuefile_path):
                    # Does not exist in this queue
                    continue

                if symlink_if_valid(queuefile_path):
                    break

        return symlinked
1516
1517     ###########################################################################
1518
    def check_lintian(self):
        """
        Extends self.rejects by checking the output of lintian against tags
        specified in Dinstall::LintianTags.

        Only sourceful uploads targeted at unstable or experimental are
        checked, and only when a tag file is configured.
        """

        cnf = Config()

        # Don't reject binary uploads
        if not self.pkg.changes['architecture'].has_key('source'):
            return

        # Only check some distributions
        for dist in ('unstable', 'experimental'):
            if dist in self.pkg.changes['distribution']:
                break
        else:
            return

        # If we do not have a tagfile, don't do anything
        tagfile = cnf.get("Dinstall::LintianTags")
        if not tagfile:
            return

        # Parse the yaml file
        sourcefile = file(tagfile, 'r')
        sourcecontent = sourcefile.read()
        sourcefile.close()

        try:
            # NOTE(review): yaml.load is unsafe on untrusted input; the tag
            # file here is operator-provided configuration, so it is treated
            # as trusted.  Flagging rather than changing behaviour.
            lintiantags = yaml.load(sourcecontent)['lintian']
        except yaml.YAMLError, msg:
            utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
            return

        # Try and find all orig mentioned in the .dsc
        symlinked = self.ensure_orig()

        # Setup the input file for lintian: one tag per line, over all
        # configured tag categories.
        fd, temp_filename = utils.temp_filename()
        temptagfile = os.fdopen(fd, 'w')
        for tags in lintiantags.values():
            temptagfile.writelines(['%s\n' % x for x in tags])
        temptagfile.close()

        try:
            cmd = "lintian --show-overrides --tags-from-file %s %s" % \
                (temp_filename, self.pkg.changes_file)

            result, output = commands.getstatusoutput(cmd)
        finally:
            # Remove our tempfile and any symlinks we created
            os.unlink(temp_filename)

            for symlink in symlinked:
                os.unlink(symlink)

        # A result of 2 is treated as lintian itself failing; we warn but
        # still feed whatever output we got into the tag parser below.
        if result == 2:
            utils.warn("lintian failed for %s [return code: %s]." % \
                (self.pkg.changes_file, result))
            utils.warn(utils.prefix_multi_line_string(output, \
                " [possible output:] "))

        # Forward reject/warning decisions to our logger, tagged with the
        # changes file they concern.
        def log(*txt):
            if self.logger:
                self.logger.log(
                    [self.pkg.changes_file, "check_lintian"] + list(txt)
                )

        # Generate messages
        parsed_tags = parse_lintian_output(output)
        self.rejects.extend(
            generate_reject_messages(parsed_tags, lintiantags, log=log)
        )
1593
1594     ###########################################################################
1595     def check_urgency(self):
1596         cnf = Config()
1597         if self.pkg.changes["architecture"].has_key("source"):
1598             if not self.pkg.changes.has_key("urgency"):
1599                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1600             self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1601             if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1602                 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1603                                      (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1604                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1605
1606     ###########################################################################
1607
1608     # Sanity check the time stamps of files inside debs.
1609     # [Files in the near future cause ugly warnings and extreme time
1610     #  travel can cause errors on extraction]
1611
1612     def check_timestamps(self):
1613         Cnf = Config()
1614
1615         future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1616         past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1617         tar = TarTime(future_cutoff, past_cutoff)
1618
1619         for filename, entry in self.pkg.files.items():
1620             if entry["type"] == "deb":
1621                 tar.reset()
1622                 try:
1623                     deb_file = utils.open_file(filename)
1624                     apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
1625                     deb_file.seek(0)
1626                     try:
1627                         apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
1628                     except SystemError, e:
1629                         # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
1630                         if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
1631                             raise
1632                         deb_file.seek(0)
1633                         apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")
1634
1635                     deb_file.close()
1636
1637                     future_files = tar.future_files.keys()
1638                     if future_files:
1639                         num_future_files = len(future_files)
1640                         future_file = future_files[0]
1641                         future_date = tar.future_files[future_file]
1642                         self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1643                                % (filename, num_future_files, future_file, time.ctime(future_date)))
1644
1645                     ancient_files = tar.ancient_files.keys()
1646                     if ancient_files:
1647                         num_ancient_files = len(ancient_files)
1648                         ancient_file = ancient_files[0]
1649                         ancient_date = tar.ancient_files[ancient_file]
1650                         self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1651                                % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1652                 except:
1653                     self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1654
1655     def check_if_upload_is_sponsored(self, uid_email, uid_name):
1656         if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1657             sponsored = False
1658         elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1659             sponsored = False
1660             if uid_name == "":
1661                 sponsored = True
1662         else:
1663             sponsored = True
1664             if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1665                 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1666                 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1667                     self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1668                         self.pkg.changes["sponsoremail"] = uid_email
1669
1670         return sponsored
1671
1672
1673     ###########################################################################
1674     # check_signed_by_key checks
1675     ###########################################################################
1676
1677     def check_signed_by_key(self):
1678         """Ensure the .changes is signed by an authorized uploader."""
1679         session = DBConn().session()
1680
1681         # First of all we check that the person has proper upload permissions
1682         # and that this upload isn't blocked
1683         fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1684
1685         if fpr is None:
1686             self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1687             return
1688
1689         # TODO: Check that import-keyring adds UIDs properly
1690         if not fpr.uid:
1691             self.rejects.append("Cannot find uid for fingerprint %s.  Please contact ftpmaster@debian.org" % fpr.fingerprint)
1692             return
1693
1694         # Check that the fingerprint which uploaded has permission to do so
1695         self.check_upload_permissions(fpr, session)
1696
1697         # Check that this package is not in a transition
1698         self.check_transition(session)
1699
1700         session.close()
1701
1702
1703     def check_upload_permissions(self, fpr, session):
1704         # Check any one-off upload blocks
1705         self.check_upload_blocks(fpr, session)
1706
1707         # If the source_acl is None, source is never allowed
1708         if fpr.source_acl is None:
1709             if self.pkg.changes["architecture"].has_key("source"):
1710                 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1711                 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1712                 self.rejects.append(rej)
1713                 return
1714         # Do DM as a special case
1715         # DM is a special case unfortunately, so we check it first
1716         # (keys with no source access get more access than DMs in one
1717         #  way; DMs can only upload for their packages whether source
1718         #  or binary, whereas keys with no access might be able to
1719         #  upload some binaries)
1720         elif fpr.source_acl.access_level == 'dm':
1721             self.check_dm_upload(fpr, session)
1722         else:
1723             # If not a DM, we allow full upload rights
1724             uid_email = "%s@debian.org" % (fpr.uid.uid)
1725             self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1726
1727
1728         # Check binary upload permissions
1729         # By this point we know that DMs can't have got here unless they
1730         # are allowed to deal with the package concerned so just apply
1731         # normal checks
1732         if fpr.binary_acl.access_level == 'full':
1733             return
1734
1735         # Otherwise we're in the map case
1736         tmparches = self.pkg.changes["architecture"].copy()
1737         tmparches.pop('source', None)
1738
1739         for bam in fpr.binary_acl_map:
1740             tmparches.pop(bam.architecture.arch_string, None)
1741
1742         if len(tmparches.keys()) > 0:
1743             if fpr.binary_reject:
1744                 rej = "changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1745                 if len(tmparches.keys()) == 1:
1746                     rej += "\n\narchitecture involved is: %s" % ",".join(tmparches.keys())
1747                 else:
1748                     rej += "\n\narchitectures involved are: %s" % ",".join(tmparches.keys())
1749                 self.rejects.append(rej)
1750             else:
1751                 # TODO: This is where we'll implement reject vs throw away binaries later
1752                 rej = "Uhm.  I'm meant to throw away the binaries now but that's not implemented yet"
1753                 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1754                 rej += "\nFingerprint: %s", (fpr.fingerprint)
1755                 self.rejects.append(rej)
1756
1757
1758     def check_upload_blocks(self, fpr, session):
1759         """Check whether any upload blocks apply to this source, source
1760            version, uid / fpr combination"""
1761
1762         def block_rej_template(fb):
1763             rej = 'Manual upload block in place for package %s' % fb.source
1764             if fb.version is not None:
1765                 rej += ', version %s' % fb.version
1766             return rej
1767
1768         for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1769             # version is None if the block applies to all versions
1770             if fb.version is None or fb.version == self.pkg.changes['version']:
1771                 # Check both fpr and uid - either is enough to cause a reject
1772                 if fb.fpr is not None:
1773                     if fb.fpr.fingerprint == fpr.fingerprint:
1774                         self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1775                 if fb.uid is not None:
1776                     if fb.uid == fpr.uid:
1777                         self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1778
1779
1780     def check_dm_upload(self, fpr, session):
1781         # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
1782         ## none of the uploaded packages are NEW
1783         rej = False
1784         for f in self.pkg.files.keys():
1785             if self.pkg.files[f].has_key("byhand"):
1786                 self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
1787                 rej = True
1788             if self.pkg.files[f].has_key("new"):
1789                 self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
1790                 rej = True
1791
1792         if rej:
1793             return
1794
1795         r = get_newest_source(self.pkg.changes["source"], session)
1796
1797         if r is None:
1798             rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
1799             self.rejects.append(rej)
1800             return
1801
1802         if not r.dm_upload_allowed:
1803             rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
1804             self.rejects.append(rej)
1805             return
1806
1807         ## the Maintainer: field of the uploaded .changes file corresponds with
1808         ## the owner of the key used (ie, non-developer maintainers may not sponsor
1809         ## uploads)
1810         if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
1811             self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
1812
1813         ## the most recent version of the package uploaded to unstable or
1814         ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
1815         ## non-developer maintainers cannot NMU or hijack packages)
1816
1817         # uploader includes the maintainer
1818         accept = False
1819         for uploader in r.uploaders:
1820             (rfc822, rfc2047, name, email) = uploader.get_split_maintainer()
1821             # Eww - I hope we never have two people with the same name in Debian
1822             if email == fpr.uid.uid or name == fpr.uid.name:
1823                 accept = True
1824                 break
1825
1826         if not accept:
1827             self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
1828             return
1829
1830         ## none of the packages are being taken over from other source packages
1831         for b in self.pkg.changes["binary"].keys():
1832             for suite in self.pkg.changes["distribution"].keys():
1833                 for s in get_source_by_package_and_suite(b, suite, session):
1834                     if s.source != self.pkg.changes["source"]:
1835                         self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s, suite))
1836
1837
1838
1839     def check_transition(self, session):
1840         cnf = Config()
1841
1842         sourcepkg = self.pkg.changes["source"]
1843
1844         # No sourceful upload -> no need to do anything else, direct return
1845         # We also work with unstable uploads, not experimental or those going to some
1846         # proposed-updates queue
1847         if "source" not in self.pkg.changes["architecture"] or \
1848            "unstable" not in self.pkg.changes["distribution"]:
1849             return
1850
1851         # Also only check if there is a file defined (and existant) with
1852         # checks.
1853         transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1854         if transpath == "" or not os.path.exists(transpath):
1855             return
1856
1857         # Parse the yaml file
1858         sourcefile = file(transpath, 'r')
1859         sourcecontent = sourcefile.read()
1860         try:
1861             transitions = yaml.load(sourcecontent)
1862         except yaml.YAMLError, msg:
1863             # This shouldn't happen, there is a wrapper to edit the file which
1864             # checks it, but we prefer to be safe than ending up rejecting
1865             # everything.
1866             utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1867             return
1868
1869         # Now look through all defined transitions
1870         for trans in transitions:
1871             t = transitions[trans]
1872             source = t["source"]
1873             expected = t["new"]
1874
1875             # Will be None if nothing is in testing.
1876             current = get_source_in_suite(source, "testing", session)
1877             if current is not None:
1878                 compare = apt_pkg.VersionCompare(current.version, expected)
1879
1880             if current is None or compare < 0:
1881                 # This is still valid, the current version in testing is older than
1882                 # the new version we wait for, or there is none in testing yet
1883
1884                 # Check if the source we look at is affected by this.
1885                 if sourcepkg in t['packages']:
1886                     # The source is affected, lets reject it.
1887
1888                     rejectmsg = "%s: part of the %s transition.\n\n" % (
1889                         sourcepkg, trans)
1890
1891                     if current is not None:
1892                         currentlymsg = "at version %s" % (current.version)
1893                     else:
1894                         currentlymsg = "not present in testing"
1895
1896                     rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1897
1898                     rejectmsg += "\n".join(textwrap.wrap("""Your package
1899 is part of a testing transition designed to get %s migrated (it is
1900 currently %s, we need version %s).  This transition is managed by the
1901 Release Team, and %s is the Release-Team member responsible for it.
1902 Please mail debian-release@lists.debian.org or contact %s directly if you
1903 need further assistance.  You might want to upload to experimental until this
1904 transition is done."""
1905                             % (source, currentlymsg, expected,t["rm"], t["rm"])))
1906
1907                     self.rejects.append(rejectmsg)
1908                     return
1909
1910     ###########################################################################
1911     # End check_signed_by_key checks
1912     ###########################################################################
1913
1914     def build_summaries(self):
1915         """ Build a summary of changes the upload introduces. """
1916
1917         (byhand, new, summary, override_summary) = self.pkg.file_summary()
1918
1919         short_summary = summary
1920
1921         # This is for direport's benefit...
1922         f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1923
1924         if byhand or new:
1925             summary += "Changes: " + f
1926
1927         summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1928
1929         summary += self.announce(short_summary, 0)
1930
1931         return (summary, short_summary)
1932
1933     ###########################################################################
1934
1935     def close_bugs(self, summary, action):
1936         """
1937         Send mail to close bugs as instructed by the closes field in the changes file.
1938         Also add a line to summary if any work was done.
1939
1940         @type summary: string
1941         @param summary: summary text, as given by L{build_summaries}
1942
1943         @type action: bool
1944         @param action: Set to false no real action will be done.
1945
1946         @rtype: string
1947         @return: summary. If action was taken, extended by the list of closed bugs.
1948
1949         """
1950
1951         template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1952
1953         bugs = self.pkg.changes["closes"].keys()
1954
1955         if not bugs:
1956             return summary
1957
1958         bugs.sort()
1959         summary += "Closing bugs: "
1960         for bug in bugs:
1961             summary += "%s " % (bug)
1962             if action:
1963                 self.update_subst()
1964                 self.Subst["__BUG_NUMBER__"] = bug
1965                 if self.pkg.changes["distribution"].has_key("stable"):
1966                     self.Subst["__STABLE_WARNING__"] = """
1967 Note that this package is not part of the released stable Debian
1968 distribution.  It may have dependencies on other unreleased software,
1969 or other instabilities.  Please take care if you wish to install it.
1970 The update will eventually make its way into the next released Debian
1971 distribution."""
1972                 else:
1973                     self.Subst["__STABLE_WARNING__"] = ""
1974                 mail_message = utils.TemplateSubst(self.Subst, template)
1975                 utils.send_mail(mail_message)
1976
1977                 # Clear up after ourselves
1978                 del self.Subst["__BUG_NUMBER__"]
1979                 del self.Subst["__STABLE_WARNING__"]
1980
1981         if action and self.logger:
1982             self.logger.log(["closing bugs"] + bugs)
1983
1984         summary += "\n"
1985
1986         return summary
1987
1988     ###########################################################################
1989
1990     def announce(self, short_summary, action):
1991         """
1992         Send an announce mail about a new upload.
1993
1994         @type short_summary: string
1995         @param short_summary: Short summary text to include in the mail
1996
1997         @type action: bool
1998         @param action: Set to false no real action will be done.
1999
2000         @rtype: string
2001         @return: Textstring about action taken.
2002
2003         """
2004
2005         cnf = Config()
2006         announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
2007
2008         # Only do announcements for source uploads with a recent dpkg-dev installed
2009         if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
2010            self.pkg.changes["architecture"].has_key("source"):
2011             return ""
2012
2013         lists_done = {}
2014         summary = ""
2015
2016         self.Subst["__SHORT_SUMMARY__"] = short_summary
2017
2018         for dist in self.pkg.changes["distribution"].keys():
2019             suite = get_suite(dist)
2020             if suite is None: continue
2021             announce_list = suite.announce
2022             if announce_list == "" or lists_done.has_key(announce_list):
2023                 continue
2024
2025             lists_done[announce_list] = 1
2026             summary += "Announcing to %s\n" % (announce_list)
2027
2028             if action:
2029                 self.update_subst()
2030                 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
2031                 if cnf.get("Dinstall::TrackingServer") and \
2032                    self.pkg.changes["architecture"].has_key("source"):
2033                     trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
2034                     self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
2035
2036                 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
2037                 utils.send_mail(mail_message)
2038
2039                 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
2040
2041         if cnf.FindB("Dinstall::CloseBugs"):
2042             summary = self.close_bugs(summary, action)
2043
2044         del self.Subst["__SHORT_SUMMARY__"]
2045
2046         return summary
2047
2048     ###########################################################################
2049     @session_wrapper
2050     def accept (self, summary, short_summary, session=None):
2051         """
2052         Accept an upload.
2053
2054         This moves all files referenced from the .changes into the pool,
2055         sends the accepted mail, announces to lists, closes bugs and
2056         also checks for override disparities. If enabled it will write out
2057         the version history for the BTS Version Tracking and will finally call
2058         L{queue_build}.
2059
2060         @type summary: string
2061         @param summary: Summary text
2062
2063         @type short_summary: string
2064         @param short_summary: Short summary
2065         """
2066
2067         cnf = Config()
2068         stats = SummaryStats()
2069
2070         print "Installing."
2071         self.logger.log(["installing changes", self.pkg.changes_file])
2072
2073         binaries = []
2074         poolfiles = []
2075
2076         # Add the .dsc file to the DB first
2077         for newfile, entry in self.pkg.files.items():
2078             if entry["type"] == "dsc":
2079                 source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
2080                 for j in pfs:
2081                     poolfiles.append(j)
2082
2083         # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
2084         for newfile, entry in self.pkg.files.items():
2085             if entry["type"] == "deb":
2086                 b, pf = add_deb_to_db(self, newfile, session)
2087                 binaries.append(b)
2088                 poolfiles.append(pf)
2089
2090         # If this is a sourceful diff only upload that is moving
2091         # cross-component we need to copy the .orig files into the new
2092         # component too for the same reasons as above.
2093         # XXX: mhy: I think this should be in add_dsc_to_db
2094         if self.pkg.changes["architecture"].has_key("source"):
2095             for orig_file in self.pkg.orig_files.keys():
2096                 if not self.pkg.orig_files[orig_file].has_key("id"):
2097                     continue # Skip if it's not in the pool
2098                 orig_file_id = self.pkg.orig_files[orig_file]["id"]
2099                 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
2100                     continue # Skip if the location didn't change
2101
2102                 # Do the move
2103                 oldf = get_poolfile_by_id(orig_file_id, session)
2104                 old_filename = os.path.join(oldf.location.path, oldf.filename)
2105                 old_dat = {'size': oldf.filesize,   'md5sum': oldf.md5sum,
2106                            'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
2107
2108                 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
2109
2110                 # TODO: Care about size/md5sum collisions etc
2111                 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
2112
2113                 # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
2114                 if newf is None:
2115                     utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
2116                     newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
2117
2118                     session.flush()
2119
2120                     # Don't reference the old file from this changes
2121                     for p in poolfiles:
2122                         if p.file_id == oldf.file_id:
2123                             poolfiles.remove(p)
2124
2125                     poolfiles.append(newf)
2126
2127                     # Fix up the DSC references
2128                     toremove = []
2129
2130                     for df in source.srcfiles:
2131                         if df.poolfile.file_id == oldf.file_id:
2132                             # Add a new DSC entry and mark the old one for deletion
2133                             # Don't do it in the loop so we don't change the thing we're iterating over
2134                             newdscf = DSCFile()
2135                             newdscf.source_id = source.source_id
2136                             newdscf.poolfile_id = newf.file_id
2137                             session.add(newdscf)
2138
2139                             toremove.append(df)
2140
2141                     for df in toremove:
2142                         session.delete(df)
2143
2144                     # Flush our changes
2145                     session.flush()
2146
2147                     # Make sure that our source object is up-to-date
2148                     session.expire(source)
2149
2150         # Add changelog information to the database
2151         self.store_changelog()
2152
2153         # Install the files into the pool
2154         for newfile, entry in self.pkg.files.items():
2155             destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
2156             utils.move(newfile, destination)
2157             self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
2158             stats.accept_bytes += float(entry["size"])
2159
2160         # Copy the .changes file across for suite which need it.
2161         copy_changes = dict([(x.copychanges, '')
2162                              for x in session.query(Suite).filter(Suite.suite_name.in_(self.pkg.changes["distribution"].keys())).all()
2163                              if x.copychanges is not None])
2164
2165         for dest in copy_changes.keys():
2166             utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
2167
2168         # We're done - commit the database changes
2169         session.commit()
2170         # Our SQL session will automatically start a new transaction after
2171         # the last commit
2172
2173         # Now ensure that the metadata has been added
2174         # This has to be done after we copy the files into the pool
2175         # For source if we have it:
2176         if self.pkg.changes["architecture"].has_key("source"):
2177             import_metadata_into_db(source, session)
2178
2179         # Now for any of our binaries
2180         for b in binaries:
2181             import_metadata_into_db(b, session)
2182
2183         session.commit()
2184
2185         # Move the .changes into the 'done' directory
2186         ye, mo, da = time.gmtime()[0:3]
2187         donedir = os.path.join(cnf["Dir::Queue::Done"], str(ye), "%0.2d" % mo, "%0.2d" % da)
2188         if not os.path.isdir(donedir):
2189             os.makedirs(donedir)
2190
2191         utils.move(self.pkg.changes_file,
2192                    os.path.join(donedir, os.path.basename(self.pkg.changes_file)))
2193
2194         if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
2195             UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
2196
2197         self.update_subst()
2198         self.Subst["__SUMMARY__"] = summary
2199         mail_message = utils.TemplateSubst(self.Subst,
2200                                            os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
2201         utils.send_mail(mail_message)
2202         self.announce(short_summary, 1)
2203
2204         ## Helper stuff for DebBugs Version Tracking
2205         if cnf.Find("Dir::Queue::BTSVersionTrack"):
2206             if self.pkg.changes["architecture"].has_key("source"):
2207                 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2208                 version_history = os.fdopen(fd, 'w')
2209                 version_history.write(self.pkg.dsc["bts changelog"])
2210                 version_history.close()
2211                 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2212                                       self.pkg.changes_file[:-8]+".versions")
2213                 os.rename(temp_filename, filename)
2214                 os.chmod(filename, 0644)
2215
2216             # Write out the binary -> source mapping.
2217             (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2218             debinfo = os.fdopen(fd, 'w')
2219             for name, entry in sorted(self.pkg.files.items()):
2220                 if entry["type"] == "deb":
2221                     line = " ".join([entry["package"], entry["version"],
2222                                      entry["architecture"], entry["source package"],
2223                                      entry["source version"]])
2224                     debinfo.write(line+"\n")
2225             debinfo.close()
2226             filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2227                                   self.pkg.changes_file[:-8]+".debinfo")
2228             os.rename(temp_filename, filename)
2229             os.chmod(filename, 0644)
2230
2231         session.commit()
2232
2233         # Set up our copy queues (e.g. buildd queues)
2234         for suite_name in self.pkg.changes["distribution"].keys():
2235             suite = get_suite(suite_name, session)
2236             for q in suite.copy_queues:
2237                 for f in poolfiles:
2238                     q.add_file_from_pool(f)
2239
2240         session.commit()
2241
2242         # Finally...
2243         stats.accept_count += 1
2244
2245     def check_override(self):
2246         """
2247         Checks override entries for validity. Mails "Override disparity" warnings,
2248         if that feature is enabled.
2249
2250         Abandons the check if
2251           - override disparity checks are disabled
2252           - mail sending is disabled
2253         """
2254
2255         cnf = Config()
2256
2257         # Abandon the check if override disparity checks have been disabled
2258         if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
2259             return
2260
2261         summary = self.pkg.check_override()
2262
2263         if summary == "":
2264             return
2265
2266         overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2267
2268         self.update_subst()
2269         self.Subst["__SUMMARY__"] = summary
2270         mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2271         utils.send_mail(mail_message)
2272         del self.Subst["__SUMMARY__"]
2273
2274     ###########################################################################
2275
2276     def remove(self, from_dir=None):
2277         """
2278         Used (for instance) in p-u to remove the package from unchecked
2279
2280         Also removes the package from holding area.
2281         """
2282         if from_dir is None:
2283             from_dir = self.pkg.directory
2284         h = Holding()
2285
2286         for f in self.pkg.files.keys():
2287             os.unlink(os.path.join(from_dir, f))
2288             if os.path.exists(os.path.join(h.holding_dir, f)):
2289                 os.unlink(os.path.join(h.holding_dir, f))
2290
2291         os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2292         if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2293             os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2294
2295     ###########################################################################
2296
2297     def move_to_queue (self, queue):
2298         """
2299         Move files to a destination queue using the permissions in the table
2300         """
2301         h = Holding()
2302         utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2303                    queue.path, perms=int(queue.change_perms, 8))
2304         for f in self.pkg.files.keys():
2305             utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
2306
2307     ###########################################################################
2308
    def force_reject(self, reject_files):
        """
        Forcefully move files from the current directory to the
        reject directory.  If any file already exists in the reject
        directory it will be moved to the morgue to make way for
        the new file.

        @type reject_files: list
        @param reject_files: filenames to move into the reject directory
            (do_reject passes the upload's files plus the .changes)

        """

        cnf = Config()

        for file_entry in reject_files:
            # Skip any files which don't exist or which we don't have permission to copy.
            if os.access(file_entry, os.R_OK) == 0:
                continue

            dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)

            # Claim the destination name atomically (O_EXCL) so we can't
            # race another process writing the same filename.
            try:
                dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
            except OSError, e:
                # File exists?  Let's find a new name by adding a number
                if e.errno == errno.EEXIST:
                    try:
                        dest_file = utils.find_next_free(dest_file, 255)
                    except NoFreeFilenameError:
                        # Something's either gone badly Pete Tong, or
                        # someone is trying to exploit us.
                        utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))
                        return

                    # Make sure we really got it
                    try:
                        dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
                    except OSError, e:
                        # Likewise
                        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
                        return
                else:
                    # Any error other than EEXIST is unexpected; propagate it.
                    raise
            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file_entry, dest_file, 1, perms=0660)
            os.close(dest_fd)
2356
2357     ###########################################################################
    def do_reject (self, manual=0, reject_message="", notes=""):
        """
        Reject an upload. If called without a reject message or C{manual} is
        true, spawn an editor so the user can write one.

        Moves all the upload's files into the reject directory, writes a
        <changes-basename>.reason file there, and sends the rejection mail.

        @type manual: bool
        @param manual: manual or automated rejection

        @type reject_message: string
        @param reject_message: A reject message

        @param notes: note objects with author/version/notedate/comment
            attributes, pre-filled into the editor buffer
            (NOTE(review): default is "" but only len()/iteration are used,
            so any sequence works)

        @return: 0 on rejection; 1 if the operator abandoned a manual
            rejection (exits the process entirely on 'Quit')

        """
        # If we weren't given a manual rejection message, spawn an
        # editor so the user can add one in...
        if manual and not reject_message:
            (fd, temp_filename) = utils.temp_filename()
            temp_file = os.fdopen(fd, 'w')
            if len(notes) > 0:
                for note in notes:
                    temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
                                    % (note.author, note.version, note.notedate, note.comment))
            temp_file.close()
            editor = os.environ.get("EDITOR","vi")
            answer = 'E'
            # Loop: edit, show the message, prompt; 'E' re-opens the editor.
            while answer == 'E':
                os.system("%s %s" % (editor, temp_filename))
                temp_fh = utils.open_file(temp_filename)
                reject_message = "".join(temp_fh.readlines())
                temp_fh.close()
                print "Reject message:"
                print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
                prompt = "[R]eject, Edit, Abandon, Quit ?"
                answer = "XXX"
                # Keep prompting until the answer is one of the letters in the prompt;
                # an empty answer takes the default extracted from the prompt ('R').
                while prompt.find(answer) == -1:
                    answer = utils.our_raw_input(prompt)
                    m = re_default_answer.search(prompt)
                    if answer == "":
                        answer = m.group(1)
                    answer = answer[:1].upper()
            os.unlink(temp_filename)
            if answer == 'A':
                return 1
            elif answer == 'Q':
                sys.exit(0)

        print "Rejecting.\n"

        cnf = Config()

        # <changes-basename>.reason in the reject dir ([:-8] strips ".changes")
        reason_filename = self.pkg.changes_file[:-8] + ".reason"
        reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)

        # Move all the files into the reject directory
        reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
        self.force_reject(reject_files)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reason_filename):
            os.unlink(reason_filename)
        reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)

        rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")

        self.update_subst()
        if not manual:
            # Automated rejection: the raw reject message is the .reason file.
            self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
            self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
            self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
            os.write(reason_fd, reject_message)
            reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
        else:
            # Build up the rejection email
            user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
            self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
            self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
            self.Subst["__REJECT_MESSAGE__"] = ""
            self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
            reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
            # Write the rejection email out as the <foo>.reason file
            os.write(reason_fd, reject_mail_message)

        del self.Subst["__REJECTOR_ADDRESS__"]
        del self.Subst["__MANUAL_REJECT_MESSAGE__"]
        del self.Subst["__CC__"]

        os.close(reason_fd)

        # Send the rejection mail
        utils.send_mail(reject_mail_message)

        if self.logger:
            self.logger.log(["rejected", self.pkg.changes_file])

        return 0
2455
2456     ################################################################################
2457     def in_override_p(self, package, component, suite, binary_type, filename, session):
2458         """
2459         Check if a package already has override entries in the DB
2460
2461         @type package: string
2462         @param package: package name
2463
2464         @type component: string
2465         @param component: database id of the component
2466
2467         @type suite: int
2468         @param suite: database id of the suite
2469
2470         @type binary_type: string
2471         @param binary_type: type of the package
2472
2473         @type filename: string
2474         @param filename: filename we check
2475
2476         @return: the database result. But noone cares anyway.
2477
2478         """
2479
2480         cnf = Config()
2481
2482         if binary_type == "": # must be source
2483             file_type = "dsc"
2484         else:
2485             file_type = binary_type
2486
2487         # Override suite name; used for example with proposed-updates
2488         oldsuite = get_suite(suite, session)
2489         if (not oldsuite is None) and oldsuite.overridesuite:
2490             suite = oldsuite.overridesuite
2491
2492         result = get_override(package, suite, component, file_type, session)
2493
2494         # If checking for a source package fall back on the binary override type
2495         if file_type == "dsc" and len(result) < 1:
2496             result = get_override(package, suite, component, ['deb', 'udeb'], session)
2497
2498         # Remember the section and priority so we can check them later if appropriate
2499         if len(result) > 0:
2500             result = result[0]
2501             self.pkg.files[filename]["override section"] = result.section.section
2502             self.pkg.files[filename]["override priority"] = result.priority.priority
2503             return result
2504
2505         return None
2506
2507     ################################################################################
2508     def get_anyversion(self, sv_list, suite):
2509         """
2510         @type sv_list: list
2511         @param sv_list: list of (suite, version) tuples to check
2512
2513         @type suite: string
2514         @param suite: suite name
2515
2516         Description: TODO
2517         """
2518         Cnf = Config()
2519         anyversion = None
2520         anysuite = [suite] + [ vc.reference.suite_name for vc in get_version_checks(suite, "Enhances") ]
2521         for (s, v) in sv_list:
2522             if s in [ x.lower() for x in anysuite ]:
2523                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2524                     anyversion = v
2525
2526         return anyversion
2527
2528     ################################################################################
2529
2530     def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2531         """
2532         @type sv_list: list
2533         @param sv_list: list of (suite, version) tuples to check
2534
2535         @type filename: string
2536         @param filename: XXX
2537
2538         @type new_version: string
2539         @param new_version: XXX
2540
2541         Ensure versions are newer than existing packages in target
2542         suites and that cross-suite version checking rules as
2543         set out in the conf file are satisfied.
2544         """
2545
2546         cnf = Config()
2547
2548         # Check versions for each target suite
2549         for target_suite in self.pkg.changes["distribution"].keys():
2550             # Check we can find the target suite
2551             ts = get_suite(target_suite)
2552             if ts is None:
2553                 self.rejects.append("Cannot find target suite %s to perform version checks" % target_suite)
2554                 continue
2555
2556             must_be_newer_than = [ vc.reference.suite_name for vc in get_version_checks(target_suite, "MustBeNewerThan") ]
2557             must_be_older_than = [ vc.reference.suite_name for vc in get_version_checks(target_suite, "MustBeOlderThan") ]
2558
2559             # Enforce "must be newer than target suite" even if conffile omits it
2560             if target_suite not in must_be_newer_than:
2561                 must_be_newer_than.append(target_suite)
2562
2563             for (suite, existent_version) in sv_list:
2564                 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
2565
2566                 if suite in must_be_newer_than and sourceful and vercmp < 1:
2567                     self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2568
2569                 if suite in must_be_older_than and vercmp > -1:
2570                     cansave = 0
2571
2572                     if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2573                         # we really use the other suite, ignoring the conflicting one ...
2574                         addsuite = self.pkg.changes["distribution-version"][suite]
2575
2576                         add_version = self.get_anyversion(sv_list, addsuite)
2577                         target_version = self.get_anyversion(sv_list, target_suite)
2578
2579                         if not add_version:
2580                             # not add_version can only happen if we map to a suite
2581                             # that doesn't enhance the suite we're propup'ing from.
2582                             # so "propup-ver x a b c; map a d" is a problem only if
2583                             # d doesn't enhance a.
2584                             #
2585                             # i think we could always propagate in this case, rather
2586                             # than complaining. either way, this isn't a REJECT issue
2587                             #
2588                             # And - we really should complain to the dorks who configured dak
2589                             self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite))
2590                             self.pkg.changes.setdefault("propdistribution", {})
2591                             self.pkg.changes["propdistribution"][addsuite] = 1
2592                             cansave = 1
2593                         elif not target_version:
2594                             # not targets_version is true when the package is NEW
2595                             # we could just stick with the "...old version..." REJECT
2596                             # for this, I think.
2597                             self.rejects.append("Won't propogate NEW packages.")
2598                         elif apt_pkg.VersionCompare(new_version, add_version) < 0:
2599                             # propogation would be redundant. no need to reject though.
2600                             self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2601                             cansave = 1
2602                         elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2603                              apt_pkg.VersionCompare(add_version, target_version) >= 0:
2604                             # propogate!!
2605                             self.warnings.append("Propogating upload to %s" % (addsuite))
2606                             self.pkg.changes.setdefault("propdistribution", {})
2607                             self.pkg.changes["propdistribution"][addsuite] = 1
2608                             cansave = 1
2609
2610                     if not cansave:
2611                         self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2612
2613     ################################################################################
2614     def check_binary_against_db(self, filename, session):
2615         # Ensure version is sane
2616         self.cross_suite_version_check( \
2617             get_suite_version_by_package(self.pkg.files[filename]["package"], \
2618                 self.pkg.files[filename]["architecture"], session),
2619             filename, self.pkg.files[filename]["version"], sourceful=False)
2620
2621         # Check for any existing copies of the file
2622         q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2623         q = q.filter_by(version=self.pkg.files[filename]["version"])
2624         q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2625
2626         if q.count() > 0:
2627             self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
2628
2629     ################################################################################
2630
2631     def check_source_against_db(self, filename, session):
2632         source = self.pkg.dsc.get("source")
2633         version = self.pkg.dsc.get("version")
2634
2635         # Ensure version is sane
2636         self.cross_suite_version_check( \
2637             get_suite_version_by_source(source, session), filename, version,
2638             sourceful=True)
2639
2640     ################################################################################
2641     def check_dsc_against_db(self, filename, session):
2642         """
2643
2644         @warning: NB: this function can remove entries from the 'files' index [if
2645          the orig tarball is a duplicate of the one in the archive]; if
2646          you're iterating over 'files' and call this function as part of
2647          the loop, be sure to add a check to the top of the loop to
2648          ensure you haven't just tried to dereference the deleted entry.
2649
2650         """
2651
2652         Cnf = Config()
2653         self.pkg.orig_files = {} # XXX: do we need to clear it?
2654         orig_files = self.pkg.orig_files
2655
2656         # Try and find all files mentioned in the .dsc.  This has
2657         # to work harder to cope with the multiple possible
2658         # locations of an .orig.tar.gz.
2659         # The ordering on the select is needed to pick the newest orig
2660         # when it exists in multiple places.
2661         for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2662             found = None
2663             if self.pkg.files.has_key(dsc_name):
2664                 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2665                 actual_size = int(self.pkg.files[dsc_name]["size"])
2666                 found = "%s in incoming" % (dsc_name)
2667
2668                 # Check the file does not already exist in the archive
2669                 ql = get_poolfile_like_name(dsc_name, session)
2670
2671                 # Strip out anything that isn't '%s' or '/%s$'
2672                 for i in ql:
2673                     if not i.filename.endswith(dsc_name):
2674                         ql.remove(i)
2675
2676                 # "[dak] has not broken them.  [dak] has fixed a
2677                 # brokenness.  Your crappy hack exploited a bug in
2678                 # the old dinstall.
2679                 #
2680                 # "(Come on!  I thought it was always obvious that
2681                 # one just doesn't release different files with
2682                 # the same name and version.)"
2683                 #                        -- ajk@ on d-devel@l.d.o
2684
2685                 if len(ql) > 0:
2686                     # Ignore exact matches for .orig.tar.gz
2687                     match = 0
2688                     if re_is_orig_source.match(dsc_name):
2689                         for i in ql:
2690                             if self.pkg.files.has_key(dsc_name) and \
2691                                int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2692                                self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2693                                 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2694                                 # TODO: Don't delete the entry, just mark it as not needed
2695                                 # This would fix the stupidity of changing something we often iterate over
2696                                 # whilst we're doing it
2697                                 del self.pkg.files[dsc_name]
2698                                 dsc_entry["files id"] = i.file_id
2699                                 if not orig_files.has_key(dsc_name):
2700                                     orig_files[dsc_name] = {}
2701                                 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2702                                 match = 1
2703
2704                                 # Don't bitch that we couldn't find this file later
2705                                 try:
2706                                     self.later_check_files.remove(dsc_name)
2707                                 except ValueError:
2708                                     pass
2709
2710
2711                     if not match:
2712                         self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2713
2714             elif re_is_orig_source.match(dsc_name):
2715                 # Check in the pool
2716                 ql = get_poolfile_like_name(dsc_name, session)
2717
2718                 # Strip out anything that isn't '%s' or '/%s$'
2719                 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2720                 for i in ql:
2721                     if not i.filename.endswith(dsc_name):
2722                         ql.remove(i)
2723
2724                 if len(ql) > 0:
2725                     # Unfortunately, we may get more than one match here if,
2726                     # for example, the package was in potato but had an -sa
2727                     # upload in woody.  So we need to choose the right one.
2728
2729                     # default to something sane in case we don't match any or have only one
2730                     x = ql[0]
2731
2732                     if len(ql) > 1:
2733                         for i in ql:
2734                             old_file = os.path.join(i.location.path, i.filename)
2735                             old_file_fh = utils.open_file(old_file)
2736                             actual_md5 = apt_pkg.md5sum(old_file_fh)
2737                             old_file_fh.close()
2738                             actual_size = os.stat(old_file)[stat.ST_SIZE]
2739                             if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2740                                 x = i
2741
2742                     old_file = os.path.join(i.location.path, i.filename)
2743                     old_file_fh = utils.open_file(old_file)
2744                     actual_md5 = apt_pkg.md5sum(old_file_fh)
2745                     old_file_fh.close()
2746                     actual_size = os.stat(old_file)[stat.ST_SIZE]
2747                     found = old_file
2748                     suite_type = x.location.archive_type
2749                     # need this for updating dsc_files in install()
2750                     dsc_entry["files id"] = x.file_id
2751                     # See install() in process-accepted...
2752                     if not orig_files.has_key(dsc_name):
2753                         orig_files[dsc_name] = {}
2754                     orig_files[dsc_name]["id"] = x.file_id
2755                     orig_files[dsc_name]["path"] = old_file
2756                     orig_files[dsc_name]["location"] = x.location.location_id
2757                 else:
2758                     # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2759                     # Not there? Check the queue directories...
2760                     for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2761                         if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2762                             continue
2763                         in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2764                         if os.path.exists(in_otherdir):
2765                             in_otherdir_fh = utils.open_file(in_otherdir)
2766                             actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2767                             in_otherdir_fh.close()
2768                             actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2769                             found = in_otherdir
2770                             if not orig_files.has_key(dsc_name):
2771                                 orig_files[dsc_name] = {}
2772                             orig_files[dsc_name]["path"] = in_otherdir
2773
2774                     if not found:
2775                         self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2776                         continue
2777             else:
2778                 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2779                 continue
2780             if actual_md5 != dsc_entry["md5sum"]:
2781                 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2782             if actual_size != int(dsc_entry["size"]):
2783                 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2784
2785     ################################################################################
2786     # This is used by process-new and process-holding to recheck a changes file
2787     # at the time we're running.  It mainly wraps various other internal functions
2788     # and is similar to accepted_checks - these should probably be tidied up
2789     # and combined
2790     def recheck(self, session):
2791         cnf = Config()
2792         for f in self.pkg.files.keys():
2793             # The .orig.tar.gz can disappear out from under us is it's a
2794             # duplicate of one in the archive.
2795             if not self.pkg.files.has_key(f):
2796                 continue
2797
2798             entry = self.pkg.files[f]
2799
2800             # Check that the source still exists
2801             if entry["type"] == "deb":
2802                 source_version = entry["source version"]
2803                 source_package = entry["source package"]
2804                 if not self.pkg.changes["architecture"].has_key("source") \
2805                    and not source_exists(source_package, source_version, \
2806                     suites = self.pkg.changes["distribution"].keys(), session = session):
2807                     source_epochless_version = re_no_epoch.sub('', source_version)
2808                     dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2809                     found = False
2810                     for q in ["Embargoed", "Unembargoed", "Newstage"]:
2811                         if cnf.has_key("Dir::Queue::%s" % (q)):
2812                             if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
2813                                 found = True
2814                     if not found:
2815                         self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2816
2817             # Version and file overwrite checks
2818             if entry["type"] == "deb":
2819                 self.check_binary_against_db(f, session)
2820             elif entry["type"] == "dsc":
2821                 self.check_source_against_db(f, session)
2822                 self.check_dsc_against_db(f, session)
2823
2824     ################################################################################
2825     def accepted_checks(self, overwrite_checks, session):
2826         # Recheck anything that relies on the database; since that's not
2827         # frozen between accept and our run time when called from p-a.
2828
2829         # overwrite_checks is set to False when installing to stable/oldstable
2830
2831         propogate={}
2832         nopropogate={}
2833
2834         # Find the .dsc (again)
2835         dsc_filename = None
2836         for f in self.pkg.files.keys():
2837             if self.pkg.files[f]["type"] == "dsc":
2838                 dsc_filename = f
2839
2840         for checkfile in self.pkg.files.keys():
2841             # The .orig.tar.gz can disappear out from under us is it's a
2842             # duplicate of one in the archive.
2843             if not self.pkg.files.has_key(checkfile):
2844                 continue
2845
2846             entry = self.pkg.files[checkfile]
2847
2848             # Check that the source still exists
2849             if entry["type"] == "deb":
2850                 source_version = entry["source version"]
2851                 source_package = entry["source package"]
2852                 if not self.pkg.changes["architecture"].has_key("source") \
2853                    and not source_exists(source_package, source_version, \
2854                     suites = self.pkg.changes["distribution"].keys(), \
2855                     session = session):
2856                     self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2857
2858             # Version and file overwrite checks
2859             if overwrite_checks:
2860                 if entry["type"] == "deb":
2861                     self.check_binary_against_db(checkfile, session)
2862                 elif entry["type"] == "dsc":
2863                     self.check_source_against_db(checkfile, session)
2864                     self.check_dsc_against_db(dsc_filename, session)
2865
2866             # propogate in the case it is in the override tables:
2867             for suite in self.pkg.changes.get("propdistribution", {}).keys():
2868                 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2869                     propogate[suite] = 1
2870                 else:
2871                     nopropogate[suite] = 1
2872
2873         for suite in propogate.keys():
2874             if suite in nopropogate:
2875                 continue
2876             self.pkg.changes["distribution"][suite] = 1
2877
2878         for checkfile in self.pkg.files.keys():
2879             # Check the package is still in the override tables
2880             for suite in self.pkg.changes["distribution"].keys():
2881                 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2882                     self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2883
2884     ################################################################################
2885     # If any file of an upload has a recent mtime then chances are good
2886     # the file is still being uploaded.
2887
2888     def upload_too_new(self):
2889         cnf = Config()
2890         too_new = False
2891         # Move back to the original directory to get accurate time stamps
2892         cwd = os.getcwd()
2893         os.chdir(self.pkg.directory)
2894         file_list = self.pkg.files.keys()
2895         file_list.extend(self.pkg.dsc_files.keys())
2896         file_list.append(self.pkg.changes_file)
2897         for f in file_list:
2898             try:
2899                 last_modified = time.time()-os.path.getmtime(f)
2900                 if last_modified < int(cnf["Dinstall::SkipTime"]):
2901                     too_new = True
2902                     break
2903             except:
2904                 pass
2905
2906         os.chdir(cwd)
2907         return too_new
2908
2909     def store_changelog(self):
2910
2911         # Skip binary-only upload if it is not a bin-NMU
2912         if not self.pkg.changes['architecture'].has_key('source'):
2913             from daklib.regexes import re_bin_only_nmu
2914             if not re_bin_only_nmu.search(self.pkg.changes['version']):
2915                 return
2916
2917         session = DBConn().session()
2918
2919         # Check if upload already has a changelog entry
2920         query = """SELECT changelog_id FROM changes WHERE source = :source
2921                    AND version = :version AND architecture = :architecture AND changelog_id != 0"""
2922         if session.execute(query, {'source': self.pkg.changes['source'], \
2923                                    'version': self.pkg.changes['version'], \
2924                                    'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
2925             session.commit()
2926             return
2927
2928         # Add current changelog text into changelogs_text table, return created ID
2929         query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
2930         ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]
2931
2932         # Link ID to the upload available in changes table
2933         query = """UPDATE changes SET changelog_id = :id WHERE source = :source
2934                    AND version = :version AND architecture = :architecture"""
2935         session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
2936                                 'version': self.pkg.changes['version'], \
2937                                 'architecture': " ".join(self.pkg.changes['architecture'].keys())})
2938
2939         session.commit()