1 #!/usr/bin/env python
2 # vim:set et sw=4:
3
4 """
5 Queue utility functions for dak
6
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009, 2010  Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
11 """
12
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
17
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
21 # GNU General Public License for more details.
22
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
26
27 ###############################################################################
28
29 import errno
30 import os
31 import stat
32 import sys
33 import time
34 import apt_inst
35 import apt_pkg
36 import utils
37 import commands
38 import shutil
39 import textwrap
40 from types import *
41 from sqlalchemy.sql.expression import desc
42 from sqlalchemy.orm.exc import NoResultFound
43
44 import yaml
45
46 from dak_exceptions import *
47 from changes import *
48 from regexes import *
49 from config import Config
50 from holding import Holding
51 from urgencylog import UrgencyLog
52 from dbconn import *
53 from summarystats import SummaryStats
54 from utils import parse_changes, check_dsc_files, build_package_list
55 from textutils import fix_maintainer
56 from lintian import parse_lintian_output, generate_reject_messages
57 from contents import UnpackedSource
58
59 ###############################################################################
60
61 def get_type(f, session):
62     """
63     Get the file type of C{f}
64
65     @type f: dict
66     @param f: file entry from Changes object
67
68     @type session: SQLA Session
69     @param session: SQL Alchemy session object
70
71     @rtype: string
72     @return: filetype
73
74     """
75     # Determine the type
76     if f.has_key("dbtype"):
77         file_type = f["dbtype"]
78     elif re_source_ext.match(f["type"]):
79         file_type = "dsc"
80     elif f['architecture'] == 'source' and f["type"] == 'unreadable':
81         utils.warn('unreadable source file (will continue and hope for the best)')
82         return f["type"]
83     else:
84         file_type = f["type"]
85         utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (file_type))
86
87     # Validate the override type
88     type_id = get_override_type(file_type, session)
89     if type_id is None:
90         utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))
91
92     return file_type
93
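# A hedged sketch of the file entry get_type() expects -- the keys mirror the
# ones read above, the values are made up for illustration:
#
#   f = {"type": "deb", "architecture": "amd64", "dbtype": "deb"}
#   file_type = get_type(f, session)    # -> "deb"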
94 ################################################################################
95
96 # Determine what parts in a .changes are NEW
97
98 def determine_new(filename, changes, files, warn=1, session = None, dsc = None, new = None):
99     """
100     Determine what parts in a C{changes} file are NEW.
101
102     @type filename: str
103     @param filename: changes filename
104
105     @type changes: Upload.Pkg.changes dict
106     @param changes: Changes dictionary
107
108     @type files: Upload.Pkg.files dict
109     @param files: Files dictionary
110
111     @type warn: bool
112     @param warn: Warn if overrides are added for (old)stable
113
114     @type dsc: Upload.Pkg.dsc dict
115     @param dsc: (optional) Dsc dictionary
116
117     @type new: dict
118     @param new: new packages as returned by a previous call to this function, but override information may have changed
119
120     @rtype: dict
121     @return: dictionary of NEW components.
122
123     """
124     # TODO: This should all use the database instead of parsing the changes
125     # file again
126     byhand = {}
127     if new is None:
128         new = {}
129
130     dbchg = get_dbchange(filename, session)
131     if dbchg is None:
132         print "Warning: cannot find changes file in database; won't check byhand"
133
134     # Try to get the Package-Set field from an included .dsc file (if possible).
135     if dsc:
136         for package, entry in build_package_list(dsc, session).items():
137             if package not in new:
138                 new[package] = entry
139
140     # Build up a list of potentially new things
141     for name, f in files.items():
142         # Keep a record of byhand elements
143         if f["section"] == "byhand":
144             byhand[name] = 1
145             continue
146
147         pkg = f["package"]
148         priority = f["priority"]
149         section = f["section"]
150         file_type = get_type(f, session)
151         component = f["component"]
152
153         if file_type == "dsc":
154             priority = "source"
155
156         if not new.has_key(pkg):
157             new[pkg] = {}
158             new[pkg]["priority"] = priority
159             new[pkg]["section"] = section
160             new[pkg]["type"] = file_type
161             new[pkg]["component"] = component
162             new[pkg]["files"] = []
163         else:
164             old_type = new[pkg]["type"]
165             if old_type != file_type:
166                 # source gets trumped by deb or udeb
167                 if old_type == "dsc":
168                     new[pkg]["priority"] = priority
169                     new[pkg]["section"] = section
170                     new[pkg]["type"] = file_type
171                     new[pkg]["component"] = component
172
173         new[pkg]["files"].append(name)
174
175         if f.has_key("othercomponents"):
176             new[pkg]["othercomponents"] = f["othercomponents"]
177
178     # Fix up the list of target suites
179     cnf = Config()
180     for suite in changes["suite"].keys():
181         oldsuite = get_suite(suite, session)
182         if not oldsuite:
183             print "WARNING: Invalid suite %s found" % suite
184             continue
185
186         if oldsuite.overridesuite:
187             newsuite = get_suite(oldsuite.overridesuite, session)
188
189             if newsuite:
190                 print "INFORMATION: Using overrides from suite %s instead of suite %s" % (
191                     oldsuite.overridesuite, suite)
192                 del changes["suite"][suite]
193                 changes["suite"][oldsuite.overridesuite] = 1
194             else:
195                 print "WARNING: Told to use overridesuite %s for %s but it doesn't exist.  Bugger" % (
196                     oldsuite.overridesuite, suite)
197
198     # Check for unprocessed byhand files
199     if dbchg is not None:
200         for b in byhand.keys():
201             # Find the file entry in the database
202             found = False
203             for f in dbchg.files:
204                 if f.filename == b:
205                     found = True
206                     # If it's processed, we can ignore it
207                     if f.processed:
208                         del byhand[b]
209                     break
210
211             if not found:
212                 print "Warning: Couldn't find BYHAND item %s in the database; assuming unprocessed" % (b)
213
214     # Check for new stuff
215     for suite in changes["suite"].keys():
216         for pkg in new.keys():
217             ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
218             if len(ql) > 0:
219                 for file_entry in new[pkg]["files"]:
220                     if files[file_entry].has_key("new"):
221                         del files[file_entry]["new"]
222                 del new[pkg]
223
224     if warn:
225         for s in ['stable', 'oldstable']:
226             if changes["suite"].has_key(s):
227                 print "WARNING: overrides will be added for %s!" % s
228         for pkg in new.keys():
229             if new[pkg].has_key("othercomponents"):
230                 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
231
232     return new, byhand
233
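# Hedged usage sketch (the changes/files dicts come from a loaded Upload as
# described in the docstring; session is a normal SQLA session):
#
#   new, byhand = determine_new("foo_1.0_amd64.changes", u.pkg.changes,
#                               u.pkg.files, session=session)
#   for pkg in new.keys():
#       print pkg, new[pkg]["section"], new[pkg]["priority"]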
234 ################################################################################
235
236 def check_valid(new, session = None):
237     """
238     Check if section and priority for NEW packages exist in database.
239     Additionally does sanity checks:
240       - debian-installer packages have to be udeb (or source)
241       - non debian-installer packages can not be udeb
242       - source priority can only be assigned to dsc file types
243
244     @type new: dict
245     @param new: Dict of new packages with their section, priority and type.
246
247     """
248     for pkg in new.keys():
249         section_name = new[pkg]["section"]
250         priority_name = new[pkg]["priority"]
251         file_type = new[pkg]["type"]
252
253         section = get_section(section_name, session)
254         if section is None:
255             new[pkg]["section id"] = -1
256         else:
257             new[pkg]["section id"] = section.section_id
258
259         priority = get_priority(priority_name, session)
260         if priority is None:
261             new[pkg]["priority id"] = -1
262         else:
263             new[pkg]["priority id"] = priority.priority_id
264
265         # Sanity checks
266         di = section_name.find("debian-installer") != -1
267
268         # If d-i, we must be udeb and vice-versa
269         if     (di and file_type not in ("udeb", "dsc")) or \
270            (not di and file_type == "udeb"):
271             new[pkg]["section id"] = -1
272
273         # If dsc we need to be source and vice-versa
274         if (priority_name == "source" and file_type != "dsc") or \
275            (priority_name != "source" and file_type == "dsc"):
276             new[pkg]["priority id"] = -1
277
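# Hedged example of the dict shape check_valid() mutates (values invented):
#
#   new = {"foo": {"section": "utils", "priority": "optional", "type": "deb"}}
#   check_valid(new, session)
#   # new["foo"]["section id"] / new["foo"]["priority id"] are now set,
#   # -1 whenever the name was unknown or a sanity check failed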
278 ###############################################################################
279
280 # Used by Upload.check_timestamps
281 class TarTime(object):
282     def __init__(self, future_cutoff, past_cutoff):
283         self.reset()
284         self.future_cutoff = future_cutoff
285         self.past_cutoff = past_cutoff
286
287     def reset(self):
288         self.future_files = {}
289         self.ancient_files = {}
290
291     def callback(self, member, data):
292         if member.mtime > self.future_cutoff:
293             self.future_files[member.name] = member.mtime
294         if member.mtime < self.past_cutoff:
295             self.ancient_files[member.name] = member.mtime
296
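# Hedged sketch of wiring TarTime into python-apt's tar walker; DebFile /
# TarFile.go() hand each TarMember to the callback.  The cutoff values are
# assumptions for illustration only:
#
#   tt = TarTime(future_cutoff=int(time.time()) + 24 * 3600,
#                past_cutoff=315532800)  # 1980-01-01
#   apt_inst.DebFile(utils.open_file("foo_1.0_amd64.deb")).data.go(tt.callback)
#   if tt.future_files or tt.ancient_files:
#       pass  # reject: member timestamps fall outside the allowed window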
297 ###############################################################################
298
299 def prod_maintainer(notes, upload):
300     cnf = Config()
301
302     # Here we prepare an editor and get them ready to prod...
303     (fd, temp_filename) = utils.temp_filename()
304     temp_file = os.fdopen(fd, 'w')
305     for note in notes:
306         temp_file.write(note.comment)
307     temp_file.close()
308     editor = os.environ.get("EDITOR","vi")
309     answer = 'E'
310     while answer == 'E':
311         os.system("%s %s" % (editor, temp_filename))
312         temp_fh = utils.open_file(temp_filename)
313         prod_message = "".join(temp_fh.readlines())
314         temp_fh.close()
315         print "Prod message:"
316         print utils.prefix_multi_line_string(prod_message,"  ",include_blank_lines=1)
317         prompt = "[P]rod, Edit, Abandon, Quit ?"
318         answer = "XXX"
319         while prompt.find(answer) == -1:
320             answer = utils.our_raw_input(prompt)
321             m = re_default_answer.search(prompt)
322             if answer == "":
323                 answer = m.group(1)
324             answer = answer[:1].upper()
325     os.unlink(temp_filename)
326     if answer == 'A':
327         return
328     elif answer == 'Q':
329         end()
330         sys.exit(0)
331     # Otherwise, do the prodding...
332     user_email_address = utils.whoami() + " <%s>" % (
333         cnf["Dinstall::MyAdminAddress"])
334
335     Subst = upload.Subst
336
337     Subst["__FROM_ADDRESS__"] = user_email_address
338     Subst["__PROD_MESSAGE__"] = prod_message
339     Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
340
341     prod_mail_message = utils.TemplateSubst(
342         Subst,cnf["Dir::Templates"]+"/process-new.prod")
343
344     # Send the prod mail
345     utils.send_mail(prod_mail_message)
346
347     print "Sent prodding message"
348
349 ################################################################################
350
351 def edit_note(note, upload, session, trainee=False):
352     # Write the current data to a temporary file
353     (fd, temp_filename) = utils.temp_filename()
354     editor = os.environ.get("EDITOR","vi")
355     answer = 'E'
356     while answer == 'E':
357         os.system("%s %s" % (editor, temp_filename))
358         temp_file = utils.open_file(temp_filename)
359         newnote = temp_file.read().rstrip()
360         temp_file.close()
361         print "New Note:"
362         print utils.prefix_multi_line_string(newnote,"  ")
363         prompt = "[D]one, Edit, Abandon, Quit ?"
364         answer = "XXX"
365         while prompt.find(answer) == -1:
366             answer = utils.our_raw_input(prompt)
367             m = re_default_answer.search(prompt)
368             if answer == "":
369                 answer = m.group(1)
370             answer = answer[:1].upper()
371     os.unlink(temp_filename)
372     if answer == 'A':
373         return
374     elif answer == 'Q':
375         end()
376         sys.exit(0)
377
378     comment = NewComment()
379     comment.package = upload.pkg.changes["source"]
380     comment.version = upload.pkg.changes["version"]
381     comment.comment = newnote
382     comment.author  = utils.whoami()
383     comment.trainee = trainee
384     session.add(comment)
385     session.commit()
386
387 ###############################################################################
388
389 # FIXME: Should move into the database
390 # suite names DMs can upload to
391 dm_suites = ['unstable', 'experimental', 'squeeze-backports']
392
393 def get_newest_source(source, session):
394     'returns the newest DBSource object in dm_suites'
395     ## the most recent version of the package uploaded to unstable or
396     ## experimental includes the field "DM-Upload-Allowed: yes" in the source
397     ## section of its control file
398     q = session.query(DBSource).filter_by(source = source). \
399         filter(DBSource.suites.any(Suite.suite_name.in_(dm_suites))). \
400         order_by(desc('source.version'))
401     return q.first()
402
403 def get_suite_version_by_source(source, session):
404     'returns a list of tuples (suite_name, version) for source package'
405     q = session.query(Suite.suite_name, DBSource.version). \
406         join(Suite.sources).filter_by(source = source)
407     return q.all()
408
409 def get_source_by_package_and_suite(package, suite_name, session):
410     '''
411     returns a DBSource query filtered by DBBinary.package and this package's
412     suite_name
413     '''
414     return session.query(DBSource). \
415         join(DBSource.binaries).filter_by(package = package). \
416         join(DBBinary.suites).filter_by(suite_name = suite_name)
417
418 def get_suite_version_by_package(package, arch_string, session):
419     '''
420     returns a list of tuples (suite_name, version) for binary package and
421     arch_string
422     '''
423     return session.query(Suite.suite_name, DBBinary.version). \
424         join(Suite.binaries).filter_by(package = package). \
425         join(DBBinary.architecture). \
426         filter(Architecture.arch_string.in_([arch_string, 'all'])).all()
427
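# Hedged examples of the query helpers above (package and suite names are
# made up):
#
#   get_newest_source("dpkg", session)                   # DBSource or None
#   get_suite_version_by_source("dpkg", session)         # [(suite, version), ...]
#   get_source_by_package_and_suite("dpkg", "unstable", session).first()
#   get_suite_version_by_package("dpkg", "amd64", session)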
428 class Upload(object):
429     """
430     Everything that has to do with an upload being processed.
431
432     """
433     def __init__(self):
434         self.logger = None
435         self.pkg = Changes()
436         self.reset()
437
438     ###########################################################################
439
440     def reset (self):
441         """ Reset a number of internal variables."""
442
443         # Initialize the substitution template map
444         cnf = Config()
445         self.Subst = {}
446         self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
447         if cnf.has_key("Dinstall::BugServer"):
448             self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
449         self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
450         self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
451
452         self.rejects = []
453         self.warnings = []
454         self.notes = []
455
456         self.later_check_files = []
457
458         self.pkg.reset()
459
460     def package_info(self):
461         """
462         Format various messages from this Upload to send to the maintainer.
463         """
464
465         msgs = (
466             ('Reject Reasons', self.rejects),
467             ('Warnings', self.warnings),
468             ('Notes', self.notes),
469         )
470
471         msg = ''
472         for title, messages in msgs:
473             if messages:
474                 msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
475         msg += '\n\n'
476
477         return msg
478
479     ###########################################################################
480     def update_subst(self):
481         """ Set up the per-package template substitution mappings """
482
483         cnf = Config()
484
485         # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
486         if not self.pkg.changes.has_key("architecture") or not \
487            isinstance(self.pkg.changes["architecture"], dict):
488             self.pkg.changes["architecture"] = { "Unknown" : "" }
489
490         # and maintainer2047 may not exist.
491         if not self.pkg.changes.has_key("maintainer2047"):
492             self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]
493
494         self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
495         self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
496         self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")
497
498         # For source uploads the Changed-By field wins; otherwise Maintainer wins.
499         if self.pkg.changes["architecture"].has_key("source") and \
500            self.pkg.changes["changedby822"] != "" and \
501            (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
502
503             self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
504             self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
505             self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
506         else:
507             self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
508             self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
509             self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")
510
511         # Process policy doesn't set the fingerprint field and I don't want to make it
512         # do it for now as I don't want to have to deal with the case where we accepted
513         # the package into PU-NEW, but the fingerprint has gone away from the keyring in
514         # the meantime so the package will be remarked as rejectable.  Urgh.
515         # TODO: Fix this properly
516         if self.pkg.changes.has_key('fingerprint'):
517             session = DBConn().session()
518             fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
519             if fpr and self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
520                 if self.pkg.changes.has_key("sponsoremail"):
521                     self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
522             session.close()
523
524         if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
525             self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
526
527         # Apply any global override of the Maintainer field
528         if cnf.get("Dinstall::OverrideMaintainer"):
529             self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
530             self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]
531
532         self.Subst["__REJECT_MESSAGE__"] = self.package_info()
533         self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
534         self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
535         self.Subst["__SUITE__"] = ", ".join(self.pkg.changes["distribution"])
536
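        # After update_subst() the map carries, illustratively (values are
        # assumptions, keys are the ones set above):
        #   __ARCHITECTURE__    -> "source amd64"
        #   __MAINTAINER_TO__   -> RFC2047 address(es) the mails go to
        #   __SUITE__           -> "unstable"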
537     ###########################################################################
538     def load_changes(self, filename):
539         """
540         Load a changes file and set up a dictionary around it. Also checks for mandatory
541         fields within.
542
543         @type filename: string
544         @param filename: Changes filename, full path.
545
546         @rtype: boolean
547         @return: whether the changes file was valid or not.  We may want to
548                  reject even if this is True (see what gets put in self.rejects).
549                  This is simply to prevent us even trying things later which will
550                  fail because we couldn't properly parse the file.
551         """
552         Cnf = Config()
553         self.pkg.changes_file = filename
554
555         # Parse the .changes file into a dictionary
556         try:
557             self.pkg.changes.update(parse_changes(filename))
558         except CantOpenError:
559             self.rejects.append("%s: can't read file." % (filename))
560             return False
561         except ParseChangesError as line:
562             self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
563             return False
564         except ChangesUnicodeError:
565             self.rejects.append("%s: changes file not proper utf-8" % (filename))
566             return False
567
568         # Parse the Files field from the .changes into another dictionary
569         try:
570             self.pkg.files.update(utils.build_file_list(self.pkg.changes))
571         except ParseChangesError as line:
572             self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
573             return False
574         except UnknownFormatError as format:
575             self.rejects.append("%s: unknown format '%s'." % (filename, format))
576             return False
577
578         # Check for mandatory fields
579         for i in ("distribution", "source", "binary", "architecture",
580                   "version", "maintainer", "files", "changes", "description"):
581             if not self.pkg.changes.has_key(i):
582                 # Avoid undefined errors later
583                 self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
584                 return False
585
586         # Strip a source version in brackets from the source field
587         if re_strip_srcver.search(self.pkg.changes["source"]):
588             self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])
589
590         # Ensure the source field is a valid package name.
591         if not re_valid_pkg_name.match(self.pkg.changes["source"]):
592             self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))
593
594         # Split multi-value fields into a lower-level dictionary
595         for i in ("architecture", "distribution", "binary", "closes"):
596             o = self.pkg.changes.get(i, "")
597             if o != "":
598                 del self.pkg.changes[i]
599
600             self.pkg.changes[i] = {}
601
602             for j in o.split():
603                 self.pkg.changes[i][j] = 1
604
605         # Fix the Maintainer: field to be RFC822/2047 compatible
606         try:
607             (self.pkg.changes["maintainer822"],
608              self.pkg.changes["maintainer2047"],
609              self.pkg.changes["maintainername"],
610              self.pkg.changes["maintaineremail"]) = \
611                    fix_maintainer (self.pkg.changes["maintainer"])
612         except ParseMaintError as msg:
613             self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
614                    % (filename, self.pkg.changes["maintainer"], msg))
615
616         # ...likewise for the Changed-By: field if it exists.
617         try:
618             (self.pkg.changes["changedby822"],
619              self.pkg.changes["changedby2047"],
620              self.pkg.changes["changedbyname"],
621              self.pkg.changes["changedbyemail"]) = \
622                    fix_maintainer (self.pkg.changes.get("changed-by", ""))
623         except ParseMaintError as msg:
624             self.pkg.changes["changedby822"] = ""
625             self.pkg.changes["changedby2047"] = ""
626             self.pkg.changes["changedbyname"] = ""
627             self.pkg.changes["changedbyemail"] = ""
628
629             self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
630                    % (filename, self.pkg.changes["changed-by"], msg))
631
632         # Ensure all the values in Closes: are numbers
633         if self.pkg.changes.has_key("closes"):
634             for i in self.pkg.changes["closes"].keys():
635                 if re_isanum.match(i) is None:
636                     self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))
637
638         # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
639         self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
640         self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])
641
642         # Check the .changes is non-empty
643         if not self.pkg.files:
644             self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
645             return False
646
647         # Changes was syntactically valid even if we'll reject
648         return True
649
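    # Hedged usage sketch (path invented):
    #
    #   u = Upload()
    #   if not u.load_changes("/srv/queue/foo_1.0_amd64.changes"):
    #       pass  # unparseable; u.rejects says why
    #   # even when True is returned, u.rejects may already hold reasons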
650     ###########################################################################
651
652     def check_distributions(self):
653         "Check and map the Distribution field"
654
655         Cnf = Config()
656
657         # Handle suite mappings
658         for m in Cnf.ValueList("SuiteMappings"):
659             args = m.split()
660             mtype = args[0]
661             if mtype == "map" or mtype == "silent-map":
662                 (source, dest) = args[1:3]
663                 if self.pkg.changes["distribution"].has_key(source):
664                     del self.pkg.changes["distribution"][source]
665                     self.pkg.changes["distribution"][dest] = 1
666                     if mtype != "silent-map":
667                         self.notes.append("Mapping %s to %s." % (source, dest))
668                 if self.pkg.changes.has_key("distribution-version"):
669                     if self.pkg.changes["distribution-version"].has_key(source):
670                         self.pkg.changes["distribution-version"][source]=dest
671             elif mtype == "map-unreleased":
672                 (source, dest) = args[1:3]
673                 if self.pkg.changes["distribution"].has_key(source):
674                     for arch in self.pkg.changes["architecture"].keys():
675                         if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
676                             self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
677                             del self.pkg.changes["distribution"][source]
678                             self.pkg.changes["distribution"][dest] = 1
679                             break
680             elif mtype == "ignore":
681                 suite = args[1]
682                 if self.pkg.changes["distribution"].has_key(suite):
683                     del self.pkg.changes["distribution"][suite]
684                     self.warnings.append("Ignoring %s as a target suite." % (suite))
685             elif mtype == "reject":
686                 suite = args[1]
687                 if self.pkg.changes["distribution"].has_key(suite):
688                     self.rejects.append("Uploads to %s are not accepted." % (suite))
689             elif mtype == "propup-version":
690                 # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
691                 #
692                 # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
693                 if self.pkg.changes["distribution"].has_key(args[1]):
694                     self.pkg.changes.setdefault("distribution-version", {})
695                     for suite in args[2:]:
696                         self.pkg.changes["distribution-version"][suite] = suite
697
698         # Ensure there is (still) a target distribution
699         if len(self.pkg.changes["distribution"].keys()) < 1:
700             self.rejects.append("No valid distribution remaining.")
701
702         # Ensure target distributions exist
703         for suite in self.pkg.changes["distribution"].keys():
704             if not get_suite(suite.lower()):
705                 self.rejects.append("Unknown distribution `%s'." % (suite))
706
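    # Hedged sample of SuiteMappings entries this parser understands, in
    # dak.conf's apt-style syntax (suite names are made up):
    #
    #   SuiteMappings
    #   {
    #     "map stable proposed-updates";
    #     "silent-map oldstable-security oldstable";
    #     "ignore testing";
    #     "reject unstable-security";
    #     "propup-version testing-proposed-updates unstable";
    #   };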
707     ###########################################################################
708
709     def binary_file_checks(self, f, session):
710         cnf = Config()
711         entry = self.pkg.files[f]
712
713         # Extract package control information
714         deb_file = utils.open_file(f)
715         try:
716             control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
717         except:
718             self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_info()[0]))
719             deb_file.close()
720             # Can't continue, none of the checks on control would work.
721             return
722
723         # Check for mandatory "Description:"
724         deb_file.seek(0)
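        # Indexing ["Description"] below raises KeyError when the field is
        # absent; the bare except turns that (or a re-parse failure) into a
        # reject.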
725         try:
726             apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
727         except:
728             self.rejects.append("%s: Missing Description in binary package" % (f))
729             return
730
731         deb_file.close()
732
733         # Check for mandatory fields
734         for field in [ "Package", "Architecture", "Version" ]:
735             if control.Find(field) is None:
736                 # Can't continue
737                 self.rejects.append("%s: No %s field in control." % (f, field))
738                 return
739
740         # Ensure the package name matches the one given in the .changes
741         if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
742             self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
743
744         # Validate the package field
745         package = control.Find("Package")
746         if not re_valid_pkg_name.match(package):
747             self.rejects.append("%s: invalid package name '%s'." % (f, package))
748
749         # Validate the version field
750         version = control.Find("Version")
751         if not re_valid_version.match(version):
752             self.rejects.append("%s: invalid version number '%s'." % (f, version))
753
754         # Ensure the architecture of the .deb is one we know about.
755         default_suite = cnf.get("Dinstall::DefaultSuite", "unstable")
756         architecture = control.Find("Architecture")
757         upload_suite = self.pkg.changes["distribution"].keys()[0]
758
759         if      architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session = session)] \
760             and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session = session)]:
761             self.rejects.append("Unknown architecture '%s'." % (architecture))
762
763         # Ensure the architecture of the .deb is one of the ones
764         # listed in the .changes.
765         if not self.pkg.changes["architecture"].has_key(architecture):
766             self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
767
768         # Sanity-check the Depends field
769         depends = control.Find("Depends")
770         if depends == '':
771             self.rejects.append("%s: Depends field is empty." % (f))
772
773         # Sanity-check the Provides field
774         provides = control.Find("Provides")
775         if provides:
776             provide = re_spacestrip.sub('', provides)
777             if provide == '':
778                 self.rejects.append("%s: Provides field is empty." % (f))
779             prov_list = provide.split(",")
780             for prov in prov_list:
781                 if not re_valid_pkg_name.match(prov):
782                     self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
783
784         # If there is a Built-Using field, we need to check we can find the
785         # exact source version
786         built_using = control.Find("Built-Using")
787         if built_using:
788             try:
789                 entry["built-using"] = []
790                 for dep in apt_pkg.parse_depends(built_using):
791                     bu_s, bu_v, bu_e = dep[0]
792                     # Check that it's an exact match dependency and we have
793                     # some form of version
794                     if bu_e != "=" or len(bu_v) < 1:
795                         self.rejects.append("%s: Built-Using contains non strict dependency (%s %s %s)" % (f, bu_s, bu_e, bu_v))
796                     else:
797                         # Find the source id for this version
798                         bu_so = get_sources_from_name(bu_s, version=bu_v, session = session)
799                         if len(bu_so) != 1:
800                             self.rejects.append("%s: Built-Using (%s = %s): Cannot find source package" % (f, bu_s, bu_v))
801                         else:
802                             entry["built-using"].append( (bu_so[0].source, bu_so[0].version, ) )
803
804             except ValueError as e:
805                 self.rejects.append("%s: Cannot parse Built-Using field: %s" % (f, str(e)))
806
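        # For reference: apt_pkg.parse_depends("foo (= 1.0)") returns
        # [[('foo', '1.0', '=')]] -- (name, version, relation) -- which is
        # why dep[0] above unpacks to (bu_s, bu_v, bu_e).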
807
808         # Check the section & priority match those given in the .changes (non-fatal)
809         if     control.Find("Section") and entry["section"] != "" \
810            and entry["section"] != control.Find("Section"):
811             self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
812                                 (f, control.Find("Section", ""), entry["section"]))
813         if control.Find("Priority") and entry["priority"] != "" \
814            and entry["priority"] != control.Find("Priority"):
815             self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
816                                 (f, control.Find("Priority", ""), entry["priority"]))
817
818         entry["package"] = package
819         entry["architecture"] = architecture
820         entry["version"] = version
821         entry["maintainer"] = control.Find("Maintainer", "")
822
823         if f.endswith(".udeb"):
824             self.pkg.files[f]["dbtype"] = "udeb"
825         elif f.endswith(".deb"):
826             self.pkg.files[f]["dbtype"] = "deb"
827         else:
828             self.rejects.append("%s is neither a .deb nor a .udeb." % (f))
829
830         entry["source"] = control.Find("Source", entry["package"])
831
832         # Get the source version
833         source = entry["source"]
834         source_version = ""
835
836         if source.find("(") != -1:
837             m = re_extract_src_version.match(source)
838             source = m.group(1)
839             source_version = m.group(2)
840
841         if not source_version:
842             source_version = self.pkg.files[f]["version"]
843
844         entry["source package"] = source
845         entry["source version"] = source_version
846
847         # Ensure the filename matches the contents of the .deb
848         m = re_isadeb.match(f)
849
850         #  package name
851         file_package = m.group(1)
852         if entry["package"] != file_package:
853             self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
854                                 (f, file_package, entry["dbtype"], entry["package"]))
855         epochless_version = re_no_epoch.sub('', control.Find("Version"))
856
857         #  version
858         file_version = m.group(2)
859         if epochless_version != file_version:
860             self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
861                                 (f, file_version, entry["dbtype"], epochless_version))
862
863         #  architecture
864         file_architecture = m.group(3)
865         if entry["architecture"] != file_architecture:
866             self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
867                                 (f, file_architecture, entry["dbtype"], entry["architecture"]))
868
869         # Check for existing source
870         source_version = entry["source version"]
871         source_package = entry["source package"]
872         if self.pkg.changes["architecture"].has_key("source"):
873             if source_version != self.pkg.changes["version"]:
874                 self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
875                                     (source_version, f, self.pkg.changes["version"]))
876         else:
877             # Check in the SQL database
878             if not source_exists(source_package, source_version, suites = \
879                 self.pkg.changes["distribution"].keys(), session = session):
880                 # Check in one of the other directories
881                 source_epochless_version = re_no_epoch.sub('', source_version)
882                 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
883
884                 byhand_dir = get_policy_queue('byhand', session).path
885                 new_dir = get_policy_queue('new', session).path
886
887                 if os.path.exists(os.path.join(byhand_dir, dsc_filename)):
888                     entry["byhand"] = 1
889                 elif os.path.exists(os.path.join(new_dir, dsc_filename)):
890                     entry["new"] = 1
891                 else:
892                     dsc_file_exists = False
893                     # TODO: Don't hardcode this list: use all relevant queues
894                     #       The question is how to determine what is relevant
895                     for queue_name in ["embargoed", "unembargoed", "proposedupdates", "oldproposedupdates"]:
896                         queue = get_policy_queue(queue_name, session)
897                         if queue:
898                             if os.path.exists(os.path.join(queue.path, dsc_filename)):
899                                 dsc_file_exists = True
900                                 break
901
902                     if not dsc_file_exists:
903                         self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
904
905         # Check the version and for file overwrites
906         self.check_binary_against_db(f, session)
907
908     def source_file_checks(self, f, session):
909         entry = self.pkg.files[f]
910
911         m = re_issource.match(f)
912         if not m:
913             return
914
915         entry["package"] = m.group(1)
916         entry["version"] = m.group(2)
917         entry["type"] = m.group(3)
918
919         # Ensure the source package name matches the Source field in the .changes
920         if self.pkg.changes["source"] != entry["package"]:
921             self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
922
923         # Ensure the source version matches the version in the .changes file
924         if re_is_orig_source.match(f):
925             changes_version = self.pkg.changes["chopversion2"]
926         else:
927             changes_version = self.pkg.changes["chopversion"]
928
929         if changes_version != entry["version"]:
930             self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))
931
932         # Ensure the .changes lists source in the Architecture field
933         if not self.pkg.changes["architecture"].has_key("source"):
934             self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))
935
936         # Check the signature of a .dsc file
937         if entry["type"] == "dsc":
938             # check_signature returns either:
939             #  (None, [list, of, rejects]) or (signature, [])
940             (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
941             for j in rejects:
942                 self.rejects.append(j)
943
944         entry["architecture"] = "source"
945
946     def per_suite_file_checks(self, f, suite, session):
947         cnf = Config()
948         entry = self.pkg.files[f]
949
950         # Skip byhand
951         if entry.has_key("byhand"):
952             return
953
954         # Check we have fields we need to do these checks
955         oktogo = True
956         for m in ['component', 'package', 'priority', 'size', 'md5sum']:
957             if not entry.has_key(m):
958                 self.rejects.append("file '%s' does not have field %s set" % (f, m))
959                 oktogo = False
960
961         if not oktogo:
962             return
963
964         # Handle component mappings
965         for m in cnf.ValueList("ComponentMappings"):
966             (source, dest) = m.split()
967             if entry["component"] == source:
968                 entry["original component"] = source
969                 entry["component"] = dest
970
971         # Ensure the component is valid for the target suite
972         if entry["component"] not in get_component_names(session):
973             self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
974             return
975
976         # Validate the component
977         if not get_component(entry["component"], session):
978             self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
979             return
980
981         # See if the package is NEW
982         if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
983             entry["new"] = 1
984
985         # Validate the priority
986         if entry["priority"].find('/') != -1:
987             self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))
988
989         # Determine the location
990         location = cnf["Dir::Pool"]
991         l = get_location(location, entry["component"], session=session)
992         if l is None:
993             self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
994             entry["location id"] = -1
995         else:
996             entry["location id"] = l.location_id
997
998         # Check the md5sum & size against existing files (if any)
999         entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])
1000
1001         found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
1002                                          entry["size"], entry["md5sum"], entry["location id"])
1003
1004         if found is None:
1005             self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
1006         elif found is False and poolfile is not None:
1007             self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
1008         else:
1009             if poolfile is None:
1010                 entry["files id"] = None
1011             else:
1012                 entry["files id"] = poolfile.file_id
1013
1014         # Check for packages that have moved from one component to another
1015         entry['suite'] = suite
1016         arch_list = [entry["architecture"], 'all']
1017         component = get_component_by_package_suite(self.pkg.files[f]['package'], \
1018             [suite], arch_list = arch_list, session = session)
1019         if component is not None:
1020             entry["othercomponents"] = component
1021
1022     def check_files(self, action=True):
1023         file_keys = self.pkg.files.keys()
1024         holding = Holding()
1025         cnf = Config()
1026
1027         if action:
1028             cwd = os.getcwd()
1029             os.chdir(self.pkg.directory)
1030             for f in file_keys:
1031                 ret = holding.copy_to_holding(f)
1032                 if ret is not None:
1033                     self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)
1034
1035             os.chdir(cwd)
1036
1037         # check we already know the changes file
1038         # [NB: this check must be done post-suite mapping]
1039         base_filename = os.path.basename(self.pkg.changes_file)
1040
1041         session = DBConn().session()
1042
1043         try:
1044             dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
1045             # if in the pool or in a queue other than unchecked, reject
1046             if (dbc.in_queue is None) \
1047                    or (dbc.in_queue is not None
1048                        and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
1049                 self.rejects.append("%s file already known to dak" % base_filename)
1050         except NoResultFound as e:
1051             # not known, good
1052             pass
1053
1054         has_binaries = False
1055         has_source = False
1056
1057         for f, entry in self.pkg.files.items():
1058             # Ensure the file does not already exist in one of the accepted directories
1059             # TODO: Dynamically generate this list
1060             for queue_name in [ "byhand", "new", "proposedupdates", "oldproposedupdates", "embargoed", "unembargoed" ]:
1061                 queue = get_policy_queue(queue_name, session)
1062                 if queue and os.path.exists(os.path.join(queue.path, f)):
1063                     self.rejects.append("%s file already exists in the %s queue." % (f, queue_name))
1064
1065             if not re_taint_free.match(f):
1066                 self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))
1067
1068             # Check the file is readable
1069             if os.access(f, os.R_OK) == 0:
1070                 # When running in -n, copy_to_holding() won't have
1071                 # generated the reject_message, so we need to.
1072                 if action:
1073                     if os.path.exists(f):
1074                         self.rejects.append("Can't read `%s'. [permission denied]" % (f))
1075                     else:
1076                         # Don't directly reject, mark to check later to deal with orig's
1077                         # we can find in the pool
1078                         self.later_check_files.append(f)
1079                 entry["type"] = "unreadable"
1080                 continue
1081
1082             # If it's byhand skip remaining checks
1083             if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
1084                 entry["byhand"] = 1
1085                 entry["type"] = "byhand"
1086
1087             # Checks for a binary package...
1088             elif re_isadeb.match(f):
1089                 has_binaries = True
1090                 entry["type"] = "deb"
1091
1092                 # This routine appends to self.rejects/warnings as appropriate
1093                 self.binary_file_checks(f, session)
1094
1095             # Checks for a source package...
1096             elif re_issource.match(f):
1097                 has_source = True
1098
1099                 # This routine appends to self.rejects/warnings as appropriate
1100                 self.source_file_checks(f, session)
1101
1102             # Not a binary or source package?  Assume byhand...
1103             else:
1104                 entry["byhand"] = 1
1105                 entry["type"] = "byhand"
1106
1107             # Per-suite file checks
1108             entry["oldfiles"] = {}
1109             for suite in self.pkg.changes["distribution"].keys():
1110                 self.per_suite_file_checks(f, suite, session)
1111
1112         session.close()
1113
1114         # If the .changes file says it has source, it must have source.
1115         if self.pkg.changes["architecture"].has_key("source"):
1116             if not has_source:
1117                 self.rejects.append("no source found but Architecture line in changes mentions source.")
1118
1119             if (not has_binaries) and (not cnf.FindB("Dinstall::AllowSourceOnlyUploads")):
1120                 self.rejects.append("source only uploads are not supported.")
1121
1122     ###########################################################################
1123
1124     def __dsc_filename(self):
1125         """
1126         Returns: (Status, Dsc_Filename)
1127         where
1128           Status: Boolean; True when there was no error, False otherwise
1129           Dsc_Filename: String; name of the dsc file if Status is True, reason for the error otherwise
1130         """
1131         dsc_filename = None
1132
1133         # find the dsc
1134         for name, entry in self.pkg.files.items():
1135             if entry.has_key("type") and entry["type"] == "dsc":
1136                 if dsc_filename:
1137                     return False, "cannot process a .changes file with multiple .dsc's."
1138                 else:
1139                     dsc_filename = name
1140
1141         if not dsc_filename:
1142             return False, "source uploads must contain a dsc file"
1143
1144         return True, dsc_filename
1145
1146     def load_dsc(self, action=True, signing_rules=1):
1147         """
1148         Find and load the dsc from self.pkg.files into self.dsc
1149
1150         Returns: (Status, Reason)
1151         where
1152           Status: Boolean; True when there was no error, False otherwise
1153           Reason: String; When Status is False this describes the error
1154         """
1155
1156         # find the dsc
1157         (status, dsc_filename) = self.__dsc_filename()
1158         if not status:
1159             # If status is false, dsc_filename has the reason
1160             return False, dsc_filename
1161
1162         try:
1163             self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=signing_rules, dsc_file=1))
1164         except CantOpenError:
1165             if not action:
1166                 return False, "%s: can't read file." % (dsc_filename)
1167         except ParseChangesError as line:
1168             return False, "%s: parse error, can't grok: %s." % (dsc_filename, line)
1169         except InvalidDscError as line:
1170             return False, "%s: syntax error on line %s." % (dsc_filename, line)
1171         except ChangesUnicodeError:
1172             return False, "%s: dsc file not proper utf-8." % (dsc_filename)
1173
1174         return True, None
1175
1176     ###########################################################################
1177
1178     def check_dsc(self, action=True, session=None):
1179         """Returns bool indicating whether or not the source changes are valid"""
1180         # Ensure there is source to check
1181         if not self.pkg.changes["architecture"].has_key("source"):
1182             return True
1183
1184         if session is None:
1185             session = DBConn().session()
1186
1187         (status, reason) = self.load_dsc(action=action)
1188         if not status:
1189             self.rejects.append(reason)
1190             return False
1191         (status, dsc_filename) = self.__dsc_filename()
1192         if not status:
1193             # If status is false, dsc_filename has the reason
1194             self.rejects.append(dsc_filename)
1195             return False
1196
1197         # Build up the file list of files mentioned by the .dsc
1198         try:
1199             self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
1200         except NoFilesFieldError:
1201             self.rejects.append("%s: no Files: field." % (dsc_filename))
1202             return False
1203         except UnknownFormatError as format:
1204             self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
1205             return False
1206         except ParseChangesError as line:
1207             self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
1208             return False
1209
1210         # Enforce mandatory fields
1211         for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
1212             if not self.pkg.dsc.has_key(i):
1213                 self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
1214                 return False
1215
1216         # Validate the source and version fields
1217         if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
1218             self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
1219         if not re_valid_version.match(self.pkg.dsc["version"]):
1220             self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))
1221
1222         # Only a limited list of source formats are allowed in each suite
1223         for dist in self.pkg.changes["distribution"].keys():
1224             suite = get_suite(dist, session=session)
1225             if not suite:
1226                 self.rejects.append("%s: cannot find suite %s when checking source formats" % (dsc_filename, dist))
1227                 continue
1228             allowed = [ x.format_name for x in suite.srcformats ]
1229             if self.pkg.dsc["format"] not in allowed:
1230                 self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))
1231
1232         # Validate the Maintainer field
1233         try:
1234             # We ignore the return value
1235             fix_maintainer(self.pkg.dsc["maintainer"])
1236         except ParseMaintError as msg:
1237             self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
1238                                  % (dsc_filename, self.pkg.dsc["maintainer"], msg))
1239
1240         # Validate the build-depends field(s)
1241         for field_name in [ "build-depends", "build-depends-indep" ]:
1242             field = self.pkg.dsc.get(field_name)
1243             if field:
1244                 # Have apt try to parse them...
1245                 try:
1246                     apt_pkg.ParseSrcDepends(field)
1247                 except:
1248                     self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))
1249
1250         # Ensure the version number in the .dsc matches the version number in the .changes
1251         epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1252         changes_version = self.pkg.files[dsc_filename]["version"]
1253
1254         if epochless_dsc_version != changes_version:
1255             self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
1256
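        # Illustrative (a sketch, assuming re_no_epoch matches a leading
        # "<digits>:" epoch prefix, as elsewhere in dak):
        #
        #   >>> re_no_epoch.sub('', '1:2.0-1')
        #   '2.0-1'
        #   >>> re_no_epoch.sub('', '2.0-1')
        #   '2.0-1'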
1257         # Ensure the Files field contains only what's expected
1258         self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))
1259
1260         # Ensure source is newer than existing source in target suites
1261         session = DBConn().session()
1262         self.check_source_against_db(dsc_filename, session)
1263         self.check_dsc_against_db(dsc_filename, session)
1264
1265         dbchg = get_dbchange(self.pkg.changes_file, session)
1266
1267         # Finally, check if we're missing any files
1268         for f in self.later_check_files:
1269             print 'Checking for already-processed file: %s' % f
1270             # Check if we've already processed this file if we have a dbchg object
1271             ok = False
1272             if dbchg:
1273                 for pf in dbchg.files:
1274                     if pf.filename == f and pf.processed:
1275                         self.notes.append('%s was already processed so we can go ahead' % f)
1276                         ok = True
1277                         del self.pkg.files[f]
1278             if not ok:
1279                 self.rejects.append("Could not find file %s referenced in changes" % f)
1280
1281         session.close()
1282
1283         return (len(self.rejects) == 0)
1284
1285     ###########################################################################
1286
1287     def get_changelog_versions(self, source_dir):
1288         """Extracts the source package and (optionally) grabs the
1289         version history out of debian/changelog for the BTS."""
1290
1291         cnf = Config()
1292
1293         # Find the .dsc (again)
1294         dsc_filename = None
1295         for f in self.pkg.files.keys():
1296             if self.pkg.files[f]["type"] == "dsc":
1297                 dsc_filename = f
1298
1299         # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1300         if not dsc_filename:
1301             return
1302
1303         # Create a symlink mirror of the source files in our temporary directory
1304         for f in self.pkg.files.keys():
1305             m = re_issource.match(f)
1306             if m:
1307                 src = os.path.join(source_dir, f)
1308                 # If a file is missing for whatever reason, give up.
1309                 if not os.path.exists(src):
1310                     return
1311                 ftype = m.group(3)
1312                 if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
1313                    self.pkg.orig_files[f].has_key("path"):
1314                     continue
1315                 dest = os.path.join(os.getcwd(), f)
1316                 os.symlink(src, dest)
1317
1318         # If the orig files are not a part of the upload, create symlinks to the
1319         # existing copies.
1320         for orig_file in self.pkg.orig_files.keys():
1321             if not self.pkg.orig_files[orig_file].has_key("path"):
1322                 continue
1323             dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
1324             os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
1325
1326         # Extract the source
1327         try:
1328             unpacked = UnpackedSource(dsc_filename)
1329         except Exception as e:
1330             self.rejects.append("'dpkg-source -x' failed for %s. (%s)" % (dsc_filename, str(e)))
1331             return
1332
1333         if not cnf.Find("Dir::BTSVersionTrack"):
1334             return
1335
1336         # Get the upstream version
1337         upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1338         if re_strip_revision.search(upstr_version):
1339             upstr_version = re_strip_revision.sub('', upstr_version)
1340
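        # e.g. (a sketch, assuming the usual dak regexes):
        #   "3:1.2.3-4" -> re_no_epoch -> "1.2.3-4" -> re_strip_revision -> "1.2.3"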
1341         # Ensure the changelog file exists
1342         changelog_file = unpacked.get_changelog_file()
1343         if changelog_file is None:
1344             self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
1345             return
1346
1347         # Parse the changelog
1348         self.pkg.dsc["bts changelog"] = ""
1349         for line in changelog_file.readlines():
1350             m = re_changelog_versions.match(line)
1351             if m:
1352                 self.pkg.dsc["bts changelog"] += line
1353         changelog_file.close()
1354         unpacked.cleanup()
1355
1356         # Check we found at least one revision in the changelog
1357         if not self.pkg.dsc["bts changelog"]:
1358             self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1359
1360     def check_source(self):
1361         # Bail out if:
1362         #    a) there's no source
1363         if not self.pkg.changes["architecture"].has_key("source"):
1364             return
1365
1366         tmpdir = utils.temp_dirname()
1367
1368         # Move into the temporary directory
1369         cwd = os.getcwd()
1370         os.chdir(tmpdir)
1371
1372         # Get the changelog version history
1373         self.get_changelog_versions(cwd)
1374
1375         # Move back and cleanup the temporary tree
1376         os.chdir(cwd)
1377
1378         try:
1379             shutil.rmtree(tmpdir)
1380         except OSError as e:
1381             if e.errno != errno.EACCES:
1382                 utils.fubar("%s: couldn't remove tmp dir %s for source tree: %s" % (self.pkg.dsc["source"], tmpdir, e))
1384
1385             self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1386             # We probably have unreadable (u-r) or unwritable (u-w)
1387             # directories in the tree, so chmod everything and try again.
1388             cmd = "chmod -R u+rwx %s" % (tmpdir)
1389             result = os.system(cmd)
1390             if result != 0:
1391                 utils.fubar("'%s' failed with result %s." % (cmd, result))
1392             shutil.rmtree(tmpdir)
1393         except Exception as e:
1394             utils.fubar("%s: couldn't remove tmp dir %s for source tree: %s" % (self.pkg.dsc["source"], tmpdir, e))
1396
1397     ###########################################################################
1398     def ensure_hashes(self):
1399         # Make sure we recognise the format of the Files: field in the .changes
1400         format = self.pkg.changes.get("format", "0.0").split(".", 1)
1401         if len(format) == 2:
1402             format = int(format[0]), int(format[1])
1403         else:
1404             format = int(float(format[0])), 0
1405
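        # Sketch of the normalisation above:
        #   "1.8" -> (1, 8)    "1" -> (1, 0)    default "0.0" -> (0, 0)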
1406         # We need to deal with the original changes blob, as the fields we need
1407         # might not be in the changes dict serialised into the .dak anymore.
1408         orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1409
1410         # Copy the checksums over to the current changes dict.  This will keep
1411         # the existing modifications to it intact.
1412         for field in orig_changes:
1413             if field.startswith('checksums-'):
1414                 self.pkg.changes[field] = orig_changes[field]
1415
1416         # Check for unsupported hashes
1417         for j in utils.check_hash_fields(".changes", self.pkg.changes):
1418             self.rejects.append(j)
1419
1420         for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1421             self.rejects.append(j)
1422
1423         # We have to calculate the hash ourselves if the changes format predates the
1424         # version the hash appeared in, rather than requiring it to exist in the changes file
1425         for hashname, hashfunc, version in utils.known_hashes:
1426             # TODO: Move _ensure_changes_hash into this class
1427             for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1428                 self.rejects.append(j)
1429             if "source" in self.pkg.changes["architecture"]:
1430                 # TODO: Move _ensure_dsc_hash into this class
1431                 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1432                     self.rejects.append(j)
1433
1434     def check_hashes(self):
1435         for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1436             self.rejects.append(m)
1437
1438         for m in utils.check_size(".changes", self.pkg.files):
1439             self.rejects.append(m)
1440
1441         for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1442             self.rejects.append(m)
1443
1444         for m in utils.check_size(".dsc", self.pkg.dsc_files):
1445             self.rejects.append(m)
1446
1447         self.ensure_hashes()
1448
1449     ###########################################################################
1450
1451     def ensure_orig(self, target_dir='.', session=None):
1452         """
1453         Ensures that all orig files mentioned in the changes file are present
1454         in target_dir. If they do not exist, they are symlinked into place.
1455
1456         A list of the symlinks that were created is returned (so they
1457         can be removed).
1458         """
1459
1460         symlinked = []
1461         cnf = Config()
1462
1463         for filename, entry in self.pkg.dsc_files.iteritems():
1464             if not re_is_orig_source.match(filename):
1465                 # File is not an orig; ignore
1466                 continue
1467
1468             if os.path.exists(filename):
1469                 # File exists, no need to continue
1470                 continue
1471
1472             def symlink_if_valid(path):
1473                 f = utils.open_file(path)
1474                 md5sum = apt_pkg.md5sum(f)
1475                 f.close()
1476
1477                 fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
1478                 expected = (int(entry['size']), entry['md5sum'])
1479
1480                 if fingerprint != expected:
1481                     return False
1482
1483                 dest = os.path.join(target_dir, filename)
1484
1485                 os.symlink(path, dest)
1486                 symlinked.append(dest)
1487
1488                 return True
1489
1490             session_ = session
1491             if session is None:
1492                 session_ = DBConn().session()
1493
1494             found = False
1495
1496             # Look in the pool
1497             for poolfile in get_poolfile_like_name(filename, session_):
1498                 poolfile_path = os.path.join(
1499                     poolfile.location.path, poolfile.filename
1500                 )
1501
1502                 if symlink_if_valid(poolfile_path):
1503                     found = True
1504                     break
1505
1506             if session is None:
1507                 session_.close()
1508
1509             if found:
1510                 continue
1511
1512             # Look in some other queues for the file
1513             queue_names = ['new', 'byhand',
1514                            'proposedupdates', 'oldproposedupdates',
1515                            'embargoed', 'unembargoed']
1516
1517             for queue_name in queue_names:
1518                 queue = get_policy_queue(queue_name, session)
1519                 if not queue:
1520                     continue
1521
1522                 queuefile_path = os.path.join(queue.path, filename)
1523
1524                 if not os.path.exists(queuefile_path):
1525                     # Does not exist in this queue
1526                     continue
1527
1528                 if symlink_if_valid(queuefile_path):
1529                     break
1530
1531         return symlinked
1532
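    # Hypothetical usage sketch for ensure_orig() (mirrors how check_lintian
    # below uses it):
    #
    #   symlinked = self.ensure_orig()
    #   try:
    #       ...  # run tools that need the orig tarballs present
    #   finally:
    #       for link in symlinked:
    #           os.unlink(link)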
1533     ###########################################################################
1534
1535     def check_lintian(self):
1536         """
1537         Extends self.rejects by checking the output of lintian against tags
1538         specified in Dinstall::LintianTags.
1539         """
1540
1541         cnf = Config()
1542
1543         # Don't reject binary uploads
1544         if not self.pkg.changes['architecture'].has_key('source'):
1545             return
1546
1547         # Only check some distributions
1548         for dist in ('unstable', 'experimental'):
1549             if dist in self.pkg.changes['distribution']:
1550                 break
1551         else:
1552             return
1553
1554         # If we do not have a tagfile, don't do anything
1555         tagfile = cnf.get("Dinstall::LintianTags")
1556         if not tagfile:
1557             return
1558
1559         # Parse the yaml file
1560         sourcefile = open(tagfile, 'r')
1561         sourcecontent = sourcefile.read()
1562         sourcefile.close()
1563
1564         try:
1565             lintiantags = yaml.safe_load(sourcecontent)['lintian']
1566         except yaml.YAMLError as msg:
1567             utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
1568             return
1569
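        # The tag file is YAML of roughly this shape (a sketch, not the
        # authoritative schema; the group names 'fatal'/'nonfatal' are an
        # assumption here, only the top-level 'lintian' key is required above):
        #
        #   lintian:
        #     fatal:
        #       - some-fatal-tag
        #     nonfatal:
        #       - some-nonfatal-tag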
1570         # Try and find all orig mentioned in the .dsc
1571         symlinked = self.ensure_orig()
1572
1573         # Setup the input file for lintian
1574         fd, temp_filename = utils.temp_filename()
1575         temptagfile = os.fdopen(fd, 'w')
1576         for tags in lintiantags.values():
1577             temptagfile.writelines(['%s\n' % x for x in tags])
1578         temptagfile.close()
1579
1580         try:
1581             cmd = "lintian --show-overrides --tags-from-file %s %s" % \
1582                 (temp_filename, self.pkg.changes_file)
1583
1584             result, output = commands.getstatusoutput(cmd)
1585         finally:
1586             # Remove our tempfile and any symlinks we created
1587             os.unlink(temp_filename)
1588
1589             for symlink in symlinked:
1590                 os.unlink(symlink)
1591
1592         if result == 2:
1593             utils.warn("lintian failed for %s [return code: %s]." % \
1594                 (self.pkg.changes_file, result))
1595             utils.warn(utils.prefix_multi_line_string(output, \
1596                 " [possible output:] "))
1597
1598         def log(*txt):
1599             if self.logger:
1600                 self.logger.log(
1601                     [self.pkg.changes_file, "check_lintian"] + list(txt)
1602                 )
1603
1604         # Generate messages
1605         parsed_tags = parse_lintian_output(output)
1606         self.rejects.extend(
1607             generate_reject_messages(parsed_tags, lintiantags, log=log)
1608         )
1609
1610     ###########################################################################
1611     def check_urgency(self):
1612         cnf = Config()
1613         if self.pkg.changes["architecture"].has_key("source"):
1614             if not self.pkg.changes.has_key("urgency"):
1615                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1616             self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1617             if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1618                 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1619                                      (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1620                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1621
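    # e.g. (sketch): "Urgency: HIGH" is lowercased to "high"; an urgency not
    # listed in Urgency::Valid falls back to Urgency::Default with a warning.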
1622     ###########################################################################
1623
1624     # Sanity check the time stamps of files inside debs.
1625     # [Files in the near future cause ugly warnings and extreme time
1626     #  travel can cause errors on extraction]
1627
1628     def check_timestamps(self):
1629         Cnf = Config()
1630
1631         future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1632         past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1633         tar = TarTime(future_cutoff, past_cutoff)
1634
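        # Sketch of the cutoff arithmetic, with assumed example config values:
        #   FutureTimeTravelGrace = 86400  -> future_cutoff = now + 1 day
        #   PastCutoffYear = "1975"        -> past_cutoff = mktime(1975-01-01)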
1635         for filename, entry in self.pkg.files.items():
1636             if entry["type"] == "deb":
1637                 tar.reset()
1638                 try:
1639                     deb = apt_inst.DebFile(filename)
1640                     deb.control.go(tar.callback)
1641
1642                     future_files = tar.future_files.keys()
1643                     if future_files:
1644                         num_future_files = len(future_files)
1645                         future_file = future_files[0]
1646                         future_date = tar.future_files[future_file]
1647                         self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1648                                % (filename, num_future_files, future_file, time.ctime(future_date)))
1649
1650                     ancient_files = tar.ancient_files.keys()
1651                     if ancient_files:
1652                         num_ancient_files = len(ancient_files)
1653                         ancient_file = ancient_files[0]
1654                         ancient_date = tar.ancient_files[ancient_file]
1655                         self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1656                                % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1657                 except Exception:
1658                     self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_info()[0], sys.exc_info()[1]))
1659
1660     def check_if_upload_is_sponsored(self, uid_email, uid_name):
1661         for key in "maintaineremail", "changedbyemail", "maintainername", "changedbyname":
1662             if not self.pkg.changes.has_key(key):
1663                 return False
1664         uid_email = '@'.join(uid_email.split('@')[:2])
1665         if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1666             sponsored = False
1667         elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1668             sponsored = False
1669             if uid_name == "":
1670                 sponsored = True
1671         else:
1672             sponsored = True
1673             sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1674             debian_emails = filter(lambda addr: addr.endswith('@debian.org'), sponsor_addresses)
1675             if uid_email not in debian_emails:
1676                 if debian_emails:
1677                     uid_email = debian_emails[0]
1678             if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1679                 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1680                     self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1681                         self.pkg.changes["sponsoremail"] = uid_email
1682
1683         return sponsored
1684
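    # Illustrative outcomes (sketch): a signer matching Maintainer: or
    # Changed-By: by email or name is not a sponsor; an empty uid name or no
    # match at all means the upload is sponsored, and for source uploads a
    # plausible sponsor address may be recorded in "sponsoremail".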
1685
1686     ###########################################################################
1687     # check_signed_by_key checks
1688     ###########################################################################
1689
1690     def check_signed_by_key(self):
1691         """Ensure the .changes is signed by an authorized uploader."""
1692         session = DBConn().session()
1693
1694         # First of all we check that the person has proper upload permissions
1695         # and that this upload isn't blocked
1696         fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1697
1698         if fpr is None:
1699             self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1700             return
1701
1702         # TODO: Check that import-keyring adds UIDs properly
1703         if not fpr.uid:
1704             self.rejects.append("Cannot find uid for fingerprint %s.  Please contact ftpmaster@debian.org" % fpr.fingerprint)
1705             return
1706
1707         # Check that the fingerprint which uploaded has permission to do so
1708         self.check_upload_permissions(fpr, session)
1709
1710         # Check that this package is not in a transition
1711         self.check_transition(session)
1712
1713         session.close()
1714
1715
1716     def check_upload_permissions(self, fpr, session):
1717         # Check any one-off upload blocks
1718         self.check_upload_blocks(fpr, session)
1719
1720         # If the source_acl is None, source is never allowed
1721         if fpr.source_acl is None:
1722             if self.pkg.changes["architecture"].has_key("source"):
1723                 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1724                 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1725                 self.rejects.append(rej)
1726                 return
1727         # Do DM as a special case
1728         # DM is a special case unfortunately, so we check it first
1729         # (keys with no source access get more access than DMs in one
1730         #  way; DMs can only upload for their packages whether source
1731         #  or binary, whereas keys with no access might be able to
1732         #  upload some binaries)
1733         elif fpr.source_acl.access_level == 'dm':
1734             self.check_dm_upload(fpr, session)
1735         else:
1736             # If not a DM, we allow full upload rights
1737             uid_email = "%s@debian.org" % (fpr.uid.uid)
1738             self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1739
1740
1741         # Check binary upload permissions
1742         # By this point we know that DMs can't have got here unless they
1743         # are allowed to deal with the package concerned so just apply
1744         # normal checks
1745         if fpr.binary_acl.access_level == 'full':
1746             return
1747
1748         # Otherwise we're in the map case
1749         tmparches = self.pkg.changes["architecture"].copy()
1750         tmparches.pop('source', None)
1751
1752         for bam in fpr.binary_acl_map:
1753             tmparches.pop(bam.architecture.arch_string, None)
1754
1755         if len(tmparches.keys()) > 0:
1756             if fpr.binary_reject:
1757                 rej = "changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1758                 if len(tmparches.keys()) == 1:
1759                     rej += "\n\narchitecture involved is: %s" % ",".join(tmparches.keys())
1760                 else:
1761                     rej += "\n\narchitectures involved are: %s" % ",".join(tmparches.keys())
1762                 self.rejects.append(rej)
1763             else:
1764                 # TODO: This is where we'll implement reject vs throw away binaries later
1765                 rej = "Uhm.  I'm meant to throw away the binaries now but that's not implemented yet"
1766                 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1767                 rej += "\nFingerprint: %s" % (fpr.fingerprint)
1768                 self.rejects.append(rej)
1769
1770
1771     def check_upload_blocks(self, fpr, session):
1772         """Check whether any upload blocks apply to this source, source
1773            version, uid / fpr combination"""
1774
1775         def block_rej_template(fb):
1776             rej = 'Manual upload block in place for package %s' % fb.source
1777             if fb.version is not None:
1778                 rej += ', version %s' % fb.version
1779             return rej
1780
1781         for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1782             # version is None if the block applies to all versions
1783             if fb.version is None or fb.version == self.pkg.changes['version']:
1784                 # Check both fpr and uid - either is enough to cause a reject
1785                 if fb.fpr is not None:
1786                     if fb.fpr.fingerprint == fpr.fingerprint:
1787                         self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1788                 if fb.uid is not None:
1789                     if fb.uid == fpr.uid:
1790                         self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1791
1792
1793     def check_dm_upload(self, fpr, session):
1794         # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
1795         ## none of the uploaded packages are NEW
1796         rej = False
1797         for f in self.pkg.files.keys():
1798             if self.pkg.files[f].has_key("byhand"):
1799                 self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
1800                 rej = True
1801             if self.pkg.files[f].has_key("new"):
1802                 self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
1803                 rej = True
1804
1805         if rej:
1806             return
1807
1808         r = get_newest_source(self.pkg.changes["source"], session)
1809
1810         if r is None:
1811             rej = "Could not find existing source package %s in the DM allowed suites and this is a DM upload" % self.pkg.changes["source"]
1812             self.rejects.append(rej)
1813             return
1814
1815         if not r.dm_upload_allowed:
1816             rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
1817             self.rejects.append(rej)
1818             return
1819
1820         ## the Maintainer: field of the uploaded .changes file corresponds with
1821         ## the owner of the key used (ie, non-developer maintainers may not sponsor
1822         ## uploads)
1823         if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
1824             self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
1825
1826         ## the most recent version of the package uploaded to unstable or
1827         ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
1828         ## non-developer maintainers cannot NMU or hijack packages)
1829
1830         # uploader includes the maintainer
1831         accept = False
1832         for uploader in r.uploaders:
1833             (rfc822, rfc2047, name, email) = uploader.get_split_maintainer()
1834             # Eww - I hope we never have two people with the same name in Debian
1835             if email == fpr.uid.uid or name == fpr.uid.name:
1836                 accept = True
1837                 break
1838
1839         if not accept:
1840             self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
1841             return
1842
1843         ## none of the packages are being taken over from other source packages
1844         for b in self.pkg.changes["binary"].keys():
1845             for suite in self.pkg.changes["distribution"].keys():
1846                 for s in get_source_by_package_and_suite(b, suite, session):
1847                     if s.source != self.pkg.changes["source"]:
1848                         self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s.source, suite))
1849
1850
1851
1852     def check_transition(self, session):
1853         cnf = Config()
1854
1855         sourcepkg = self.pkg.changes["source"]
1856
1857         # No sourceful upload -> no need to do anything else, direct return
1858         # We also work with unstable uploads, not experimental or those going to some
1859         # proposed-updates queue
1860         if "source" not in self.pkg.changes["architecture"] or \
1861            "unstable" not in self.pkg.changes["distribution"]:
1862             return
1863
1864         # Also only check if there is a file defined (and existent) with
1865         # checks.
1866         transpath = cnf.get("Dinstall::ReleaseTransitions", "")
1867         if transpath == "" or not os.path.exists(transpath):
1868             return
1869
1870         # Parse the yaml file
1871         sourcefile = open(transpath, 'r')
1872         sourcecontent = sourcefile.read()
1873         try:
1874             transitions = yaml.safe_load(sourcecontent)
1875         except yaml.YAMLError as msg:
1876             # This shouldn't happen, there is a wrapper to edit the file which
1877             # checks it, but we would rather be safe than end up rejecting
1878             # everything.
1879             utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1880             return
1881
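        # The transitions file is YAML along these lines (a sketch; the field
        # names come from the lookups below, the transition name is arbitrary):
        #
        #   apt_transition:
        #     source: apt
        #     new: 0.9.7
        #     rm: Some Release-Team Member
        #     reason: apt ABI change
        #     packages:
        #       - apt
        #       - python-apt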
1882         # Now look through all defined transitions
1883         for trans in transitions:
1884             t = transitions[trans]
1885             source = t["source"]
1886             expected = t["new"]
1887
1888             # Will be None if nothing is in testing.
1889             current = get_source_in_suite(source, "testing", session)
1890             if current is not None:
1891                 compare = apt_pkg.VersionCompare(current.version, expected)
1892
1893             if current is None or compare < 0:
1894                 # The upload is still valid: the current version in testing is older
1895                 # than the version we are waiting for, or there is none in testing yet
1896
1897                 # Check if the source we look at is affected by this.
1898                 if sourcepkg in t['packages']:
1899                     # The source is affected, let's reject it.
1900
1901                     rejectmsg = "%s: part of the %s transition.\n\n" % (
1902                         sourcepkg, trans)
1903
1904                     if current is not None:
1905                         currentlymsg = "at version %s" % (current.version)
1906                     else:
1907                         currentlymsg = "not present in testing"
1908
1909                     rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1910
1911                     rejectmsg += "\n".join(textwrap.wrap("""Your package
1912 is part of a testing transition designed to get %s migrated (it is
1913 currently %s, we need version %s).  This transition is managed by the
1914 Release Team, and %s is the Release-Team member responsible for it.
1915 Please mail debian-release@lists.debian.org or contact %s directly if you
1916 need further assistance.  You might want to upload to experimental until this
1917 transition is done."""
1918                             % (source, currentlymsg, expected, t["rm"], t["rm"])))
1919
1920                     self.rejects.append(rejectmsg)
1921                     return
1922
1923     ###########################################################################
1924     # End check_signed_by_key checks
1925     ###########################################################################
1926
1927     def build_summaries(self):
1928         """ Build a summary of changes the upload introduces. """
1929
1930         (byhand, new, summary, override_summary) = self.pkg.file_summary()
1931
1932         short_summary = summary
1933
1934         # This is for direport's benefit...
1935         f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1936
1937         summary += "\n\nChanges:\n" + f
1938
1939         summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1940
1941         summary += self.announce(short_summary, 0)
1942
1943         return (summary, short_summary)
1944
1945     ###########################################################################
1946
1947     def close_bugs(self, summary, action):
1948         """
1949         Send mail to close bugs as instructed by the closes field in the changes file.
1950         Also add a line to summary if any work was done.
1951
1952         @type summary: string
1953         @param summary: summary text, as given by L{build_summaries}
1954
1955         @type action: bool
1956         @param action: If set to false, no real action will be taken.
1957
1958         @rtype: string
1959         @return: summary. If action was taken, extended by the list of closed bugs.
1960
1961         """
1962
1963         template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1964
1965         bugs = self.pkg.changes["closes"].keys()
1966
1967         if not bugs:
1968             return summary
1969
1970         bugs.sort()
1971         summary += "Closing bugs: "
1972         for bug in bugs:
1973             summary += "%s " % (bug)
1974             if action:
1975                 self.update_subst()
1976                 self.Subst["__BUG_NUMBER__"] = bug
1977                 if self.pkg.changes["distribution"].has_key("stable"):
1978                     self.Subst["__STABLE_WARNING__"] = """
1979 Note that this package is not part of the released stable Debian
1980 distribution.  It may have dependencies on other unreleased software,
1981 or other instabilities.  Please take care if you wish to install it.
1982 The update will eventually make its way into the next released Debian
1983 distribution."""
1984                 else:
1985                     self.Subst["__STABLE_WARNING__"] = ""
1986                 mail_message = utils.TemplateSubst(self.Subst, template)
1987                 utils.send_mail(mail_message)
1988
1989                 # Clear up after ourselves
1990                 del self.Subst["__BUG_NUMBER__"]
1991                 del self.Subst["__STABLE_WARNING__"]
1992
1993         if action and self.logger:
1994             self.logger.log(["closing bugs"] + bugs)
1995
1996         summary += "\n"
1997
1998         return summary
1999
2000     ###########################################################################
2001
2002     def announce(self, short_summary, action):
2003         """
2004         Send an announce mail about a new upload.
2005
2006         @type short_summary: string
2007         @param short_summary: Short summary text to include in the mail
2008
2009         @type action: bool
2010         @param action: If set to false, no real action will be taken.
2011
2012         @rtype: string
2013         @return: Text string describing the actions taken.
2014
2015         """
2016
2017         cnf = Config()
2018
2019         # Skip all of this if not sending mail to avoid confusing people
2020         if cnf.has_key("Dinstall::Options::No-Mail") and cnf["Dinstall::Options::No-Mail"]:
2021             return ""
2022
2023         # Only do announcements for source uploads with a recent dpkg-dev installed
2024         if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
2025            self.pkg.changes["architecture"].has_key("source"):
2026             return ""
2027
2028         announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
2029
2030         lists_todo = {}
2031         summary = ""
2032
2033         # Get a unique list of target lists
2034         for dist in self.pkg.changes["distribution"].keys():
2035             suite = get_suite(dist)
2036             if suite is None: continue
2037             for tgt in suite.announce:
2038                 lists_todo[tgt] = 1
2039
2040         self.Subst["__SHORT_SUMMARY__"] = short_summary
2041
2042         for announce_list in lists_todo.keys():
2043             summary += "Announcing to %s\n" % (announce_list)
2044
2045             if action:
2046                 self.update_subst()
2047                 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
2048                 if cnf.get("Dinstall::TrackingServer") and \
2049                    self.pkg.changes["architecture"].has_key("source"):
2050                     trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
2051                     self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
2052
2053                 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
2054                 utils.send_mail(mail_message)
2055
2056                 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
2057
2058         if cnf.FindB("Dinstall::CloseBugs") and cnf.has_key("Dinstall::BugServer"):
2059             summary = self.close_bugs(summary, action)
2060
2061         del self.Subst["__SHORT_SUMMARY__"]
2062
2063         return summary
2064
2065     ###########################################################################
2066     @session_wrapper
2067     def accept (self, summary, short_summary, session=None):
2068         """
2069         Accept an upload.
2070
2071         This moves all files referenced from the .changes into the pool,
2072         sends the accepted mail, announces to lists, closes bugs and
2073         also checks for override disparities. If enabled it will write out
2074         the version history for the BTS Version Tracking and will finally call
2075         L{queue_build}.
2076
2077         @type summary: string
2078         @param summary: Summary text
2079
2080         @type short_summary: string
2081         @param short_summary: Short summary
2082         """
2083
2084         cnf = Config()
2085         stats = SummaryStats()
2086
2087         print "Installing."
2088         self.logger.log(["installing changes", self.pkg.changes_file])
2089
2090         binaries = []
2091         poolfiles = []
2092
2093         # Add the .dsc file to the DB first
2094         for newfile, entry in self.pkg.files.items():
2095             if entry["type"] == "dsc":
2096                 source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
2097                 for j in pfs:
2098                     poolfiles.append(j)
2099
2100         # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
2101         for newfile, entry in self.pkg.files.items():
2102             if entry["type"] == "deb":
2103                 b, pf = add_deb_to_db(self, newfile, session)
2104                 binaries.append(b)
2105                 poolfiles.append(pf)
2106
2107         # If this is a sourceful diff only upload that is moving
2108         # cross-component we need to copy the .orig files into the new
2109         # component too for the same reasons as above.
2110         # XXX: mhy: I think this should be in add_dsc_to_db
2111         if self.pkg.changes["architecture"].has_key("source"):
2112             for orig_file in self.pkg.orig_files.keys():
2113                 if not self.pkg.orig_files[orig_file].has_key("id"):
2114                     continue # Skip if it's not in the pool
2115                 orig_file_id = self.pkg.orig_files[orig_file]["id"]
2116                 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
2117                     continue # Skip if the location didn't change
2118
2119                 # Do the move
2120                 oldf = get_poolfile_by_id(orig_file_id, session)
2121                 old_filename = os.path.join(oldf.location.path, oldf.filename)
2122                 old_dat = {'size': oldf.filesize,   'md5sum': oldf.md5sum,
2123                            'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
2124
2125                 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
2126
2127                 # TODO: Care about size/md5sum collisions etc
2128                 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
2129
2130                 # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
2131                 if newf is None:
2132                     utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
2133                     newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
2134
2135                     session.flush()
2136
2137                     # Don't reference the old file from this changes; rebuild the
2138                     # list rather than removing while iterating over it
2139                     poolfiles = [p for p in poolfiles if p.file_id != oldf.file_id]
2141
2142                     poolfiles.append(newf)
2143
2144                     # Fix up the DSC references
2145                     toremove = []
2146
2147                     for df in source.srcfiles:
2148                         if df.poolfile.file_id == oldf.file_id:
2149                             # Add a new DSC entry and mark the old one for deletion
2150                             # Don't do it in the loop so we don't change the thing we're iterating over
2151                             newdscf = DSCFile()
2152                             newdscf.source_id = source.source_id
2153                             newdscf.poolfile_id = newf.file_id
2154                             session.add(newdscf)
2155
2156                             toremove.append(df)
2157
2158                     for df in toremove:
2159                         session.delete(df)
2160
2161                     # Flush our changes
2162                     session.flush()
2163
2164                     # Make sure that our source object is up-to-date
2165                     session.expire(source)
2166
2167         # Add changelog information to the database
2168         self.store_changelog()
2169
2170         # Install the files into the pool
2171         for newfile, entry in self.pkg.files.items():
2172             destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
2173             utils.move(newfile, destination)
2174             self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
2175             stats.accept_bytes += float(entry["size"])
2176
2177         # Copy the .changes file across for suites which need it.
2178         copy_changes = dict([(x.copychanges, '')
2179                              for x in session.query(Suite).filter(Suite.suite_name.in_(self.pkg.changes["distribution"].keys())).all()
2180                              if x.copychanges is not None])
2181
2182         for dest in copy_changes.keys():
2183             utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
2184
2185         # We're done - commit the database changes
2186         session.commit()
2187         # Our SQL session will automatically start a new transaction after
2188         # the last commit
2189
2190         # Now ensure that the metadata has been added
2191         # This has to be done after we copy the files into the pool
2192         # For source if we have it:
2193         if self.pkg.changes["architecture"].has_key("source"):
2194             import_metadata_into_db(source, session)
2195
2196         # Now for any of our binaries
2197         for b in binaries:
2198             import_metadata_into_db(b, session)
2199
2200         session.commit()
2201
2202         # Move the .changes into the 'done' directory
2203         ye, mo, da = time.gmtime()[0:3]
2204         donedir = os.path.join(cnf["Dir::Done"], str(ye), "%0.2d" % mo, "%0.2d" % da)
2205         if not os.path.isdir(donedir):
2206             os.makedirs(donedir)
2207
2208         utils.move(self.pkg.changes_file,
2209                    os.path.join(donedir, os.path.basename(self.pkg.changes_file)))
2210
2211         if self.pkg.changes["architecture"].has_key("source"):
2212             UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
2213
2214         self.update_subst()
2215         self.Subst["__SUMMARY__"] = summary
2216         mail_message = utils.TemplateSubst(self.Subst,
2217                                            os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
2218         utils.send_mail(mail_message)
2219         self.announce(short_summary, 1)
2220
2221         ## Helper stuff for DebBugs Version Tracking
2222         if cnf.Find("Dir::BTSVersionTrack"):
2223             if self.pkg.changes["architecture"].has_key("source"):
2224                 (fd, temp_filename) = utils.temp_filename(cnf["Dir::BTSVersionTrack"], prefix=".")
2225                 version_history = os.fdopen(fd, 'w')
2226                 version_history.write(self.pkg.dsc["bts changelog"])
2227                 version_history.close()
2228                 filename = "%s/%s" % (cnf["Dir::BTSVersionTrack"],
2229                                       self.pkg.changes_file[:-8]+".versions")
2230                 os.rename(temp_filename, filename)
2231                 os.chmod(filename, 0o644)
2232
2233             # Write out the binary -> source mapping.
2234             (fd, temp_filename) = utils.temp_filename(cnf["Dir::BTSVersionTrack"], prefix=".")
2235             debinfo = os.fdopen(fd, 'w')
2236             for name, entry in sorted(self.pkg.files.items()):
2237                 if entry["type"] == "deb":
2238                     line = " ".join([entry["package"], entry["version"],
2239                                      entry["architecture"], entry["source package"],
2240                                      entry["source version"]])
2241                     debinfo.write(line+"\n")
2242             debinfo.close()
2243             filename = "%s/%s" % (cnf["Dir::BTSVersionTrack"],
2244                                   self.pkg.changes_file[:-8]+".debinfo")
2245             os.rename(temp_filename, filename)
2246             os.chmod(filename, 0o644)
2247
2248         session.commit()
2249
2250         # Set up our copy queues (e.g. buildd queues)
2251         for suite_name in self.pkg.changes["distribution"].keys():
2252             suite = get_suite(suite_name, session)
2253             for q in suite.copy_queues:
2254                 for f in poolfiles:
2255                     q.add_file_from_pool(f)
2256
2257         session.commit()
2258
2259         # Finally...
2260         stats.accept_count += 1
2261
2262     def check_override(self):
2263         """
2264         Checks override entries for validity. Mails "Override disparity" warnings,
2265         if that feature is enabled.
2266
2267         Abandons the check if
2268           - override disparity checks are disabled
2269           - mail sending is disabled
2270         """
2271
2272         cnf = Config()
2273
2274         # Abandon the check if override disparity checks have been disabled
2275         if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
2276             return
2277
2278         summary = self.pkg.check_override()
2279
2280         if summary == "":
2281             return
2282
2283         overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2284
2285         self.update_subst()
2286         self.Subst["__SUMMARY__"] = summary
2287         mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2288         utils.send_mail(mail_message)
2289         del self.Subst["__SUMMARY__"]
2290
2291     ###########################################################################
2292
2293     def remove(self, from_dir=None):
2294         """
2295         Used (for instance) in p-u to remove the package from unchecked
2296
2297         Also removes the package from holding area.
2298         """
2299         if from_dir is None:
2300             from_dir = self.pkg.directory
2301         h = Holding()
2302
2303         for f in self.pkg.files.keys():
2304             os.unlink(os.path.join(from_dir, f))
2305             if os.path.exists(os.path.join(h.holding_dir, f)):
2306                 os.unlink(os.path.join(h.holding_dir, f))
2307
2308         os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2309         if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2310             os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2311
2312     ###########################################################################
2313
2314     def move_to_queue (self, queue):
2315         """
2316         Move files to a destination queue using the permissions in the table
2317         """
2318         h = Holding()
2319         utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2320                    queue.path, perms=int(queue.change_perms, 8))
2321         for f in self.pkg.files.keys():
2322             utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
2323
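    # Note: queue.perms / queue.change_perms are octal permission strings in
    # the table, hence int(..., 8); e.g. int("0644", 8) == 0o644 == 420.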
2324     ###########################################################################
2325
2326     def force_reject(self, reject_files):
2327         """
2328         Forcefully move files from the current directory to the
2329         reject directory.  If any file already exists in the reject
2330         directory it will be moved to the morgue to make way for
2331         the new file.
2332
2333         @type reject_files: dict
2334         @param reject_files: file dictionary
2335
2336         """
2337
2338         cnf = Config()
2339
2340         for file_entry in reject_files:
2341             # Skip any files which don't exist or which we don't have permission to copy.
2342             if not os.access(file_entry, os.R_OK):
2343                 continue
2344
2345             dest_file = os.path.join(cnf["Dir::Reject"], file_entry)
2346
2347             try:
2348                 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0o644)
2349             except OSError as e:
2350                 # File exists?  Let's find a new name by adding a number
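                # (Assumption: utils.find_next_free tries numbered variants of
                #  dest_file, giving up after 255 attempts.)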
2351                 if e.errno == errno.EEXIST:
2352                     try:
2353                         dest_file = utils.find_next_free(dest_file, 255)
2354                     except NoFreeFilenameError:
2355                         # Something's either gone badly Pete Tong, or
2356                         # someone is trying to exploit us.
2357                         utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Reject"]))
2358                         return
2359
2360                     # Make sure we really got it
2361                     try:
2362                         dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0o644)
2363                     except OSError as e:
2364                         # Likewise
2365                         utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
2366                         return
2367                 else:
2368                     raise
2369             # If we got here, we own the destination file, so we can
2370             # safely overwrite it.
2371             utils.move(file_entry, dest_file, 1, perms=0o660)
2372             os.close(dest_fd)
2373
2374     ###########################################################################
2375     def do_reject (self, manual=0, reject_message="", notes=""):
2376         """
2377         Reject an upload. If called without a reject message or C{manual} is
2378         true, spawn an editor so the user can write one.
2379
2380         @type manual: bool
2381         @param manual: manual or automated rejection
2382
2383         @type reject_message: string
2384         @param reject_message: A reject message
2385
2386         @return: 0
2387
2388         """
2389         # If we weren't given a manual rejection message, spawn an
2390         # editor so the user can add one in...
2391         if manual and not reject_message:
2392             (fd, temp_filename) = utils.temp_filename()
2393             temp_file = os.fdopen(fd, 'w')
2394             if len(notes) > 0:
2395                 for note in notes:
2396                     temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
2397                                     % (note.author, note.version, note.notedate, note.comment))
2398             temp_file.close()
2399             editor = os.environ.get("EDITOR","vi")
2400             answer = 'E'
2401             while answer == 'E':
2402                 os.system("%s %s" % (editor, temp_filename))
2403                 temp_fh = utils.open_file(temp_filename)
2404                 reject_message = "".join(temp_fh.readlines())
2405                 temp_fh.close()
2406                 print "Reject message:"
2407                 print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
2408                 prompt = "[R]eject, Edit, Abandon, Quit ?"
2409                 answer = "XXX"
2410                 while prompt.find(answer) == -1:
2411                     answer = utils.our_raw_input(prompt)
2412                     m = re_default_answer.search(prompt)
2413                     if answer == "":
2414                         answer = m.group(1)
2415                     answer = answer[:1].upper()
2416             os.unlink(temp_filename)
2417             if answer == 'A':
2418                 return 1
2419             elif answer == 'Q':
2420                 sys.exit(0)
2421
2422         print "Rejecting.\n"
2423
2424         cnf = Config()
2425
2426         reason_filename = self.pkg.changes_file[:-8] + ".reason"
2427         reason_filename = os.path.join(cnf["Dir::Reject"], reason_filename)
2428         changesfile = os.path.join(cnf["Dir::Reject"], self.pkg.changes_file)
2429
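        # e.g. "dak_1.0-1_amd64.changes"[:-8] strips the 8-character ".changes"
        # suffix, giving "dak_1.0-1_amd64.reason" under Dir::Reject.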
2430         # Move all the files into the reject directory
2431         reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2432         self.force_reject(reject_files)
2433
2434         # Change permissions of the .changes file to be world readable
2435         try:
2436             os.chmod(changesfile, os.stat(changesfile).st_mode | stat.S_IROTH)
2437         except OSError as e:
2438             # Ignore 'Operation not permitted' (EPERM); unpacking the tuple here would shadow the errno module.
2439             if e.errno != errno.EPERM:
2440                 raise
2441
2442         # If we fail here someone is probably trying to exploit the race
2443         # so let's just raise an exception ...
2444         if os.path.exists(reason_filename):
2445             os.unlink(reason_filename)
2446         reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0o644)
2447
2448         rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
2449
2450         self.update_subst()
2451         if not manual:
2452             self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2453             self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2454             self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2455             os.write(reason_fd, reject_message)
2456             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2457         else:
2458             # Build up the rejection email
2459             user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2460             self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2461             self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2462             self.Subst["__REJECT_MESSAGE__"] = ""
2463             self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2464             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2465             # Write the rejection email out as the <foo>.reason file
2466             os.write(reason_fd, reject_mail_message)
2467
2468         del self.Subst["__REJECTOR_ADDRESS__"]
2469         del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2470         del self.Subst["__CC__"]
2471
2472         os.close(reason_fd)
2473
2474         # Send the rejection mail
2475         utils.send_mail(reject_mail_message)
2476
2477         if self.logger:
2478             self.logger.log(["rejected", self.pkg.changes_file])
2479
2480         stats = SummaryStats()
2481         stats.reject_count += 1
2482         return 0
2483
2484     ################################################################################
    def in_override_p(self, package, component, suite, binary_type, filename, session):
        """
        Check if a package already has override entries in the DB

        @type package: string
        @param package: package name

        @type component: string
        @param component: component name

        @type suite: string
        @param suite: suite name

        @type binary_type: string
        @param binary_type: type of the package

        @type filename: string
        @param filename: filename we check

        @return: the database result. But no one cares anyway.

        """
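        # A hypothetical example call, assuming name-based lookups as above:
        #   self.in_override_p("dak", "main", "unstable", "deb", f, session)
        # returns the matching override row (caching its section and priority
        # in self.pkg.files[f]) or None if the package is NEW for that suite.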

        cnf = Config()

        if binary_type == "": # must be source
            file_type = "dsc"
        else:
            file_type = binary_type

        # Override suite name; used for example with proposed-updates
        oldsuite = get_suite(suite, session)
        if oldsuite is not None and oldsuite.overridesuite:
            suite = oldsuite.overridesuite

        result = get_override(package, suite, component, file_type, session)

        # If checking for a source package fall back on the binary override type
        if file_type == "dsc" and len(result) < 1:
            result = get_override(package, suite, component, ['deb', 'udeb'], session)

        # Remember the section and priority so we can check them later if appropriate
        if len(result) > 0:
            result = result[0]
            self.pkg.files[filename]["override section"] = result.section.section
            self.pkg.files[filename]["override priority"] = result.priority.priority
            return result

        return None

    ################################################################################
    def get_anyversion(self, sv_list, suite):
        """
        @type sv_list: list
        @param sv_list: list of (suite, version) tuples to check

        @type suite: string
        @param suite: suite name

        @rtype: string or None
        @return: highest version of the package found in C{suite} itself or in
            any suite related to it by an "Enhances" version check; C{None} if
            no such version exists
        """
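        # Example with hypothetical data: given an "Enhances" version check
        # relating "testing" to "unstable",
        #   get_anyversion([("unstable", "1.0-1"), ("testing", "1.0-2")], "testing")
        # considers both suites and returns the highest version, "1.0-2".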
        Cnf = Config()
        anyversion = None
        anysuite = [suite] + [ vc.reference.suite_name for vc in get_version_checks(suite, "Enhances") ]
        for (s, v) in sv_list:
            if s in [ x.lower() for x in anysuite ]:
                if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
                    anyversion = v

        return anyversion

    ################################################################################

    def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
        """
        @type sv_list: list
        @param sv_list: list of (suite, version) tuples to check

        @type filename: string
        @param filename: name of the file being checked; only used in
            reject and warning messages

        @type new_version: string
        @param new_version: version of the upload being checked

        @type sourceful: bool
        @param sourceful: whether the upload includes source

        Ensure versions are newer than existing packages in target
        suites and that cross-suite version checking rules as
        set out in the conf file are satisfied.
        """
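        # For example, a "MustBeNewerThan" check on unstable referencing
        # stable means a sourceful upload targeted at unstable is rejected
        # unless its version is strictly greater than every version already
        # in stable; "MustBeOlderThan" is enforced the other way around.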

        cnf = Config()

        # Check versions for each target suite
        for target_suite in self.pkg.changes["distribution"].keys():
            # Check we can find the target suite
            ts = get_suite(target_suite)
            if ts is None:
                self.rejects.append("Cannot find target suite %s to perform version checks" % target_suite)
                continue

            must_be_newer_than = [ vc.reference.suite_name for vc in get_version_checks(target_suite, "MustBeNewerThan") ]
            must_be_older_than = [ vc.reference.suite_name for vc in get_version_checks(target_suite, "MustBeOlderThan") ]

            # Enforce "must be newer than target suite" even if conffile omits it
            if target_suite not in must_be_newer_than:
                must_be_newer_than.append(target_suite)

            for (suite, existent_version) in sv_list:
                vercmp = apt_pkg.VersionCompare(new_version, existent_version)

                if suite in must_be_newer_than and sourceful and vercmp < 1:
                    self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))

                if suite in must_be_older_than and vercmp > -1:
                    cansave = 0

                    if self.pkg.changes.get('distribution-version', {}).has_key(suite):
                        # we really use the other suite, ignoring the conflicting one ...
                        addsuite = self.pkg.changes["distribution-version"][suite]

                        add_version = self.get_anyversion(sv_list, addsuite)
                        target_version = self.get_anyversion(sv_list, target_suite)

                        if not add_version:
                            # not add_version can only happen if we map to a suite
                            # that doesn't enhance the suite we're propup'ing from.
                            # so "propup-ver x a b c; map a d" is a problem only if
                            # d doesn't enhance a.
                            #
                            # I think we could always propagate in this case, rather
                            # than complaining. Either way, this isn't a REJECT issue
                            #
                            # And - we really should complain to the dorks who configured dak
                            self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyway" % (suite, addsuite))
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            cansave = 1
                        elif not target_version:
                            # not target_version is true when the package is NEW;
                            # we could just stick with the "...old version..." REJECT
                            # for this, I think.
                            self.rejects.append("Won't propagate NEW packages.")
                        elif apt_pkg.VersionCompare(new_version, add_version) < 0:
                            # propagation would be redundant. no need to reject though.
                            self.warnings.append("ignoring version conflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
                            cansave = 1
                        elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
                             apt_pkg.VersionCompare(add_version, target_version) >= 0:
                            # propagate!!
                            self.warnings.append("Propagating upload to %s" % (addsuite))
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            cansave = 1

                    if not cansave:
                        self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))

    ################################################################################
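    # Run the cross-suite version checks for a binary package and refuse to
    # overwrite a (package, version, architecture) tuple that is already in
    # the archive.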
    def check_binary_against_db(self, filename, session):
        # Ensure version is sane
        self.cross_suite_version_check( \
            get_suite_version_by_package(self.pkg.files[filename]["package"], \
                self.pkg.files[filename]["architecture"], session),
            filename, self.pkg.files[filename]["version"], sourceful=False)

        # Check for any existing copies of the file
        q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
        q = q.filter_by(version=self.pkg.files[filename]["version"])
        q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])

        if q.count() > 0:
            self.rejects.append("%s: cannot overwrite existing copy already in the archive." % filename)

    ################################################################################

    def check_source_against_db(self, filename, session):
        source = self.pkg.dsc.get("source")
        version = self.pkg.dsc.get("version")

        # Ensure version is sane
        self.cross_suite_version_check( \
            get_suite_version_by_source(source, session), filename, version,
            sourceful=True)

    ################################################################################
    def check_dsc_against_db(self, filename, session):
        """

        @warning: NB: this function can remove entries from the 'files' index [if
         the orig tarball is a duplicate of the one in the archive]; if
         you're iterating over 'files' and call this function as part of
         the loop, be sure to add a check to the top of the loop to
         ensure you haven't just tried to dereference the deleted entry.

        """
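        # Each file named in the .dsc must either be part of the upload
        # itself or, for .orig tarballs only, already live in the pool or
        # in one of the policy queue directories.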

        Cnf = Config()
        self.pkg.orig_files = {} # XXX: do we need to clear it?
        orig_files = self.pkg.orig_files

        # Try and find all files mentioned in the .dsc.  This has
        # to work harder to cope with the multiple possible
        # locations of an .orig.tar.gz.
        # The ordering on the select is needed to pick the newest orig
        # when it exists in multiple places.
        for dsc_name, dsc_entry in self.pkg.dsc_files.items():
            found = None
            if self.pkg.files.has_key(dsc_name):
                actual_md5 = self.pkg.files[dsc_name]["md5sum"]
                actual_size = int(self.pkg.files[dsc_name]["size"])
                found = "%s in incoming" % (dsc_name)

                # Check the file does not already exist in the archive
                ql = get_poolfile_like_name(dsc_name, session)

                # Keep only results whose filename really ends in dsc_name;
                # build a new list instead of removing from ql mid-iteration.
                ql = [ i for i in ql if i.filename.endswith(dsc_name) ]

                # "[dak] has not broken them.  [dak] has fixed a
                # brokenness.  Your crappy hack exploited a bug in
                # the old dinstall."
                #
                # "(Come on!  I thought it was always obvious that
                # one just doesn't release different files with
                # the same name and version.)"
                #                        -- ajk@ on d-devel@l.d.o

                if len(ql) > 0:
                    # Ignore exact matches for .orig.tar.gz
                    match = 0
                    if re_is_orig_source.match(dsc_name):
                        for i in ql:
                            if self.pkg.files.has_key(dsc_name) and \
                               int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
                               self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
                                self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
                                # TODO: Don't delete the entry, just mark it as not needed
                                # This would fix the stupidity of changing something we often iterate over
                                # whilst we're doing it
                                del self.pkg.files[dsc_name]
                                dsc_entry["files id"] = i.file_id
                                if not orig_files.has_key(dsc_name):
                                    orig_files[dsc_name] = {}
                                orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
                                match = 1

                                # Don't bitch that we couldn't find this file later
                                try:
                                    self.later_check_files.remove(dsc_name)
                                except ValueError:
                                    pass

                    if not match:
                        self.rejects.append("cannot overwrite existing copy of '%s' already in the archive." % (dsc_name))
            elif re_is_orig_source.match(dsc_name):
                # Check in the pool
                ql = get_poolfile_like_name(dsc_name, session)

                # Keep only results whose filename really ends in dsc_name;
                # build a new list instead of removing from ql mid-iteration.
                # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
                ql = [ i for i in ql if i.filename.endswith(dsc_name) ]

                if len(ql) > 0:
                    # Unfortunately, we may get more than one match here if,
                    # for example, the package was in potato but had an -sa
                    # upload in woody.  So we need to choose the right one.

                    # default to something sane in case we don't match any or have only one
                    x = ql[0]

                    if len(ql) > 1:
                        for i in ql:
                            old_file = os.path.join(i.location.path, i.filename)
                            old_file_fh = utils.open_file(old_file)
                            actual_md5 = apt_pkg.md5sum(old_file_fh)
                            old_file_fh.close()
                            actual_size = os.stat(old_file)[stat.ST_SIZE]
                            if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
                                x = i

                    # Use the match we settled on (x), not the last loop variable
                    old_file = os.path.join(x.location.path, x.filename)
                    old_file_fh = utils.open_file(old_file)
                    actual_md5 = apt_pkg.md5sum(old_file_fh)
                    old_file_fh.close()
                    actual_size = os.stat(old_file)[stat.ST_SIZE]
                    found = old_file
                    suite_type = x.location.archive_type
                    # need this for updating dsc_files in install()
                    dsc_entry["files id"] = x.file_id
                    # See install() in process-accepted...
                    if not orig_files.has_key(dsc_name):
                        orig_files[dsc_name] = {}
                    orig_files[dsc_name]["id"] = x.file_id
                    orig_files[dsc_name]["path"] = old_file
                    orig_files[dsc_name]["location"] = x.location.location_id
                else:
                    # TODO: Determine queue list dynamically
                    # Not there? Check the queue directories...
                    for queue_name in [ "byhand", "new", "proposedupdates", "oldproposedupdates", "embargoed", "unembargoed" ]:
                        queue = get_policy_queue(queue_name, session)
                        if not queue:
                            continue

                        in_otherdir = os.path.join(queue.path, dsc_name)

                        if os.path.exists(in_otherdir):
                            in_otherdir_fh = utils.open_file(in_otherdir)
                            actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
                            in_otherdir_fh.close()
                            actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
                            found = in_otherdir
                            if not orig_files.has_key(dsc_name):
                                orig_files[dsc_name] = {}
                            orig_files[dsc_name]["path"] = in_otherdir

                    if not found:
                        self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
                        continue
            else:
                self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
                continue
            if actual_md5 != dsc_entry["md5sum"]:
                self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
            if actual_size != int(dsc_entry["size"]):
                self.rejects.append("size for %s doesn't match %s." % (found, filename))

    ################################################################################
    # This is used by process-new and process-holding to recheck a changes file
    # at the time we're running.  It mainly wraps various other internal functions
    # and is similar to accepted_checks - these should probably be tidied up
    # and combined.
    def recheck(self, session):
        cnf = Config()
        for f in self.pkg.files.keys():
            # The .orig.tar.gz can disappear out from under us if it's a
            # duplicate of one in the archive.
            if not self.pkg.files.has_key(f):
                continue

            entry = self.pkg.files[f]

            # Check that the source still exists
            if entry["type"] == "deb":
                source_version = entry["source version"]
                source_package = entry["source package"]
                if not self.pkg.changes["architecture"].has_key("source") \
                   and not source_exists(source_package, source_version, \
                    suites = self.pkg.changes["distribution"].keys(), session = session):
                    source_epochless_version = re_no_epoch.sub('', source_version)
                    dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
                    found = False
                    for queue_name in ["embargoed", "unembargoed", "newstage"]:
                        queue = get_policy_queue(queue_name, session)
                        if queue and os.path.exists(os.path.join(queue.path, dsc_filename)):
                            found = True
                    if not found:
                        self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))

            # Version and file overwrite checks
            if entry["type"] == "deb":
                self.check_binary_against_db(f, session)
            elif entry["type"] == "dsc":
                self.check_source_against_db(f, session)
                self.check_dsc_against_db(f, session)


    ################################################################################
    def accepted_checks(self, overwrite_checks, session):
        # Recheck anything that relies on the database, since that's not
        # frozen between accept and our run time when called from p-a.

        # overwrite_checks is set to False when installing to stable/oldstable

        propagate = {}
        nopropagate = {}

        # Find the .dsc (again)
        dsc_filename = None
        for f in self.pkg.files.keys():
            if self.pkg.files[f]["type"] == "dsc":
                dsc_filename = f

        for checkfile in self.pkg.files.keys():
            # The .orig.tar.gz can disappear out from under us if it's a
            # duplicate of one in the archive.
            if not self.pkg.files.has_key(checkfile):
                continue

            entry = self.pkg.files[checkfile]

            # Check that the source still exists
            if entry["type"] == "deb":
                source_version = entry["source version"]
                source_package = entry["source package"]
                if not self.pkg.changes["architecture"].has_key("source") \
                   and not source_exists(source_package, source_version, \
                    suites = self.pkg.changes["distribution"].keys(), \
                    session = session):
                    self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))

            # Version and file overwrite checks
            if overwrite_checks:
                if entry["type"] == "deb":
                    self.check_binary_against_db(checkfile, session)
                elif entry["type"] == "dsc":
                    self.check_source_against_db(checkfile, session)
                    self.check_dsc_against_db(dsc_filename, session)

            # Propagate in case the package is in the override tables:
            for suite in self.pkg.changes.get("propdistribution", {}).keys():
                if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
                    propagate[suite] = 1
                else:
                    nopropagate[suite] = 1

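        # A suite is only propagated to if every file of the upload has an
        # override entry there; a single missing override vetoes that suite.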
        for suite in propagate.keys():
            if suite in nopropagate:
                continue
            self.pkg.changes["distribution"][suite] = 1

        for checkfile in self.pkg.files.keys():
            entry = self.pkg.files[checkfile]
            # Check the package is still in the override tables
            for suite in self.pkg.changes["distribution"].keys():
                if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
                    self.rejects.append("%s is NEW for %s." % (checkfile, suite))

    ################################################################################
    # If any file of an upload has a recent mtime then chances are good
    # the file is still being uploaded.
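    # Dinstall::SkipTime gives the threshold in seconds: with, say, SkipTime
    # set to 300, anything modified within the last five minutes counts as
    # still in transit.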

    def upload_too_new(self):
        cnf = Config()
        too_new = False
        # Move back to the original directory to get accurate time stamps
        cwd = os.getcwd()
        os.chdir(self.pkg.directory)
        file_list = self.pkg.files.keys()
        file_list.extend(self.pkg.dsc_files.keys())
        file_list.append(self.pkg.changes_file)
        for f in file_list:
            try:
                last_modified = time.time() - os.path.getmtime(f)
                if last_modified < int(cnf["Dinstall::SkipTime"]):
                    too_new = True
                    break
            except OSError:
                # The file may have vanished in the meantime; ignore it.
                pass

        os.chdir(cwd)
        return too_new

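    # Store the changelog of a sourceful upload (or a bin-NMU) in the
    # database and link it to this upload's row in the changes table.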
    def store_changelog(self):
        # Skip binary-only upload if it is not a bin-NMU
        if not self.pkg.changes['architecture'].has_key('source'):
            from daklib.regexes import re_bin_only_nmu
            if not re_bin_only_nmu.search(self.pkg.changes['version']):
                return

        session = DBConn().session()

        # Check if upload already has a changelog entry
        query = """SELECT changelog_id FROM changes WHERE source = :source
                   AND version = :version AND architecture = :architecture AND changelog_id != 0"""
        if session.execute(query, {'source': self.pkg.changes['source'], \
                                   'version': self.pkg.changes['version'], \
                                   'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
            session.commit()
            return

        # Add current changelog text into changelogs_text table, return created ID
        query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
        ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]

        # Link ID to the upload available in changes table
        query = """UPDATE changes SET changelog_id = :id WHERE source = :source
                   AND version = :version AND architecture = :architecture"""
        session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
                                'version': self.pkg.changes['version'], \
                                'architecture': " ".join(self.pkg.changes['architecture'].keys())})

        session.commit()